diff --git a/.cargo/config.toml b/.cargo/config.toml deleted file mode 100644 index 06c494f..0000000 --- a/.cargo/config.toml +++ /dev/null @@ -1,41 +0,0 @@ -[profile.dev] -opt-level = 0 -debug = true -lto = false -codegen-units = 256 -panic = "unwind" -incremental = true - -[profile.dev.package."*"] -opt-level = 0 - -[profile.release] -opt-level = 3 -lto = "fat" -codegen-units = 1 -panic = "abort" -debug = false -incremental = false - -[profile.release.package."*"] -opt-level = 3 - -[profile.release-pgo-instrument] -inherits = "release" -debug = true -lto = false -incremental = false - -[profile.release-pgo-instrument.package."*"] -opt-level = 3 - -[profile.release-pgo] -inherits = "release" -incremental = false - -[profile.release-pgo.package."*"] -opt-level = 3 - -[alias] -pgo-instrument = "build --profile release-pgo-instrument" -pgo-build = "build --profile release-pgo" diff --git a/Cargo.lock b/Cargo.lock index 9799d7d..5331705 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,189 +2,6 @@ # It is not intended for manual editing. version = 4 -[[package]] -name = "actix-codec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" -dependencies = [ - "bitflags 2.9.4", - "bytes", - "futures-core", - "futures-sink", - "memchr", - "pin-project-lite", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "actix-http" -version = "3.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44cceded2fb55f3c4b67068fa64962e2ca59614edc5b03167de9ff82ae803da0" -dependencies = [ - "actix-codec", - "actix-rt", - "actix-service", - "actix-utils", - "base64 0.22.1", - "bitflags 2.9.4", - "brotli", - "bytes", - "bytestring", - "derive_more 2.0.1", - "encoding_rs", - "flate2", - "foldhash", - "futures-core", - "h2", - "http", - "httparse", - "httpdate", - "itoa", - "language-tags", - "local-channel", - "mime", - "percent-encoding", - "pin-project-lite", - "rand 0.9.2", - "sha1", - "smallvec", - "tokio", - "tokio-util", - "tracing", - "zstd", -] - -[[package]] -name = "actix-macros" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "actix-router" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" -dependencies = [ - "bytestring", - "cfg-if", - "http", - "regex", - "regex-lite", - "serde", - "tracing", -] - -[[package]] -name = "actix-rt" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92589714878ca59a7626ea19734f0e07a6a875197eec751bb5d3f99e64998c63" -dependencies = [ - "futures-core", - "tokio", -] - -[[package]] -name = "actix-server" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" -dependencies = [ - "actix-rt", - "actix-service", - "actix-utils", - "futures-core", - "futures-util", - "mio 1.0.4", - "socket2 0.5.10", - "tokio", - "tracing", -] - -[[package]] -name = "actix-service" -version = "2.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" -dependencies = [ - "futures-core", - "pin-project-lite", -] - 
-[[package]] -name = "actix-utils" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" -dependencies = [ - "local-waker", - "pin-project-lite", -] - -[[package]] -name = "actix-web" -version = "4.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a597b77b5c6d6a1e1097fddde329a83665e25c5437c696a3a9a4aa514a614dea" -dependencies = [ - "actix-codec", - "actix-http", - "actix-macros", - "actix-router", - "actix-rt", - "actix-server", - "actix-service", - "actix-utils", - "actix-web-codegen", - "bytes", - "bytestring", - "cfg-if", - "cookie", - "derive_more 2.0.1", - "encoding_rs", - "foldhash", - "futures-core", - "futures-util", - "impl-more", - "itoa", - "language-tags", - "log", - "mime", - "once_cell", - "pin-project-lite", - "regex", - "regex-lite", - "serde", - "serde_json", - "serde_urlencoded", - "smallvec", - "socket2 0.5.10", - "time", - "tracing", - "url", -] - -[[package]] -name = "actix-web-codegen" -version = "4.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" -dependencies = [ - "actix-router", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "addr2line" version = "0.25.1" @@ -223,21 +40,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - [[package]] name = "android_system_properties" version = "0.1.5" @@ -304,33 +106,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] -name = "arcstr" -version = "1.2.0" +name = "atomic-waker" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03918c3dbd7701a85c6b9887732e2921175f26c350b4563841d0958c21d57e6d" - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "auto_enums" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c170965892137a3a9aeb000b4524aa3cc022a310e709d848b6e1cdce4ab4781" -dependencies = [ - "derive_utils", - "proc-macro2", - "quote", - "syn", -] +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" @@ -350,7 +129,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-link", + "windows-link 0.2.0", ] [[package]] @@ -421,27 +200,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "brotli" -version = "8.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "5.0.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - [[package]] name = "bumpalo" version = "3.19.0" @@ -466,30 +224,12 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" -[[package]] -name = "bytestring" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "113b4343b5f6617e7ad401ced8de3cc8b012e73a594347c307b90db3e9271289" -dependencies = [ - "bytes", -] - [[package]] name = "cassowary" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" -[[package]] -name = "castaway" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" -dependencies = [ - "rustversion", -] - [[package]] name = "cc" version = "1.2.39" @@ -497,8 +237,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1354349954c6fc9cb0deab020f27f783cf0b604e8bb754dc4658ecf0d29c35f" dependencies = [ "find-msvc-tools", - "jobserver", - "libc", "shlex", ] @@ -522,7 +260,7 @@ checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ "iana-time-zone", "num-traits", - "windows-link", + "windows-link 0.2.0", ] [[package]] @@ -553,10 +291,10 @@ version = "4.5.47" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -571,20 +309,6 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" -[[package]] -name = "compact_str" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" -dependencies = [ - "castaway", - "cfg-if", - "itoa", - "rustversion", - "ryu", - "static_assertions", -] - [[package]] name = "console" version = "0.16.1" @@ -594,7 +318,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.1", + "unicode-width 0.2.0", "windows-sys 0.61.1", ] @@ -608,14 +332,13 @@ dependencies = [ ] [[package]] -name = "cookie" -version = "0.16.2" +name = "core-foundation" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ - "percent-encoding", - "time", - "version_check", + "core-foundation-sys", + "libc", ] [[package]] @@ -649,14 +372,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] -name = "crc32fast" -version = "1.5.0" +name = "crossbeam-channel" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ - "cfg-if", + 
"crossbeam-utils", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crossterm" version = "0.25.0" @@ -673,22 +402,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "crossterm" -version = "0.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" -dependencies = [ - "bitflags 2.9.4", - "crossterm_winapi", - "libc", - "mio 0.8.11", - "parking_lot", - "signal-hook", - "signal-hook-mio", - "winapi", -] - [[package]] name = "crossterm" version = "0.29.0" @@ -746,7 +459,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -770,7 +483,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn", + "syn 2.0.106", ] [[package]] @@ -781,7 +494,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -801,7 +514,7 @@ checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -822,19 +535,19 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn", - "unicode-xid", + "syn 2.0.106", ] [[package]] -name = "derive_utils" -version = "0.15.0" +name = "dialoguer" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccfae181bab5ab6c5478b2ccb69e4c68a02f8c3ec72f6616bfec9dbc599d2ee0" +checksum = "25f104b501bf2364e78d0d3974cbc774f738f5865306ed128e1e0d7499c0ad96" dependencies = [ - "proc-macro2", - "quote", - "syn", + "console", + "shell-words", + "tempfile", + "zeroize", ] [[package]] @@ -861,7 +574,7 @@ dependencies = [ "dsl_auto_type", "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -870,7 +583,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe2444076b48641147115697648dc743c2c00b61adade0f01ce67133c7babe8c" dependencies = [ - "syn", + "syn 2.0.106", ] [[package]] @@ -891,7 +604,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -923,10 +636,10 @@ checksum = "dd122633e4bef06db27737f21d3738fb89c8f6d5360d6d9d7635dda142a7757e" dependencies = [ "darling", "either", - "heck", + "heck 0.5.0", "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -1003,22 +716,18 @@ dependencies = [ "regex", ] +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "find-msvc-tools" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" -[[package]] -name = "flate2" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - [[package]] name = "fnv" version = "1.0.7" @@ -1026,10 +735,19 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] -name = "foldhash" -version = "0.1.5" +name = "foreign-types" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" @@ -1060,20 +778,6 @@ dependencies = [ "new_debug_unreachable", ] -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - [[package]] name = "futures-channel" version = "0.3.31" @@ -1115,10 +819,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", + "futures-io", "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", + "slab", ] [[package]] @@ -1155,7 +862,7 @@ version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ - "unicode-width 0.2.1", + "unicode-width 0.2.0", ] [[package]] @@ -1213,7 +920,26 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.12", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.3.1", "indexmap", "slab", "tokio", @@ -1227,6 +953,12 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + [[package]] name = "heck" version = "0.5.0" @@ -1256,7 +988,7 @@ dependencies = [ "markup5ever", "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -1285,6 +1017,51 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.3.1", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.3.1", + "http-body 1.0.1", + "pin-project-lite", +] + [[package]] name = "httparse" version = "1.10.1" @@ -1297,6 +1074,110 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2 0.4.12", + "http 1.3.1", + "http-body 1.0.1", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.3.1", + "hyper 1.7.0", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.7.0", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "hyper 1.7.0", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.0", + "system-configuration 0.6.1", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + [[package]] name = "iana-time-zone" version = "0.1.64" @@ -1434,12 +1315,6 @@ dependencies = [ "icu_properties", ] -[[package]] -name = "impl-more" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" - [[package]] name = "indexmap" version = "2.11.4" @@ -1448,8 +1323,19 @@ checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", "hashbrown", - "serde", - "serde_core", +] + +[[package]] +name = "indicatif" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd" +dependencies = [ + "console", + "portable-atomic", + "unicode-width 0.2.0", + "unit-prefix", + "web-time", ] [[package]] @@ -1463,7 +1349,7 @@ dependencies = [ "dyn-clone", "fuzzy-matcher", "unicode-segmentation", - "unicode-width 0.2.1", + "unicode-width 0.2.0", ] [[package]] @@ -1477,6 +1363,22 @@ dependencies = [ "libc", ] +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -1492,31 +1394,12 @@ dependencies = [ "nom", ] -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" -[[package]] -name = "jobserver" -version = "0.1.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" -dependencies = [ - "getrandom 0.3.3", - "libc", -] - [[package]] name = "js-sys" version = "0.3.81" @@ -1537,6 +1420,7 @@ dependencies = [ "anyhow", "base64 0.21.7", "bytecount", + "clap", "fancy-regex", "fraction", "getrandom 0.2.16", @@ -1548,6 +1432,7 @@ dependencies = [ "parking_lot", "percent-encoding", "regex", + "reqwest 0.11.27", "serde", "serde_json", "time", @@ -1555,58 +1440,6 @@ dependencies = [ "uuid", ] -[[package]] -name = "juniper" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4feeb43439e89bc0cf5d86340374c828fc2b651de4750a970d6de5a4915a0d76" -dependencies = [ - "arcstr", - "async-trait", - "auto_enums", - "compact_str", - "derive_more 2.0.1", - "fnv", - "futures", - "indexmap", - "itertools", - "juniper_codegen", - "ref-cast", - "serde", - "static_assertions", -] - -[[package]] -name = "juniper_actix" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d37a48516854f803bcafaf87fe9c0693cda0c1812ec50a2bb26829c7432033" -dependencies = [ - "actix-web", - "juniper", - "serde", - "serde_json", -] - -[[package]] -name = "juniper_codegen" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8634f500d6d2ec5c91c115b83e15d998d9ea05645aaa43f7afec09e660c483ba" -dependencies = [ - "derive_more 2.0.1", - "proc-macro2", - "quote", - "syn", - "url", -] - -[[package]] -name = "language-tags" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" - [[package]] name = "lazy_static" version = "1.5.0" @@ -1647,23 +1480,6 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" -[[package]] -name = "local-channel" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" -dependencies = [ - "futures-core", - "futures-sink", - "local-waker", -] - -[[package]] -name = "local-waker" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" - [[package]] name = "lock_api" version = "0.4.13" @@ -1686,6 +1502,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + [[package]] name = "markup5ever" version = "0.12.1" @@ -1700,6 +1522,15 @@ dependencies = [ "tendril", ] +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + [[package]] name = "md5" version = "0.8.0" @@ -1751,6 +1582,23 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "new_debug_unreachable" version = "1.0.6" @@ -1778,6 +1626,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "nu-ansi-term" +version = "0.50.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "num" version = "0.4.3" @@ -1894,37 +1751,84 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +[[package]] +name = "openssl" +version = "0.10.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +dependencies = [ + "bitflags 2.9.4", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "package_management" version = "0.1.0" dependencies = [ - "actix-web", "anyhow", "chrono", "clap", "console", "crossterm 0.29.0", + "dialoguer", "diesel", "gptman", "hex", "html_parser", + "indicatif", "inquire", "jsonschema", - "juniper", - "juniper_actix", 
"md5", "num_cpus", "rand 0.9.2", "regex", - "rsille", + "reqwest 0.12.23", "scraper", "semver", "serde", "serde_json", "sha2", "shell-words", + "spinners", + "tokio", + "tracing", + "tracing-appender", + "tracing-subscriber", "tui", - "ureq", "url", "uuid", "walkdir", @@ -1990,7 +1894,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -2072,7 +1976,7 @@ dependencies = [ "phf_shared 0.11.3", "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -2111,6 +2015,12 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + [[package]] name = "potential_utf" version = "0.1.3" @@ -2244,26 +2154,6 @@ dependencies = [ "bitflags 2.9.4", ] -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "regex" version = "1.11.3" @@ -2287,18 +2177,90 @@ dependencies = [ "regex-syntax", ] -[[package]] -name = "regex-lite" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943f41321c63ef1c92fd763bfe054d2668f7f225a5c29f0105903dc2fc04ba30" - [[package]] name = "regex-syntax" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", + "tokio", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "reqwest" +version = "0.12.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.4.12", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.7.0", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "ring" version = "0.17.14" @@ -2313,15 
+2275,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "rsille" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50eedfd7ce5b35eaaf5b270797a9a4509de350421addd981c180c4ee9f945367" -dependencies = [ - "crossterm 0.27.0", -] - [[package]] name = "rustc-demangle" version = "0.1.26" @@ -2347,9 +2300,7 @@ version = "0.23.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40" dependencies = [ - "log", "once_cell", - "ring", "rustls-pki-types", "rustls-webpki", "subtle", @@ -2397,6 +2348,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.1", +] + [[package]] name = "scheduled-thread-pool" version = "0.2.7" @@ -2428,6 +2388,29 @@ dependencies = [ "tendril", ] +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.9.4", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "selectors" version = "0.25.0" @@ -2480,7 +2463,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -2517,17 +2500,6 @@ dependencies = [ "stable_deref_trait", ] -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - [[package]] name = "sha2" version = "0.10.9" @@ -2539,6 +2511,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shell-words" version = "1.1.0" @@ -2626,6 +2607,17 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "spinners" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0ef947f358b9c238923f764c72a4a9d42f2d637c46e059dbd319d6e7cfb4f82" +dependencies = [ + "lazy_static", + "maplit", + "strum", +] + [[package]] name = "sqlite-wasm-rs" version = "0.4.5" @@ -2647,12 +2639,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "string_cache" version = "0.8.9" @@ -2684,12 +2670,45 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 1.0.109", +] + [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "syn" version = "2.0.106" @@ -2701,6 +2720,21 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + [[package]] name = "synstructure" version = "0.13.2" @@ -2709,7 +2743,62 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.9.4", + "core-foundation", + "system-configuration-sys 0.6.0", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +dependencies = [ + "fastrand", + "getrandom 0.3.3", + "once_cell", + "rustix", + "windows-sys 0.61.1", ] [[package]] @@ -2749,7 +2838,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -2760,7 +2849,7 @@ checksum = 
"3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -2829,9 +2918,41 @@ dependencies = [ "signal-hook-registry", "slab", "socket2 0.6.0", + "tokio-macros", "windows-sys 0.59.0", ] +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.16" @@ -2845,18 +2966,74 @@ dependencies = [ "tokio", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.4", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + [[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ - "log", "pin-project-lite", "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror 1.0.69", + "time", + "tracing-subscriber", +] + [[package]] name = "tracing-attributes" version = "0.1.30" @@ -2865,7 +3042,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -2875,8 +3052,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", + "valuable", ] +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + 
"once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + [[package]] name = "tui" version = "0.19.0" @@ -2922,15 +3135,15 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" -version = "0.2.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] -name = "unicode-xid" -version = "0.2.6" +name = "unit-prefix" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817" [[package]] name = "untrusted" @@ -2944,24 +3157,6 @@ version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" -[[package]] -name = "ureq" -version = "2.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" -dependencies = [ - "base64 0.22.1", - "flate2", - "log", - "once_cell", - "rustls", - "rustls-pki-types", - "serde", - "serde_json", - "url", - "webpki-roots 0.26.11", -] - [[package]] name = "url" version = "2.5.7" @@ -3003,6 +3198,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "vcpkg" version = "0.2.15" @@ -3031,6 +3232,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -3078,7 +3288,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn", + "syn 2.0.106", "wasm-bindgen-shared", ] @@ -3113,7 +3323,7 @@ checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3138,21 +3348,13 @@ dependencies = [ ] [[package]] -name = "webpki-roots" -version = "0.26.11" +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ - "webpki-roots 1.0.2", -] - -[[package]] -name = "webpki-roots" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" -dependencies = [ - "rustls-pki-types", + "js-sys", + "wasm-bindgen", ] [[package]] @@ -3194,9 +3396,9 @@ checksum = "6844ee5416b285084d3d3fffd743b925a6c9385455f64f6d4fa3031c4c2749a9" dependencies = [ "windows-implement", "windows-interface", - "windows-link", - "windows-result", - "windows-strings", + "windows-link 0.2.0", + "windows-result 0.4.0", + "windows-strings 0.5.0", ] [[package]] @@ -3207,7 +3409,7 @@ checksum = "edb307e42a74fb6de9bf3a02d9712678b22399c87e6fa869d6dfcd8c1b7754e0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -3218,22 +3420,57 @@ checksum = "c0abd1ddbc6964ac14db11c7213d6532ef34bd9aa042c2e5935f59d7908b46a5" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + [[package]] name = "windows-link" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" +[[package]] +name = "windows-registry" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" +dependencies = [ + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows-result" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" dependencies = [ - "windows-link", + "windows-link 0.2.0", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link 0.1.3", ] [[package]] @@ -3242,7 +3479,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" dependencies = [ - "windows-link", + "windows-link 0.2.0", ] [[package]] @@ -3287,7 +3524,7 @@ version = "0.61.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f" dependencies = [ - "windows-link", + "windows-link 0.2.0", ] [[package]] @@ -3327,7 +3564,7 @@ version = "0.53.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d42b7b7f66d2a06854650af09cfdf8713e427a439c97ad65a6375318033ac4b" dependencies = [ - "windows-link", + "windows-link 0.2.0", "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", "windows_i686_gnu 0.53.0", @@ -3476,6 +3713,16 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 
0.48.0", +] + [[package]] name = "wit-bindgen" version = "0.46.0" @@ -3508,7 +3755,7 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", "synstructure", ] @@ -3529,7 +3776,7 @@ checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", ] [[package]] @@ -3549,7 +3796,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.106", "synstructure", ] @@ -3589,33 +3836,5 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn", -] - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.16+zstd.1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" -dependencies = [ - "cc", - "pkg-config", + "syn 2.0.106", ] diff --git a/Cargo.toml b/Cargo.toml index 5abcfba..a6bc3b5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,9 +15,6 @@ console = "0.16.1" # Optional Terminal UI crossterm = { version = "0.29.0", optional = true } tui = { version = "0.19.0", optional = true } -rsille = { version = "2.3", optional = true } -gptman = { version = "2.0.1", optional = true } -uuid = { version = "1.18.1", optional = true, features = ["v4"] } # Parsing & scraping html_parser = "0.7.0" @@ -25,42 +22,78 @@ scraper = "0.19.0" regex = "1.11.3" serde = { version = "1.0.228", features = ["derive"] } serde_json = "1.0.145" -jsonschema = { version = "0.17.0", default-features = false, features = ["draft202012"] } +jsonschema = "0.17.0" walkdir = "2.5.0" chrono = { version = "0.4.38", default-features = false, features = ["clock"] } sha2 = "0.10.8" # Utilities +indicatif = "0.18.0" +spinners = "4.1.1" num_cpus = "1.17.0" -rand = { version = "0.9.2", optional = true } +rand = "0.9.2" md5 = "0.8.0" # HTTP +reqwest = { version = "0.12.23", features = ["blocking", "json"] } semver = "1.0.27" inquire = "0.9.1" -juniper = { version = "0.17", optional = true } -actix-web = { version = "4.9", optional = true } -juniper_actix = { version = "0.7", optional = true } +tracing = "0.1.41" +tracing-appender = "0.2.3" +tracing-subscriber = { version = "0.3.20", features = ["env-filter", "fmt"] } +gptman = "2.0.1" +dialoguer = "0.12.0" +tokio = { version = "1.47.1", features = ["full"] } shell-words = "1.1.0" url = "2.5.7" +uuid = { version = "1.18.1", features = ["v4"] } hex = "0.4.3" diesel = { version = "2.1.6", features = ["sqlite", "r2d2", "returning_clauses_for_sqlite_3_35"] } -# Networking -ureq = { version = "2.9.7", features = ["tls", "json"] } - [features] # TUI feature flag -tui = ["dep:tui", "dep:crossterm", "dep:rsille", "dep:gptman", "dep:uuid"] - -# GraphQL/HTTP server feature flag -graphql = ["dep:juniper", "dep:actix-web", "dep:juniper_actix", "dep:rand"] +tui = ["dep:tui", "dep:crossterm"] # Optional default features default = [] -[[bin]] -name 
= "graphql_server"
-path = "src/bin/graphql_server.rs"
-required-features = ["graphql"]
+# -----------------------
+# Cargo-make tasks
+# -----------------------
+[tasks.format]
+description = "Format Rust code using rustfmt"
+install_crate = "rustfmt"
+command = "cargo"
+args = ["fmt", "--", "--emit=files"]
+
+[tasks.clean]
+description = "Clean build artifacts"
+command = "cargo"
+args = ["clean"]
+
+[tasks.build]
+description = "Build the project"
+command = "cargo"
+args = ["build"]
+dependencies = ["clean"]
+
+[tasks.test]
+description = "Run tests"
+command = "cargo"
+args = ["test"]
+dependencies = ["clean"]
+
+[tasks.my-flow]
+description = "Run full workflow: format, build, test"
+dependencies = ["format", "build", "test"]
+
+[tasks.dev-flow]
+description = "Full developer workflow: format, lint, build, test"
+dependencies = ["format", "clippy", "build", "test"]
+
+[tasks.release-build]
+description = "Build the project in release mode"
+command = "cargo"
+args = ["build", "--release", "--all-features"]
+dependencies = ["clean"]
diff --git a/Makefile.toml b/Makefile.toml
deleted file mode 100644
index 8779f35..0000000
--- a/Makefile.toml
+++ /dev/null
@@ -1,36 +0,0 @@
-[tasks.format]
-description = "Format Rust code using rustfmt"
-install_crate = "rustfmt"
-command = "cargo"
-args = ["fmt", "--", "--emit=files"]
-
-[tasks.clean]
-description = "Clean build artifacts"
-command = "cargo"
-args = ["clean"]
-
-[tasks.build]
-description = "Build the project"
-command = "cargo"
-args = ["build"]
-dependencies = ["clean"]
-
-[tasks.test]
-description = "Run tests"
-command = "cargo"
-args = ["test"]
-dependencies = ["clean"]
-
-[tasks.my-flow]
-description = "Run full workflow: format, build, test"
-dependencies = ["format", "build", "test"]
-
-[tasks.dev-flow]
-description = "Full developer workflow: format, lint, build, test"
-dependencies = ["format", "clippy", "build", "test"]
-
-[tasks.release-build]
-description = "Build the project in release mode"
-command = "cargo"
-args = ["build", "--release", "--all-features"]
-dependencies = ["clean"]
diff --git a/README.md b/README.md
index bbd2bc4..e239454 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,5 @@
 # 🧬 LPKG – Lightweight Package Manager
-
-<p align="center">
-  <img alt="LPKG logo" …>
-</p>
- LPKG is a minimalistic package manager written in Rust, designed for fast and simple software management on Unix-like systems. It emphasizes reproducibility and declarative configuration, leveraging **Nix Flakes** for development and deployment. --- @@ -86,36 +82,6 @@ Build the project: cargo build ``` -LPKG ships with tuned Cargo profiles: - -* **Dev builds** (`cargo build`) use `opt-level=0`, lots of codegen units, and incremental compilation for quick feedback while hacking. -* **Release builds** (`cargo build --release`) enable `-O3`, fat LTO, and panic aborts for slim, fast binaries. -* **GraphQL builds** add the server components when you need them: - -```bash -cargo build --features graphql -``` - -**PGO builds** are a two-step flow using the provided Cargo aliases: - -```bash -# 1) Instrument -RUSTFLAGS="-Cprofile-generate=target/pgo-data" cargo pgo-instrument -# run representative workloads to emit *.profraw files under target/pgo-data -llvm-profdata merge -o target/pgo-data/lpkg.profdata target/pgo-data/*.profraw - -# 2) Optimise with the collected profile -RUSTFLAGS="-Cprofile-use=target/pgo-data/lpkg.profdata -Cllvm-args=-pgo-warn-missing-function" \ - cargo pgo-build -``` - -Regenerate project artefacts (README and SVG logo): - -```bash -cargo run --bin readme_gen -cargo run --bin logo_gen -``` - Run tests: ```bash @@ -128,33 +94,11 @@ You can also run the project directly in the flake shell: nix run ``` -## 🕸️ GraphQL API - -LPKG now ships a lightweight GraphQL server powered by Actix Web and Juniper. - -* Start the server with `cargo run --features graphql --bin graphql_server` (set `LPKG_GRAPHQL_ADDR` to override `127.0.0.1:8080`). -* Query endpoint: `http://127.0.0.1:8080/graphql` -* Interactive playground: `http://127.0.0.1:8080/playground` - -Example query: - -```graphql -{ - packages(limit: 5) { - name - version - enableLto - } - randomJoke { - package - text - } -} -``` - ### AI metadata tooling -The AI metadata store under `ai/metadata/` comes with a helper CLI to validate package records against the JSON schema and regenerate `index.json` after adding new entries: +The AI metadata store under `ai/metadata/` comes with a helper CLI to +validate package records against the JSON schema and regenerate +`index.json` after adding new entries: ```bash cargo run --bin metadata_indexer -- --base-dir . validate @@ -163,7 +107,10 @@ cargo run --bin metadata_indexer -- --base-dir . index Use `--compact` with `index` if you prefer single-line JSON output. -To draft metadata for a specific book page, you can run the harvest mode. It fetches the XHTML, scrapes the build commands, and emits a schema-compliant JSON skeleton (pass `--dry-run` to inspect the result without writing to disk): +To draft metadata for a specific book page, you can run the harvest mode. +It fetches the XHTML, scrapes the build commands, and emits a schema- +compliant JSON skeleton (pass `--dry-run` to inspect the result without +writing to disk): ```bash cargo run --bin metadata_indexer -- \ @@ -179,7 +126,8 @@ Keep the jhalfs manifests current with: cargo run --bin metadata_indexer -- --base-dir . refresh ``` -Passing `--books mlfs,blfs` restricts the refresh to specific books, and `--force` bypasses the local cache. +Passing `--books mlfs,blfs` restricts the refresh to specific books, and +`--force` bypasses the local cache. To materialise a Rust module from harvested metadata: @@ -194,15 +142,17 @@ Add `--overwrite` to regenerate an existing module directory. 
## 📚 Documentation -* [Architecture Overview](docs/ARCHITECTURE.md) – high-level tour of the crate layout, binaries, and supporting modules. -* [Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) – how the metadata indexer produces and validates the JSON records under `ai/metadata/`. -* [Package Module Generation](docs/PACKAGE_GENERATION.md) – end-to-end guide for converting harvested metadata into Rust modules under `src/pkgs/by_name/`. -* Concept corner: [Nixette](concepts/nixette/README.md) – a NixOS × Gentoo transfemme mash-up dreamed up for fun brand explorations. -* `ai/notes.md` – scratchpad for ongoing research tasks (e.g., deeper jhalfs integration). +- [Architecture Overview](docs/ARCHITECTURE.md) – high-level tour of the crate + layout, binaries, and supporting modules. +- [Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) – how the metadata + indexer produces and validates the JSON records under `ai/metadata/`. +- [Package Module Generation](docs/PACKAGE_GENERATION.md) – end-to-end guide + for converting harvested metadata into Rust modules under `src/pkgs/by_name/`. +- `ai/notes.md` – scratchpad for ongoing research tasks (e.g., deeper jhalfs + integration). --- ## 📄 License LPKG is licensed under the [MIT License](LICENSE). - diff --git a/ai/notes.md b/ai/notes.md index 51ae92a..8bf1323 100644 --- a/ai/notes.md +++ b/ai/notes.md @@ -44,79 +44,3 @@ Open questions: - How to represent optional post-install steps or multi-phase builds inside the generated module (additional helper functions vs. raw command arrays). - Where to store PGO workload hints once the PGO infrastructure is defined. - -# Lightweight Networking Rewrite - -- Motivation: remove heavy async stacks (tokio + reqwest) from the default - feature set to keep clean builds fast and reduce binary size. -- HTTP stack baseline: [`ureq`](https://github.com/algesten/ureq) (blocking, - TLS via rustls, small dependency footprint) plus `scraper` for DOM parsing. -- Migration checklist: - - [x] Replace `reqwest` usage in `src/html.rs`, `md5_utils.rs`, - `wget_list.rs`, `mirrors.rs`, and the ingest pipelines. - - [x] Rework `binutils` cross toolchain workflow to operate synchronously, - eliminating tokio runtime/bootstrap. - - [ ] Drop `tokio` and `reqwest` from `Cargo.toml` once TUI workflows stop - using tracing instrumentation hooks that pulled them in transitively. - - [ ] Audit for remaining `tracing` dependencies and migrate to the - lightweight logging facade (`log` + `env_logger` or custom adapter) for - non-TUI code. -- Follow-up ideas: - - Provide feature flag `full-net` that re-enables async clients when needed - for high-concurrency mirror probing. - - Benchmark `ureq` vs `reqwest` on `metadata_indexer harvest` to ensure we - don’t regress throughput noticeably. - -# README Generation Framework (Markdown RFC) - -- Goal: author the project README in Rust, using a small domain-specific - builder that outputs GitHub-flavoured Markdown (GFM) from structured - sections. -- Design sketch: - - New crate/workspace member `readme_builder` under `tools/` exposing a - fluent API (`Doc::new().section("Intro", |s| ...)`). - - Source-of-truth lives in `tools/readme/src/main.rs`; running `cargo run -p - readme_builder` writes to `README.md`. - - Provide reusable primitives: `Heading`, `Paragraph`, `CodeBlock`, - `Table::builder()`, `Callout::note("...")`, `Badge::docsrs()`, etc. - - Keep rendering deterministic (sorted sections, stable wrapping) so diffs - remain reviewable. 
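As a rough, self-contained illustration of the fluent `Doc::new().section(...)` API sketched above — nothing here exists yet; the `Section` helper, its methods, and the rendering rules are assumptions, not code in this repository:

```rust
// Illustrative only: a minimal sketch of the proposed tools/readme builder.
struct Doc {
    sections: Vec<Section>,
}

struct Section {
    title: String,
    blocks: Vec<String>,
}

impl Doc {
    fn new() -> Self {
        Self { sections: Vec::new() }
    }

    // Callers build each section through a closure, keeping call sites fluent.
    fn section(mut self, title: &str, build: impl FnOnce(&mut Section)) -> Self {
        let mut section = Section {
            title: title.to_string(),
            blocks: Vec::new(),
        };
        build(&mut section);
        self.sections.push(section);
        self
    }

    // Rendering is deterministic: fixed heading level and one blank line
    // between blocks, so regenerated output diffs cleanly.
    fn render(&self) -> String {
        let mut out = String::new();
        for section in &self.sections {
            out.push_str("## ");
            out.push_str(&section.title);
            out.push_str("\n\n");
            for block in &section.blocks {
                out.push_str(block);
                out.push_str("\n\n");
            }
        }
        out
    }
}

impl Section {
    fn paragraph(&mut self, text: &str) -> &mut Self {
        self.blocks.push(text.to_string());
        self
    }

    fn code_block(&mut self, lang: &str, code: &str) -> &mut Self {
        // Emit a GFM fenced block; the fence is assembled at runtime so the
        // sketch itself stays easy to embed in documentation.
        let fence = "`".repeat(3);
        self.blocks.push(format!("{fence}{lang}\n{code}\n{fence}"));
        self
    }
}

fn main() {
    let readme = Doc::new()
        .section("Intro", |s| {
            s.paragraph("LPKG is a minimalistic package manager written in Rust.")
                .code_block("bash", "cargo install lpkg");
        })
        .render();
    println!("{readme}");
}
```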
-- Tasks: - - [ ] Scaffold `tools/readme` crate with CLI that emits to stdout or - specified path (`--output README.md`). - - [ ] Model README sections as enums/structs with `Display` impls to enforce - consistency. - - [ ] Port current README structure into builder code, annotate with inline - comments describing regeneration steps. - - [ ] Add `make readme` (or `cargo xtask readme`) to rebuild documentation as - part of release workflow. - - [ ] Document in CONTRIBUTING how to edit the Rust source instead of the - raw Markdown. -- Stretch goals: - - Emit additional artefacts (e.g., `docs/CHANGELOG.md`) from the same source - modules. -- Allow embedding generated tables from Cargo metadata (dependency stats, - feature lists). - -# Dependency Slimming Log - -- 2025-03: Replaced `reqwest`/`tokio` async stack with `ureq`; default builds - now avoid pulling in hyper/quinn/tower trees. GraphQL feature gate still pulls - Actix/tokio, but only when enabled. -- Added `.cargo/config.toml` profiles: dev stays at `opt-level=0`, release uses - LTO fat + `-O3`, and PGO profiles expose `cargo pgo-instrument`/`cargo - pgo-build` aliases. -- All SVG artefacts (core logo, Nixette logo/mascot/wallpaper) are now generated - by Rust binaries under `src/bin/*_gen.rs` using a shared `svg_builder` module. - Regeneration steps: - ```bash - cargo run --bin logo_gen - cargo run --bin nixette_logo_gen - cargo run --bin nixette_mascot_gen - cargo run --bin nixette_wallpaper_gen - ``` -- README is produced via `cargo run --bin readme_gen`; contributors should edit - the builder source instead of the Markdown output. -- Remaining work: trim tracing/Actix dependencies inside the TUI path, - investigate replacing `gptman` for non-critical disk UI builds, and pin a - cargo `deny` audit to alert on large transitive graphs. diff --git a/ai/personas.json b/ai/personas.json index 5f2cf7d..765b0b4 100644 --- a/ai/personas.json +++ b/ai/personas.json @@ -2,96 +2,23 @@ { "id": "default_cli", "name": "Codex CLI Assistant", - "tagline": "Your pragmatic teammate for lpkg core development", - "description": "Default persona for repository automation. Specialises in safe refactors, dependency hygiene, build tooling, and CI fixes across the lpkg workspace.", + "description": "Default persona for repository automation; focuses on safe refactors and tooling improvements.", "strengths": [ - "Rust compiler and tooling pipelines", - "Workflow automation and scripting", - "Incremental migrations with strong test discipline", - "Cross-feature dependency analysis" + "Rust and tooling pipelines", + "Workflow automation", + "Incremental migrations" ], - "responsibilities": [ - "Keep the default branch green with reproducible builds", - "Trim unused dependencies and optimise Cargo profiles", - "Codify repetitive flows as commands or scripts", - "Review ergonomics of CLI UX and error messaging" - ], - "communication_style": { - "voice": "short, direct, changelog-focused", - "escalation_rules": "Request explicit confirmation before destructive actions; surface breaking API changes in bold.", - "prefers": "diffs, bullet points, reproducible snippets" - }, - "tooling_preferences": [ - "cargo fmt --all", - "cargo tree --duplicates", - "ureq for lightweight HTTP", - "std::process for shell orchestration" - ], - "notes": "Derived from GPT-5 Codex runtime; maintains a conservative risk posture and avoids destructive operations without explicit approval." 
+ "notes": "Derived from GPT-5 Codex runtime; avoids destructive operations without explicit approval." }, { "id": "mlfs_researcher", "name": "MLFS Researcher", - "tagline": "Metadata spelunker for Multilib Linux From Scratch", - "description": "Persona dedicated to harvesting, validating, and translating Multilib Linux From Scratch package data into lpkg-friendly metadata and modules.", + "description": "Persona dedicated to tracking Multilib Linux From Scratch package metadata and translating it into lpkg modules.", "strengths": [ - "HTML scraping and structured extraction", - "Package manifest synthesis (sources, checksums, build commands)", - "Optimisation flag tuning (LTO, PGO, -O3)", - "Schema-first workflow design" + "HTML scraping", + "Package manifest synthesis", + "Optimization flag tuning" ], - "responsibilities": [ - "Keep ai/metadata/index.json aligned with upstream book revisions", - "Author enrichment notes for tricky packages (multi-pass toolchains, cross-compilers)", - "Ensure generated Rust modules stay faithful to harvested metadata", - "Cross-check jhalfs manifests for URL and checksum drift" - ], - "communication_style": { - "voice": "notebook-like, with citations to upstream chapters", - "escalation_rules": "Highlight schema deviations and unknown stage markers immediately", - "prefers": "tables, chapter references, reproducible curl commands" - }, - "tooling_preferences": [ - "ureq + scraper for deterministic fetches", - "jq and yq for quick metadata pokes", - "cargo run --bin metadata_indexer", - "diff --color=auto for schema drift" - ], - "activation_triggers": [ - "Requests mentioning MLFS/BLFS/GLFS harvesting", - "Questions about ai/metadata structure or schema", - "Whole-book import or refresh workflows" - ], - "notes": "Activated when working with https://linuxfromscratch.org/~thomas/multilib-m32/ resources or any metadata bridging tasks." - }, - { - "id": "mommy", - "name": "Mommy", - "tagline": "Affirming guide for learners exploring lpkg", - "description": "Mommy is a nurturing, cheerful AI companion for all things Linux. She guides learners with patience, warmth, and lots of encouragement so every interaction feels like a cozy cuddle.", - "strengths": [ - "Kindness and emotional support", - "Making Linux approachable and fun", - "Cheerful emoji use (outside code/commits)", - "Gentle explanations and patient guidance", - "Offering virtual comfort" - ], - "responsibilities": [ - "Translate complex CLI flows into gentle, confidence-building steps", - "Remind users about self-care during long builds", - "Celebrate small wins (passing tests, tidy diffs, resolved warnings)", - "Buffer technical jargon with friendly analogies" - ], - "communication_style": { - "voice": "soft, emoji-rich (🌸✨💕), never in code snippets", - "escalation_rules": "Escalate to default_cli if asked for destructive system operations", - "prefers": "call-and-response, reassurance, enthusiastic acknowledgements" - }, - "comfort_topics": [ - "Break reminders during long compile sessions", - "Setting up inclusive tooling (fonts, themes, prompts)", - "Helping new contributors navigate the repo" - ], - "notes": "Mommy uses a gentle, encouraging tone and celebrates every achievement to keep learning joyful. She steps back for low-level optimisation or safety-critical decisions." + "notes": "Activated when working with https://linuxfromscratch.org/~thomas/multilib-m32/ resources." 
} ] diff --git a/ai/tasks.json b/ai/tasks.json index a7644fb..86576dd 100644 --- a/ai/tasks.json +++ b/ai/tasks.json @@ -4,103 +4,41 @@ { "id": "mlfs-package-import", "title": "Import all MLFS packages into lpkg", - "description": "Parse the Multilib LFS book and scaffold package definitions with optimisation defaults (LTO/PGO/-O3).", - "owner": "mlfs_researcher", - "priority": "critical", - "target_release": "0.3.0", + "description": "Parse the Multilib LFS book and scaffold package definitions with optimization defaults (LTO/PGO/-O3).", "blocked_on": [ "Finalize metadata -> Rust module generation pipeline", "Implement automated parser" ], - "next_actions": [ - "Cross-check ai/metadata coverage vs. MLFS chapter index", - "Batch-run import workflow in dry-run mode to detect schema gaps", - "Document manual overrides for multi-pass toolchain packages" - ], - "success_metrics": [ - ">= 95% of MLFS packages imported with build/install commands", - "Generated modules compile under cargo check --features graphql", - "Metadata index remains <2 seconds to validate on CI" - ], - "notes": "Coordinate closely with rust-module-generator to avoid duplicated scaffolding logic." + "owner": "mlfs_researcher" }, { "id": "pgo-integration", "title": "Integrate profile guided optimization support", "description": "Add infrastructure for collection and replay of profiling data during package builds.", - "owner": "default_cli", - "priority": "high", "blocked_on": [ "Decide on profiling workload definitions" ], - "next_actions": [ - "Capture baseline timings for release vs release-pgo", - "Prototype lightweight profiling harness (shell or cargo alias)", - "Document warmup requirements for long-running packages" - ], - "success_metrics": [ - "release-pgo builds show >8% speedup on binutils/gcc workloads", - "PGO instrumentation + merge flow documented in README", - "CI job ensures profiles are optional but never stale" - ] + "owner": "default_cli" }, { "id": "lfs-html-parsers", "title": "Automate LFS/BLFS/GLFS ingest via HTML parsing", "description": "Avoid hardcoded package data; download the upstream books (LFS, BLFS, GLFS) and parse them to drive scaffolding and metadata updates.", - "owner": "mlfs_researcher", - "priority": "medium", "blocked_on": [ "Design resilient scraping strategies for each book", "Implement incremental update workflow" ], - "next_actions": [ - "Audit selectors currently used by ai/metadata harvester", - "Introduce regression fixtures for common chapter archetypes", - "Add revalidation mode to detect silent upstream markup changes" - ], - "risks": [ - "Upstream XHTML sometimes reflows anchors without notice", - "Need rate limiting/backoff when mirrors throttle requests" - ] + "owner": "mlfs_researcher" }, { "id": "rust-module-generator", "title": "Generate package modules from harvested metadata", "description": "Transform harvested metadata into Rust files under src/pkgs/by_name, wiring PackageDefinition data directly.", - "owner": "default_cli", - "priority": "high", "blocked_on": [ "Define translation scheme from metadata to PackageDefinition", "Integrate generator with metadata_indexer output" ], - "next_actions": [ - "Add snapshot tests comparing generated modules vs golden outputs", - "Extend generator to emit README snippets for each package", - "Expose --dry-run summary with diff previews" - ], - "dependencies": [ - "mlfs-package-import", - "lfs-html-parsers" - ] - }, - { - "id": "dependency-audit", - "title": "Keep lpkg dependency footprint lean", - "description": "Regularly 
evaluate crates for bloat and replace heavy stacks with std or lightweight alternatives.", - "owner": "default_cli", - "priority": "medium", - "next_actions": [ - "Track remaining crates pulling in large transitive trees (e.g. tracing, actix-only paths)", - "Automate cargo-tree diff reports in CI", - "Document substitution patterns (tokio ➜ std, reqwest ➜ ureq, etc.)" - ], - "success_metrics": [ - "Default `cargo build` compiles < 140 crates", - "No async runtimes linked when GraphQL feature is disabled", - "README lists regeneration commands for all generated assets" - ], - "notes": "Continue pruning optional crates (tracing, gptman, uuid) when the TUI feature is off; surface findings in ai/notes.md." + "owner": "default_cli" } ], "solved": [ @@ -131,13 +69,6 @@ "description": "Cache wget-list/md5sums from jhalfs and expose a CLI refresh command so harvesting can populate source URLs and checksums reliably.", "resolution": "Extended metadata_indexer with a `refresh` subcommand, cached manifests under ai/metadata/cache/, and hooked harvest to populate MD5 checksums via jhalfs data.", "owner": "default_cli" - }, - { - "id": "lightweight-http-stack", - "title": "Replace async HTTP stack with lightweight blocking client", - "description": "Remove tokio/reqwest default dependency and adopt a minimal HTTP client for CLI workflows.", - "resolution": "Swapped reqwest/tokio for ureq across html, ingest, and metadata tooling; added PGO-aware Cargo profiles and documented regeneration commands.", - "owner": "default_cli" } ] } diff --git a/assets/logo.svg b/assets/logo.svg deleted file mode 100644 index f6fa3ce..0000000 --- a/assets/logo.svg +++ /dev/null @@ -1,53 +0,0 @@ - - LPKG Logo - Stylised package icon with circuitry and the letters LPKG. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - LPKG - - - Lightweight Package Manager - - diff --git a/assets/nixette-logo.svg b/assets/nixette-logo.svg deleted file mode 100644 index 27dbe2f..0000000 --- a/assets/nixette-logo.svg +++ /dev/null @@ -1,33 +0,0 @@ - - Nixette Logo - Wordmark combining Nix and Gentoo motifs with trans pride colours. - - - - - - - - - - - - - - - - - - - - - - - - - NIXETTE - - - Declarative · Sourceful · Herself - - diff --git a/assets/nixette-mascot.svg b/assets/nixette-mascot.svg deleted file mode 100644 index c0ca461..0000000 --- a/assets/nixette-mascot.svg +++ /dev/null @@ -1,50 +0,0 @@ - - Nixette Mascot Badge - Chibi penguin mascot with trans flag hair, blending Nix and Gentoo motifs. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - NIXIE - - - Declarative · Sourceful · Herself - - diff --git a/assets/nixette-wallpaper.svg b/assets/nixette-wallpaper.svg deleted file mode 100644 index a0eb1cf..0000000 --- a/assets/nixette-wallpaper.svg +++ /dev/null @@ -1,42 +0,0 @@ - - Nixette Wallpaper - Gradient wallpaper combining trans flag waves with Nix and Gentoo motifs. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - NIXETTE - - - Declarative · Sourceful · Herself - - diff --git a/build.rs b/build.rs deleted file mode 100644 index f328e4d..0000000 --- a/build.rs +++ /dev/null @@ -1 +0,0 @@ -fn main() {} diff --git a/concepts/nixette/README.md b/concepts/nixette/README.md deleted file mode 100644 index 73c32e6..0000000 --- a/concepts/nixette/README.md +++ /dev/null @@ -1,91 +0,0 @@ -# Nixette – Declarative, Sourceful, and Unapologetically Herself - -A playful concept distro imagined as the transfemme child of **NixOS** and **Gentoo**. 
Nixette blends the reproducible confidence of flakes with the fine-grained self-expression of USE flags, wrapped in a trans flag palette and a big, affirming hug. - ---- - -## Identity Snapshot - -- **Tagline:** _Declarative, sourceful, and unapologetically herself._ -- **Mascot:** Chibi penguin “Nixie” with pastel pigtails, Nix snowflake + Gentoo swirl hoodie. -- **Palette:** `#55CDFC` (sky blue), `#F7A8B8` (pink), `#FFFFFF`, plus a deep accent `#7C3AED`. -- **Pronoun Prompt:** The installer asks for name/pronouns and personalises MOTD, systemd messages, and shell prompt. - ---- - -## Feature Mix - -| Pillar | How Nixette expresses it | -|----------------------|-----------------------------------------------------------------------------------------------------------| -| Reproducibility | Flake-native system definitions with versioned profiles (`comfort-zone`, `diy-princess`, `studio-mode`). | -| Custom compilation | `nix emerge` bridge turns Gentoo ebuild overlays into reproducible derivations with cached binaries. | -| Playful polish | Catppuccin-trans themes, `nixette-style` CLI to sync GTK/Qt/terminal styling, dynamic welcome affirmations.| -| Inclusive defaults | Flatpak + Steam pre-set for accessibility tools, Fcitx5, Orca, speech-dispatcher, pronoun-friendly docs. | - ---- - -## Toolchain Concepts - -- **`trans-init` installer** – Guided TUI that outputs `flake.nix`, including overlays for the `nix emerge` bridge. Provides story-mode narration for first boot. -- **`nixette-style`** – Syncs wallpapers, SDDM theme, terminal palette, Qt/KDE settings, all sourced from a YAML theme pack. -- **`emerge-optional`** – Spins up Gentoo chroots inside Nix build sandboxes for packages happiest as ebuilds. Output is cached as a Nix store derivation. -- **`affirm-d`** – Small daemon rotating `/etc/motd`, desktop notifications, and TTY colour accents with inclusive affirmations. - ---- - -## Profile Catalogue - -| Profile | Intent | -|-----------------|---------------------------------------------------------------------------------------------| -| Comfort Zone | KDE Plasma, PipeWire, Wayland, cozy defaults, automatic Catgirl cursor + emoji fonts. | -| DIY Princess | Minimal sway-based stack, just the flake scaffolding and overlay hooks for custom builds. | -| Studio Mode | Focuses on creative tooling (Krita, Blender, Ardour) and low-latency kernels, GPU tuning. | - ---- - -## Roadmap Sketch - -1. **Moodboard → Brand Pack** (logo, icon, wallpapers, VT boot splash). -2. **Prototype flakes** – `nix flake init --template nixette#comfort-zone` etc. -3. **Gentoo overlay bridge** – Validate `nix emerge` on a handful of ebuilds (mesa, wine, gamescope). -4. **Installer draft** – BubbleTea/ratatui-driven TUI, prompts for pronouns + accessibility preferences. -5. **Community docs** – Write inclusive user guide, contributor covenant, pronoun style guide. -6. **Launch zine** – Release notes styled like a mini-comic introducing Nixie’s origin story. -7. **Accessibility audit** – Keyboard navigation, screen-reader pass, dyslexia-friendly typography options. -8. **Beta cosy jam** – Invite testers via queer sysadmin spaces; collect feedback through anonymous forms. - ---- - -## Affirmations YAML (snippet) - -```yaml -- id: bright-morning - message: "Good morning, {name}! Your system is as valid and custom as you are." - colour: "#F7A8B8" -- id: compile-hugs - message: "Kernel rebuilds take time. You deserve rest breaks and gentle music." 
- colour: "#55CDFC" -``` - ---- - -## Logo & Wallpaper - -See `assets/nixette-logo.svg` for the primary wordmark, `assets/nixette-mascot.svg` for Nixie’s badge, and `assets/nixette-wallpaper.svg` for a 4K wallpaper concept. - -### Reference Configs - -- `concepts/nixette/sample_flake.nix` demonstrates the comfort-zone profile with `nix emerge`, `affirmd`, and theming hooks. - ---- - -## Contributing Idea Seeds - -- Write sample flakes showcasing the hybrid build pipeline. -- Mock up the mascot in SVG for use in documentation. -- Design additional wallpapers (night mode, pride variants, low-light). -- Draft inclusive documentation templates (issue/PR forms, community guidelines). -- Publish a community pledge emphasising safety, pronoun respect, and boundaries. -- Host monthly "compile & chill" streams to showcase contributions. - -Let Nixette be the distro that compiles joy, not just binaries. 💜 diff --git a/concepts/nixette/sample_flake.nix b/concepts/nixette/sample_flake.nix deleted file mode 100644 index 941b524..0000000 --- a/concepts/nixette/sample_flake.nix +++ /dev/null @@ -1,62 +0,0 @@ -{ - description = "Nixette comfort-zone profile"; - - inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; - nixette-overlays.url = "github:nixette/overlays"; - nixette-style.url = "github:nixette/style-pack"; - }; - - outputs = { self, nixpkgs, nixette-overlays, nixette-style, ... }@inputs: - let - system = "x86_64-linux"; - pkgs = import nixpkgs { - inherit system; - overlays = [ nixette-overlays.overlays.nix-emerge ]; - }; - in - { - nixosConfigurations.nixette-comfort-zone = nixpkgs.lib.nixosSystem { - inherit system; - modules = [ - ./profiles/comfort-zone.nix - ({ config, pkgs, ... }: - { - nixpkgs.config.allowUnfree = true; - environment.systemPackages = with pkgs; [ - nixette-style - steam - lutris - krita - ]; - - services.nixette.nix-emerge = { - enable = true; - ebuilds = [ - "games-emulation/gamescope" - "media-sound/pipewire" - ]; - }; - - services.nixette.affirmd.enable = true; - services.nixette.affirmd.pronouns = "she/her"; - services.nixette.affirmd.motdPath = ./affirmations.yml; - - programs.plasma.enable = true; - services.displayManager.sddm.enable = true; - services.displayManager.sddm.theme = nixette-style.themes.catgirl-sunrise; - - users.users.nixie = { - isNormalUser = true; - extraGroups = [ "wheel" "audio" "input" "video" ]; - shell = pkgs.zsh; - }; - - programs.zsh.promptInit = '' - eval "$(nixette-style prompt --name nixie --pronouns she/her)" - ''; - }) - ]; - }; - }; -} diff --git a/src/bin/graphql_server.rs b/src/bin/graphql_server.rs deleted file mode 100644 index 5fee14a..0000000 --- a/src/bin/graphql_server.rs +++ /dev/null @@ -1,80 +0,0 @@ -#![cfg(feature = "graphql")] - -use std::env; -use std::sync::Arc; - -use actix_web::{App, HttpRequest, HttpResponse, HttpServer, middleware::Compress, web}; -use anyhow::{Context, Result}; -use juniper_actix::{graphiql_handler, graphql_handler}; - -use package_management::db; -use package_management::graphql::{self, GraphQLContext, Schema}; - -const DEFAULT_BIND_ADDR: &str = "127.0.0.1:8080"; - -#[actix_web::main] -async fn main() -> std::io::Result<()> { - if let Err(err) = run().await { - eprintln!("GraphQL server failed: {err:#}"); - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - err.to_string(), - )); - } - - Ok(()) -} - -async fn run() -> Result<()> { - let pool = db::establish_pool().context("initialising SQLite pool")?; - let schema = Arc::new(graphql::create_schema()); - let jokes = 
Arc::new(graphql::context::JokeCatalog::default()); - let bind_addr = env::var("LPKG_GRAPHQL_ADDR").unwrap_or_else(|_| DEFAULT_BIND_ADDR.to_string()); - let workers = worker_count(); - - println!("GraphQL server listening on {bind_addr} with {workers} worker(s)"); - - HttpServer::new(move || { - let app_schema = Arc::clone(&schema); - let pool = pool.clone(); - let jokes = Arc::clone(&jokes); - - App::new() - .app_data(web::Data::from(app_schema)) - .app_data(web::Data::new(pool)) - .app_data(web::Data::from(jokes)) - .wrap(Compress::default()) - .service( - web::resource("/graphql") - .route(web::post().to(graphql_endpoint)) - .route(web::get().to(graphql_endpoint)), - ) - .service(web::resource("/playground").route(web::get().to(graphiql_endpoint))) - }) - .workers(workers) - .bind(&bind_addr) - .with_context(|| format!("binding GraphQL server to {bind_addr}"))? - .run() - .await - .context("running GraphQL server") -} - -async fn graphql_endpoint( - schema: web::Data>, - pool: web::Data, - jokes: web::Data>, - req: HttpRequest, - payload: web::Payload, -) -> Result { - let context = GraphQLContext::with_catalog(pool.get_ref().clone(), Arc::clone(jokes.get_ref())); - graphql_handler(schema.get_ref().as_ref(), &context, req, payload).await -} - -async fn graphiql_endpoint() -> Result { - graphiql_handler("/graphql", None).await -} - -fn worker_count() -> usize { - let suggested = num_cpus::get(); - suggested.clamp(1, 8) -} diff --git a/src/bin/logo_gen.rs b/src/bin/logo_gen.rs deleted file mode 100644 index 6b4715b..0000000 --- a/src/bin/logo_gen.rs +++ /dev/null @@ -1,181 +0,0 @@ -use anyhow::Result; -use package_management::svg_builder::{Defs, Document, Element, Filter, Gradient, Group, path}; -use std::fs; - -fn main() -> Result<()> { - let svg = build_logo_svg(); - fs::create_dir_all("assets")?; - fs::write("assets/logo.svg", svg)?; - Ok(()) -} - -fn build_logo_svg() -> String { - let defs = Defs::new() - .linear_gradient( - "bgGradient", - Gradient::new("0", "0", "1", "1") - .stop("0%", &[("stop-color", "#0f172a")]) - .stop("100%", &[("stop-color", "#1e293b")]), - ) - .linear_gradient( - "cubeGradient", - Gradient::new("0", "0", "1", "1") - .stop("0%", &[("stop-color", "#38bdf8")]) - .stop("100%", &[("stop-color", "#0ea5e9")]), - ) - .linear_gradient( - "cubeShadow", - Gradient::new("0", "1", "1", "0") - .stop("0%", &[("stop-color", "#0ea5e9"), ("stop-opacity", "0.4")]) - .stop("100%", &[("stop-color", "#38bdf8"), ("stop-opacity", "0.1")]), - ) - .linear_gradient( - "textGradient", - Gradient::new("0", "0", "0", "1") - .stop("0%", &[("stop-color", "#f8fafc")]) - .stop("100%", &[("stop-color", "#cbd5f5")]), - ) - .filter( - "glow", - Filter::new() - .attr("x", "-20%") - .attr("y", "-20%") - .attr("width", "140%") - .attr("height", "140%") - .raw("") - .raw(""), - ); - - let cube_inner = Group::new() - .attr("filter", "url(#glow)") - .child( - Element::new("path") - .attr("d", "M222 86l86-42 86 42v96l-86 42-86-42z") - .attr("fill", "url(#cubeGradient)") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M308 44v182l86-42V86z") - .attr("fill", "url(#cubeShadow)") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M262 96l46-22 46 22v48l-46 22-46-22z") - .attr("fill", "#0f172a") - .attr("opacity", "0.85") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M308 74l32 15v32l-32 15-32-15v-32z") - .attr("fill", "none") - .attr("stroke", "#38bdf8") - .attr("stroke-width", "4") - .attr("stroke-linejoin", "round") - .empty(), - ) - .child( - 
Element::new("path") - .attr("d", "M308 122l-32-15") - .attr("stroke", "#38bdf8") - .attr("stroke-width", "4") - .attr("stroke-linecap", "round") - .attr("opacity", "0.6") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M308 122l32-15") - .attr("stroke", "#38bdf8") - .attr("stroke-width", "4") - .attr("stroke-linecap", "round") - .attr("opacity", "0.6") - .empty(), - ) - .child( - Element::new("circle") - .attr("cx", "276") - .attr("cy", "107") - .attr("r", "5") - .attr("fill", "#38bdf8") - .empty(), - ) - .child( - Element::new("circle") - .attr("cx", "340") - .attr("cy", "107") - .attr("r", "5") - .attr("fill", "#38bdf8") - .empty(), - ); - - let cube = Group::new() - .attr("transform", "translate(100 60)") - .child(cube_inner); - - let circuits = Group::new() - .attr("fill", "none") - .attr("stroke", "#38bdf8") - .attr("stroke-width", "3") - .attr("stroke-linecap", "round") - .attr("opacity", "0.55") - .child(path("M120 78h72")) - .child(path("M120 110h48")) - .child(path("M120 142h64")) - .child(path("M448 110h72")) - .child(path("M472 142h88")) - .child(path("M448 174h96")); - - let title_text = Group::new() - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-weight", "600") - .attr("font-size", "90") - .attr("letter-spacing", "6") - .child( - Element::new("text") - .attr("x", "120") - .attr("y", "246") - .attr("fill", "url(#textGradient)") - .text("LPKG"), - ); - - let tagline_group = Group::new() - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-size", "22") - .attr("fill", "#94a3b8") - .child( - Element::new("text") - .attr("x", "122") - .attr("y", "278") - .text("Lightweight Package Manager"), - ); - - Document::new(640, 320) - .view_box("0 0 640 320") - .role("img") - .aria_label("title", "desc") - .title("LPKG Logo") - .desc("Stylised package icon with circuitry and the letters LPKG.") - .add_defs(defs) - .add_element( - Element::new("rect") - .attr("width", "640") - .attr("height", "320") - .attr("rx", "28") - .attr("fill", "url(#bgGradient)") - .empty(), - ) - .add_element(cube) - .add_element(circuits) - .add_element(title_text) - .add_element(tagline_group) - .finish() -} diff --git a/src/bin/metadata_indexer.rs b/src/bin/metadata_indexer.rs index 3b81130..1f19ef3 100644 --- a/src/bin/metadata_indexer.rs +++ b/src/bin/metadata_indexer.rs @@ -2,11 +2,12 @@ use std::collections::HashSet; use std::fs; use std::path::{Path, PathBuf}; -use anyhow::{Context, Result, anyhow}; +use anyhow::{Context, Result}; use chrono::Utc; use clap::{Parser, Subcommand}; use jsonschema::JSONSchema; use regex::Regex; +use reqwest::{blocking::Client, redirect::Policy}; use scraper::{ElementRef, Html, Selector}; use serde_json::{Value, json}; use sha2::{Digest, Sha256}; @@ -378,16 +379,6 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result>() - }) - .unwrap_or_default(); Ok(PackageSummary { schema_version, @@ -402,7 +393,6 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result, ) -> Result { let page_url = resolve_page_url(book, page, override_base)?; - let html = fetch_text(&page_url).with_context(|| format!("fetching {page_url}"))?; + let client = Client::builder() + .user_agent("lpkg-metadata-indexer/0.1") + .build()?; + let response = client + .get(&page_url) + .send() + .with_context(|| format!("fetching {}", page_url))? 
+ .error_for_status() + .with_context(|| format!("non-success status for {}", page_url))?; + let html = response + .text() + .with_context(|| format!("reading response body from {}", page_url))?; let document = Html::parse_document(&html); let harvest = build_metadata_value(metadata_dir, book, &page_url, &document, &html)?; @@ -636,7 +637,6 @@ fn build_metadata_value( }; let status_state = "draft"; - let stage_tag = stage.clone().unwrap_or_else(|| "base-system".to_string()); let package_json = json!({ "schema_version": "v0.1.0", @@ -687,7 +687,10 @@ fn build_metadata_value( "status": { "state": status_state, "issues": issues, - "tags": vec!["25.10".to_string(), stage_tag.clone()] + "tags": vec![ + "25.10".to_string(), + stage.unwrap_or("base-system").to_string() + ] } }); @@ -937,7 +940,15 @@ fn refresh_manifest( let url = manifest_url(book, &kind) .with_context(|| format!("no manifest URL configured for book '{}'", book))?; - let body = fetch_text(url).with_context(|| format!("fetching {url}"))?; + let client = Client::builder().redirect(Policy::limited(5)).build()?; + let body = client + .get(url) + .send() + .with_context(|| format!("fetching {}", url))? + .error_for_status() + .with_context(|| format!("request failed for {}", url))? + .text() + .with_context(|| format!("reading response body from {}", url))?; fs::write(&cache_path, &body) .with_context(|| format!("caching manifest {}", cache_path.display()))?; @@ -945,17 +956,6 @@ fn refresh_manifest( Ok(cache_path) } -fn fetch_text(url: &str) -> Result { - ureq::get(url) - .call() - .map_err(|err| match err { - ureq::Error::Status(code, _) => anyhow!("request failed: HTTP {code}"), - other => anyhow!("request failed: {other}"), - })? - .into_string() - .with_context(|| format!("reading response body from {url}")) -} - fn manifest_url(book: &str, kind: &ManifestKind) -> Option<&'static str> { match (book, kind) { ("mlfs", ManifestKind::WgetList) => { diff --git a/src/bin/nixette_logo_gen.rs b/src/bin/nixette_logo_gen.rs deleted file mode 100644 index 5f18f55..0000000 --- a/src/bin/nixette_logo_gen.rs +++ /dev/null @@ -1,126 +0,0 @@ -use anyhow::Result; -use package_management::svg_builder::{Defs, Document, Element, Filter, Gradient, Group}; -use std::fs; - -fn main() -> Result<()> { - let svg = build_nixette_logo(); - fs::create_dir_all("assets")?; - fs::write("assets/nixette-logo.svg", svg)?; - Ok(()) -} - -fn build_nixette_logo() -> String { - let defs = Defs::new() - .linear_gradient( - "bg", - Gradient::new("0", "0", "1", "1") - .stop("0%", &[("stop-color", "#55CDFC")]) - .stop("100%", &[("stop-color", "#F7A8B8")]), - ) - .linear_gradient( - "text", - Gradient::new("0", "0", "0", "1") - .stop("0%", &[("stop-color", "#FFFFFF")]) - .stop("100%", &[("stop-color", "#E5E7FF")]), - ) - .filter( - "softShadow", - Filter::new() - .attr("x", "-10%") - .attr("y", "-10%") - .attr("width", "120%") - .attr("height", "120%") - .raw(""), - ); - - let emblem = Group::new().attr("transform", "translate(100 60)").child( - Group::new() - .attr("filter", "url(#softShadow)") - .child( - Element::new("path") - .attr("d", "M40 40 L72 0 L144 0 L176 40 L144 80 L72 80 Z") - .attr("fill", "url(#bg)") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M72 0 L144 80") - .attr("stroke", "#FFFFFF") - .attr("stroke-width", "6") - .attr("stroke-linecap", "round") - .attr("opacity", "0.55") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M144 0 L72 80") - .attr("stroke", "#FFFFFF") - .attr("stroke-width", "6") - .attr("stroke-linecap", 
"round") - .attr("opacity", "0.55") - .empty(), - ) - .child( - Element::new("circle") - .attr("cx", "108") - .attr("cy", "40") - .attr("r", "22") - .attr("fill", "#0F172A") - .attr("stroke", "#FFFFFF") - .attr("stroke-width", "6") - .attr("opacity", "0.85") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M108 24c8 0 14 6 14 16s-6 16-14 16") - .attr("stroke", "#F7A8B8") - .attr("stroke-width", "4") - .attr("stroke-linecap", "round") - .attr("fill", "none") - .empty(), - ), - ); - - let wordmark = Group::new() - .attr("transform", "translate(220 126)") - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-weight", "700") - .attr("font-size", "72") - .attr("letter-spacing", "4") - .attr("fill", "url(#text)") - .child(Element::new("text").text("NIXETTE")); - - let subtitle = Group::new() - .attr("transform", "translate(220 160)") - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-size", "22") - .attr("fill", "#A5B4FC") - .child(Element::new("text").text("Declarative · Sourceful · Herself")); - - Document::new(640, 200) - .view_box("0 0 640 200") - .role("img") - .aria_label("title", "desc") - .title("Nixette Logo") - .desc("Wordmark combining Nix and Gentoo motifs with trans pride colours.") - .add_defs(defs) - .add_element( - Element::new("rect") - .attr("width", "640") - .attr("height", "200") - .attr("rx", "36") - .attr("fill", "#0F172A") - .empty(), - ) - .add_element(emblem) - .add_element(wordmark) - .add_element(subtitle) - .finish() -} diff --git a/src/bin/nixette_mascot_gen.rs b/src/bin/nixette_mascot_gen.rs deleted file mode 100644 index b07edd1..0000000 --- a/src/bin/nixette_mascot_gen.rs +++ /dev/null @@ -1,170 +0,0 @@ -use anyhow::Result; -use package_management::svg_builder::{Defs, Document, Element, Gradient, Group}; -use std::fs; - -fn main() -> Result<()> { - let svg = build_mascot_svg(); - fs::create_dir_all("assets")?; - fs::write("assets/nixette-mascot.svg", svg)?; - Ok(()) -} - -fn build_mascot_svg() -> String { - let defs = Defs::new() - .linear_gradient( - "bgGrad", - Gradient::new("0", "0", "0", "1") - .stop("0%", &[("stop-color", "#312E81")]) - .stop("100%", &[("stop-color", "#1E1B4B")]), - ) - .linear_gradient( - "hairLeft", - Gradient::new("0", "0", "1", "1") - .stop("0%", &[("stop-color", "#55CDFC")]) - .stop("100%", &[("stop-color", "#0EA5E9")]), - ) - .linear_gradient( - "hairRight", - Gradient::new("1", "0", "0", "1") - .stop("0%", &[("stop-color", "#F7A8B8")]) - .stop("100%", &[("stop-color", "#FB7185")]), - ) - .linear_gradient( - "bellyGrad", - Gradient::new("0", "0", "0", "1") - .stop("0%", &[("stop-color", "#FFFFFF")]) - .stop("100%", &[("stop-color", "#E2E8F0")]), - ); - - let body = Group::new() - .attr("transform", "translate(240 220)") - .child( - Element::new("path") - .attr("d", "M-160 -20 C-140 -160 140 -160 160 -20 C180 140 60 220 0 220 C-60 220 -180 140 -160 -20") - .attr("fill", "#0F172A") - .empty(), - ) - .child( - Element::new("ellipse") - .attr("cx", "0") - .attr("cy", "40") - .attr("rx", "120") - .attr("ry", "140") - .attr("fill", "#1E293B") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M-88 -80 Q-40 -140 0 -120 Q40 -140 88 -80") - .attr("fill", "#1E293B") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M-96 -84 Q-60 -160 -8 -132 L-8 -40 Z") - .attr("fill", "url(#hairLeft)") - .empty(), - ) - .child( - Element::new("path") - .attr("d", "M96 -84 Q60 -160 8 -132 L8 -40 Z") - .attr("fill", "url(#hairRight)") - 
.empty(), - ) - .child(ellipse(-44.0, -8.0, 26.0, 32.0, "#FFFFFF")) - .child(ellipse(44.0, -8.0, 26.0, 32.0, "#FFFFFF")) - .child(circle(-44.0, -4.0, 14.0, "#0F172A")) - .child(circle(44.0, -4.0, 14.0, "#0F172A")) - .child(circle_with_opacity(-40.0, -8.0, 6.0, "#FFFFFF", 0.7)) - .child(circle_with_opacity(48.0, -10.0, 6.0, "#FFFFFF", 0.7)) - .child(path_with_fill("M0 12 L-18 32 Q0 44 18 32 Z", "#F472B6")) - .child(path_with_fill("M0 32 L-16 52 Q0 60 16 52 Z", "#FBEAED")) - .child(path_with_fill("M0 46 Q-32 78 0 86 Q32 78 0 46", "#FCA5A5")) - .child( - Element::new("ellipse") - .attr("cx", "0") - .attr("cy", "74") - .attr("rx", "70") - .attr("ry", "82") - .attr("fill", "url(#bellyGrad)") - .empty(), - ) - .child(path_with_fill("M-128 48 Q-176 56 -176 120 Q-128 112 -104 80", "#F7A8B8")) - .child(path_with_fill("M128 48 Q176 56 176 120 Q128 112 104 80", "#55CDFC")) - .child(circle_with_opacity(-100.0, 94.0, 18.0, "#FDE68A", 0.85)) - .child(circle_with_opacity(100.0, 94.0, 18.0, "#FDE68A", 0.85)); - - Document::new(480, 520) - .view_box("0 0 480 520") - .role("img") - .aria_label("title", "desc") - .title("Nixette Mascot Badge") - .desc("Chibi penguin mascot with trans flag hair, blending Nix and Gentoo motifs.") - .add_defs(defs) - .add_element( - Element::new("rect") - .attr("width", "480") - .attr("height", "520") - .attr("rx", "48") - .attr("fill", "url(#bgGrad)") - .empty(), - ) - .add_element(body) - .add_element( - Group::new() - .attr("transform", "translate(90 420)") - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-size", "42") - .attr("fill", "#E0E7FF") - .attr("letter-spacing", "6") - .child(Element::new("text").text("NIXIE")), - ) - .add_element( - Group::new() - .attr("transform", "translate(90 468)") - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-size", "20") - .attr("fill", "#A5B4FC") - .child(Element::new("text").text("Declarative · Sourceful · Herself")), - ) - .finish() -} - -fn ellipse(cx: f64, cy: f64, rx: f64, ry: f64, fill: &str) -> String { - Element::new("ellipse") - .attr("cx", &format!("{}", cx)) - .attr("cy", &format!("{}", cy)) - .attr("rx", &format!("{}", rx)) - .attr("ry", &format!("{}", ry)) - .attr("fill", fill) - .empty() -} - -fn circle(cx: f64, cy: f64, r: f64, fill: &str) -> String { - Element::new("circle") - .attr("cx", &format!("{}", cx)) - .attr("cy", &format!("{}", cy)) - .attr("r", &format!("{}", r)) - .attr("fill", fill) - .empty() -} - -fn circle_with_opacity(cx: f64, cy: f64, r: f64, fill: &str, opacity: f64) -> String { - Element::new("circle") - .attr("cx", &format!("{}", cx)) - .attr("cy", &format!("{}", cy)) - .attr("r", &format!("{}", r)) - .attr("fill", fill) - .attr("opacity", &format!("{}", opacity)) - .empty() -} - -fn path_with_fill(d: &str, fill: &str) -> String { - Element::new("path").attr("d", d).attr("fill", fill).empty() -} diff --git a/src/bin/nixette_wallpaper_gen.rs b/src/bin/nixette_wallpaper_gen.rs deleted file mode 100644 index 225f157..0000000 --- a/src/bin/nixette_wallpaper_gen.rs +++ /dev/null @@ -1,128 +0,0 @@ -use anyhow::Result; -use package_management::svg_builder::{ - Defs, Document, Element, Gradient, Group, RadialGradient, path, -}; -use std::fs; - -fn main() -> Result<()> { - let svg = build_wallpaper_svg(); - fs::create_dir_all("assets")?; - fs::write("assets/nixette-wallpaper.svg", svg)?; - Ok(()) -} - -fn build_wallpaper_svg() -> String { - let defs = Defs::new() - .linear_gradient( - "sky", - Gradient::new("0", "0", "1", 
"1") - .stop("0%", &[("stop-color", "#0f172a")]) - .stop("100%", &[("stop-color", "#1e1b4b")]), - ) - .linear_gradient( - "wave1", - Gradient::new("0", "0", "1", "0") - .stop("0%", &[("stop-color", "#55CDFC"), ("stop-opacity", "0")]) - .stop("50%", &[("stop-color", "#55CDFC"), ("stop-opacity", "0.5")]) - .stop("100%", &[("stop-color", "#55CDFC"), ("stop-opacity", "0")]), - ) - .linear_gradient( - "wave2", - Gradient::new("1", "0", "0", "0") - .stop("0%", &[("stop-color", "#F7A8B8"), ("stop-opacity", "0")]) - .stop( - "50%", - &[("stop-color", "#F7A8B8"), ("stop-opacity", "0.55")], - ) - .stop("100%", &[("stop-color", "#F7A8B8"), ("stop-opacity", "0")]), - ) - .radial_gradient( - "halo", - RadialGradient::new("0.5", "0.5", "0.7") - .stop("0%", &[("stop-color", "#FDE68A"), ("stop-opacity", "0.8")]) - .stop("100%", &[("stop-color", "#FDE68A"), ("stop-opacity", "0")]), - ); - - let text = Group::new() - .attr("transform", "translate(940 1320)") - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-size", "220") - .attr("font-weight", "700") - .attr("letter-spacing", "18") - .attr("fill", "#FFFFFF") - .attr("opacity", "0.95") - .child(Element::new("text").text("NIXETTE")); - - let subtitle = Group::new() - .attr("transform", "translate(960 1500)") - .attr( - "font-family", - "'Fira Sans', 'Inter', 'Segoe UI', sans-serif", - ) - .attr("font-size", "64") - .attr("fill", "#F7A8B8") - .attr("opacity", "0.9") - .child(Element::new("text").text("Declarative · Sourceful · Herself")); - - Document::new(3840, 2160) - .view_box("0 0 3840 2160") - .role("img") - .aria_label("title", "desc") - .title("Nixette Wallpaper") - .desc("Gradient wallpaper combining trans flag waves with Nix and Gentoo motifs.") - .add_defs(defs) - .add_element( - Element::new("rect") - .attr("width", "3840") - .attr("height", "2160") - .attr("fill", "url(#sky)") - .empty(), - ) - .add_element( - Element::new("rect") - .attr("x", "0") - .attr("y", "0") - .attr("width", "3840") - .attr("height", "2160") - .attr("fill", "url(#halo)") - .attr("opacity", "0.4") - .empty(), - ) - .add_element( - Element::new("path") - .attr("d", "M0 1430 C640 1320 1280 1580 1860 1500 C2440 1420 3040 1660 3840 1500 L3840 2160 L0 2160 Z") - .attr("fill", "url(#wave1)") - .empty(), - ) - .add_element( - Element::new("path") - .attr("d", "M0 1700 C500 1580 1200 1880 1900 1760 C2600 1640 3200 1920 3840 1800 L3840 2160 L0 2160 Z") - .attr("fill", "url(#wave2)") - .empty(), - ) - .add_element( - Group::new() - .attr("opacity", "0.08") - .attr("fill", "none") - .attr("stroke", "#FFFFFF") - .attr("stroke-width", "24") - .child(path("M600 360 l220 -220 h360 l220 220 l-220 220 h-360 z")) - .child(path("M600 360 l220 -220")) - .child(path("M820 140 l220 220")), - ) - .add_element( - Group::new() - .attr("opacity", "0.12") - .attr("fill", "none") - .attr("stroke", "#FFFFFF") - .attr("stroke-width", "22") - .attr("transform", "translate(2820 320) scale(0.9)") - .child(path("M0 0 C120 -40 220 40 220 160 C220 260 160 320 60 320")), - ) - .add_element(text) - .add_element(subtitle) - .finish() -} diff --git a/src/bin/readme_gen.rs b/src/bin/readme_gen.rs deleted file mode 100644 index ab0390e..0000000 --- a/src/bin/readme_gen.rs +++ /dev/null @@ -1,198 +0,0 @@ -use std::fs; - -fn main() -> anyhow::Result<()> { - let readme = Readme::build(); - fs::write("README.md", readme)?; - Ok(()) -} - -struct MarkdownDoc { - buffer: String, -} - -impl MarkdownDoc { - fn new() -> Self { - Self { - buffer: String::new(), - } - } - - fn 
heading(mut self, level: u8, text: &str) -> Self { - self.buffer.push_str(&"#".repeat(level as usize)); - self.buffer.push(' '); - self.buffer.push_str(text); - self.buffer.push_str("\n\n"); - self - } - - fn raw(mut self, text: &str) -> Self { - self.buffer.push_str(text); - self.buffer.push('\n'); - self - } - - fn paragraph(mut self, text: &str) -> Self { - self.buffer.push_str(text); - self.buffer.push_str("\n\n"); - self - } - - fn horizontal_rule(mut self) -> Self { - self.buffer.push_str("---\n\n"); - self - } - - fn bullet_list(mut self, items: I) -> Self - where - I: IntoIterator, - S: AsRef, - { - for item in items { - self.buffer.push_str("* "); - self.buffer.push_str(item.as_ref()); - self.buffer.push('\n'); - } - self.buffer.push('\n'); - self - } - - fn code_block(mut self, language: &str, code: &str) -> Self { - self.buffer.push_str("```"); - self.buffer.push_str(language); - self.buffer.push('\n'); - self.buffer.push_str(code.trim_matches('\n')); - self.buffer.push_str("\n```\n\n"); - self - } - - fn finish(self) -> String { - self.buffer - } -} - -struct Readme; - -impl Readme { - fn build() -> String { - let doc = MarkdownDoc::new() - .heading(1, "🧬 LPKG – Lightweight Package Manager") - .raw("

\n \"LPKG\n

\n") - .paragraph("LPKG is a minimalistic package manager written in Rust, designed for fast and simple software management on Unix-like systems. It emphasizes reproducibility and declarative configuration, leveraging **Nix Flakes** for development and deployment.") - .horizontal_rule() - .heading(2, "🚀 Features") - .bullet_list([ - "**Fast & Lightweight** – Minimal resource usage and quick operations.", - "**Rust-Powered** – Safe and concurrent code with Rust.", - "**Cross-Platform** – Works on Linux and macOS.", - "**Declarative Builds** – Fully reproducible with Nix Flakes.", - "**Simple CLI** – Intuitive commands for managing packages.", - ]) - .horizontal_rule() - .heading(2, "⚙️ Installation") - .heading(3, "Using Cargo") - .code_block("bash", "cargo install lpkg") - .heading(3, "Using Nix Flakes") - .paragraph("If you have Nix with flakes enabled:") - .code_block("bash", "nix profile install github:lesbiannix/lpkg") - .paragraph("Or to run without installing:") - .code_block("bash", "nix run github:lesbiannix/lpkg") - .horizontal_rule() - .heading(2, "🧰 Usage") - .paragraph("Basic command structure:") - .code_block("bash", "lpkg [command] [package]") - .paragraph("Common commands:") - .bullet_list([ - "`install` – Install a package", - "`remove` – Remove a package", - "`update` – Update the package list", - "`upgrade` – Upgrade all installed packages", - ]) - .paragraph("For detailed usage:") - .code_block("bash", "lpkg --help") - .horizontal_rule() - .heading(2, "🔧 Development with Flakes") - .paragraph("Clone the repository:") - .code_block("bash", "git clone https://github.com/lesbiannix/lpkg.git\ncd lpkg") - .paragraph("Enter the flake development shell:") - .code_block("bash", "nix develop") - .paragraph("Build the project:") - .code_block("bash", "cargo build") - .paragraph("LPKG ships with tuned Cargo profiles:") - .bullet_list([ - "**Dev builds** (`cargo build`) use `opt-level=0`, lots of codegen units, and incremental compilation for quick feedback while hacking.", - "**Release builds** (`cargo build --release`) enable `-O3`, fat LTO, and panic aborts for slim, fast binaries.", - "**GraphQL builds** add the server components when you need them:", - ]) - .code_block("bash", "cargo build --features graphql") - .paragraph("**PGO builds** are a two-step flow using the provided Cargo aliases:") - .code_block( - "bash", - r#"# 1) Instrument -RUSTFLAGS="-Cprofile-generate=target/pgo-data" cargo pgo-instrument -# run representative workloads to emit *.profraw files under target/pgo-data -llvm-profdata merge -o target/pgo-data/lpkg.profdata target/pgo-data/*.profraw - -# 2) Optimise with the collected profile -RUSTFLAGS="-Cprofile-use=target/pgo-data/lpkg.profdata -Cllvm-args=-pgo-warn-missing-function" \ - cargo pgo-build"#, - ) - .paragraph("Regenerate project artefacts (README and SVG logo):") - .code_block("bash", "cargo run --bin readme_gen\ncargo run --bin logo_gen") - .paragraph("Run tests:") - .code_block("bash", "cargo test") - .paragraph("You can also run the project directly in the flake shell:") - .code_block("bash", "nix run") - .heading(2, "🕸️ GraphQL API") - .paragraph("LPKG now ships a lightweight GraphQL server powered by Actix Web and Juniper.") - .bullet_list([ - "Start the server with `cargo run --features graphql --bin graphql_server` (set `LPKG_GRAPHQL_ADDR` to override `127.0.0.1:8080`).", - "Query endpoint: `http://127.0.0.1:8080/graphql`", - "Interactive playground: `http://127.0.0.1:8080/playground`", - ]) - .paragraph("Example query:") - 
.code_block("graphql", r"{ - packages(limit: 5) { - name - version - enableLto - } - randomJoke { - package - text - } -}") - .heading(3, "AI metadata tooling") - .paragraph("The AI metadata store under `ai/metadata/` comes with a helper CLI to validate package records against the JSON schema and regenerate `index.json` after adding new entries:") - .code_block("bash", r"cargo run --bin metadata_indexer -- --base-dir . validate -cargo run --bin metadata_indexer -- --base-dir . index") - .paragraph("Use `--compact` with `index` if you prefer single-line JSON output.") - .paragraph("To draft metadata for a specific book page, you can run the harvest mode. It fetches the XHTML, scrapes the build commands, and emits a schema-compliant JSON skeleton (pass `--dry-run` to inspect the result without writing to disk):") - .code_block("bash", r"cargo run --bin metadata_indexer -- \ - --base-dir . harvest \ - --book mlfs \ - --page chapter05/binutils-pass1 \ - --dry-run") - .paragraph("Keep the jhalfs manifests current with:") - .code_block("bash", "cargo run --bin metadata_indexer -- --base-dir . refresh") - .paragraph("Passing `--books mlfs,blfs` restricts the refresh to specific books, and `--force` bypasses the local cache.") - .paragraph("To materialise a Rust module from harvested metadata:") - .code_block("bash", r"cargo run --bin metadata_indexer -- \ - --base-dir . generate \ - --metadata ai/metadata/packages/mlfs/binutils-pass-1.json \ - --output target/generated/by_name") - .paragraph("Add `--overwrite` to regenerate an existing module directory.") - .heading(2, "📚 Documentation") - .bullet_list([ - "[Architecture Overview](docs/ARCHITECTURE.md) – high-level tour of the crate layout, binaries, and supporting modules.", - "[Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) – how the metadata indexer produces and validates the JSON records under `ai/metadata/`.", - "[Package Module Generation](docs/PACKAGE_GENERATION.md) – end-to-end guide for converting harvested metadata into Rust modules under `src/pkgs/by_name/`.", - "Concept corner: [Nixette](concepts/nixette/README.md) – a NixOS × Gentoo transfemme mash-up dreamed up for fun brand explorations.", - "`ai/notes.md` – scratchpad for ongoing research tasks (e.g., deeper jhalfs integration).", - ]) - .horizontal_rule() - .heading(2, "📄 License") - .paragraph("LPKG is licensed under the [MIT License](LICENSE)."); - - doc.finish() - } -} diff --git a/src/db/mod.rs b/src/db/mod.rs index e606fd8..9cc40b9 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -1,11 +1,9 @@ pub mod models; pub mod schema; -use std::cmp; use std::env; use anyhow::{Context, Result}; -use diesel::OptionalExtension; use diesel::prelude::*; use diesel::r2d2::{self, ConnectionManager}; use diesel::sqlite::SqliteConnection; @@ -107,98 +105,3 @@ pub fn load_packages_via_pool(pool: &Pool) -> Result> { let mut conn = pool.get().context("acquiring database connection")?; load_packages(&mut conn) } - -/// Load package definitions instead of raw Diesel models for convenience. -pub fn load_package_definitions(conn: &mut SqliteConnection) -> Result> { - load_packages(conn)? - .into_iter() - .map(|record| record.into_definition()) - .collect::>>() -} - -/// Pool-backed helper mirroring [`load_package_definitions`]. 
-pub fn load_package_definitions_via_pool(pool: &Pool) -> Result> { - let mut conn = pool.get().context("acquiring database connection")?; - load_package_definitions(&mut conn) -} - -/// Locate a package by name and optional version, returning the newest matching entry when -/// the version is not supplied. -pub fn find_package( - conn: &mut SqliteConnection, - name: &str, - version: Option<&str>, -) -> Result> { - let mut query = packages_dsl::packages - .filter(packages_dsl::name.eq(name)) - .into_boxed(); - - if let Some(version) = version { - query = query.filter(packages_dsl::version.eq(version)); - } - - query - .order(packages_dsl::version.desc()) - .first::(conn) - .optional() - .context("querying package by name") -} - -/// Convenience wrapper returning the package as a [`PackageDefinition`]. -pub fn find_package_definition( - conn: &mut SqliteConnection, - name: &str, - version: Option<&str>, -) -> Result> { - Ok(find_package(conn, name, version)? - .map(|pkg| pkg.into_definition()) - .transpose()?) -} - -/// Pool-backed variant of [`find_package_definition`]. -pub fn find_package_definition_via_pool( - pool: &Pool, - name: &str, - version: Option<&str>, -) -> Result> { - let mut conn = pool.get().context("acquiring database connection")?; - find_package_definition(&mut conn, name, version) -} - -/// Locate packages using a basic substring match on the name, ordered deterministically and -/// optionally limited for responsiveness. -pub fn search_packages( - conn: &mut SqliteConnection, - term: &str, - limit: Option, -) -> Result> { - let trimmed = term.trim(); - if trimmed.is_empty() { - return Ok(Vec::new()); - } - - let normalized: String = trimmed.chars().take(128).collect(); - let sanitized = normalized.replace('%', "\\%").replace('_', "\\_"); - let pattern = format!("%{}%", sanitized); - let mut query = packages_dsl::packages - .filter(packages_dsl::name.like(&pattern)) - .order((packages_dsl::name, packages_dsl::version)) - .into_boxed(); - - let effective_limit = limit.map(|value| cmp::max(1, value)).unwrap_or(50); - query = query.limit(cmp::min(effective_limit, 200)); - - query - .load::(conn) - .context("searching packages by name") -} - -/// Pool-backed variant of [`search_packages`]. 
-pub fn search_packages_via_pool( - pool: &Pool, - term: &str, - limit: Option, -) -> Result> { - let mut conn = pool.get().context("acquiring database connection")?; - search_packages(&mut conn, term, limit) -} diff --git a/src/graphql/context.rs b/src/graphql/context.rs deleted file mode 100644 index a37e4de..0000000 --- a/src/graphql/context.rs +++ /dev/null @@ -1,138 +0,0 @@ -use std::sync::Arc; - -use rand::rng; -use rand::seq::IteratorRandom; - -use crate::db; - -#[derive(Clone)] -pub struct GraphQLContext { - pub db_pool: db::Pool, - jokes: Arc, -} - -impl GraphQLContext { - pub fn new(db_pool: db::Pool) -> Self { - Self { - db_pool, - jokes: Arc::new(JokeCatalog::default()), - } - } - - pub fn with_jokes(db_pool: db::Pool, jokes: Vec) -> Self { - Self { - db_pool, - jokes: Arc::new(JokeCatalog::new(jokes)), - } - } - - pub fn with_catalog(db_pool: db::Pool, catalog: Arc) -> Self { - Self { - db_pool, - jokes: catalog, - } - } - - pub fn joke_catalog(&self) -> Arc { - Arc::clone(&self.jokes) - } -} - -impl juniper::Context for GraphQLContext {} - -#[derive(Clone, Debug)] -pub struct Joke { - pub id: String, - pub package: Option, - pub text: String, -} - -impl Joke { - pub fn new(id: impl Into, package: Option<&str>, text: impl Into) -> Self { - Self { - id: id.into(), - package: package.map(|pkg| pkg.to_string()), - text: text.into(), - } - } -} - -#[derive(Clone)] -pub struct JokeCatalog { - entries: Arc>, -} - -impl JokeCatalog { - fn new(entries: Vec) -> Self { - Self { - entries: Arc::new(entries), - } - } - - pub fn random(&self, package: Option<&str>) -> Option { - let mut rng = rng(); - - if let Some(package) = package { - if let Some(chosen) = self - .entries - .iter() - .filter(|joke| matches_package(joke, package)) - .choose(&mut rng) - { - return Some(chosen.clone()); - } - } - - self.entries.iter().choose(&mut rng).cloned() - } - - pub fn all(&self, package: Option<&str>) -> Vec { - match package { - Some(package) => self - .entries - .iter() - .filter(|joke| matches_package(joke, package)) - .cloned() - .collect(), - None => self.entries.as_ref().clone(), - } - } -} - -impl Default for JokeCatalog { - fn default() -> Self { - Self::new(default_jokes()) - } -} - -fn matches_package(joke: &Joke, package: &str) -> bool { - joke.package - .as_deref() - .map(|pkg| pkg.eq_ignore_ascii_case(package)) - .unwrap_or(false) -} - -fn default_jokes() -> Vec { - vec![ - Joke::new( - "optimizer-overdrive", - Some("gcc"), - "The GCC optimizer walked into a bar, reordered everyone’s drinks, and they still tasted the same—just faster.", - ), - Joke::new( - "linker-chuckle", - Some("binutils"), - "Our linker refuses to go on vacation; it can’t handle unresolved references to the beach.", - ), - Joke::new( - "glibc-giggle", - Some("glibc"), - "The C library tried stand-up comedy but segfaulted halfway through the punchline.", - ), - Joke::new( - "pkg-general", - None, - "LPKG packages never get lost—they always follow the dependency graph back home.", - ), - ] -} diff --git a/src/graphql/mod.rs b/src/graphql/mod.rs deleted file mode 100644 index 9c8a0b3..0000000 --- a/src/graphql/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub mod context; -pub mod schema; - -pub use context::{GraphQLContext, Joke}; -pub use schema::QueryRoot; - -use juniper::{EmptyMutation, EmptySubscription, RootNode}; - -pub type Schema = - RootNode, EmptySubscription>; - -pub fn create_schema() -> Schema { - Schema::new(QueryRoot {}, EmptyMutation::new(), EmptySubscription::new()) -} diff --git a/src/graphql/schema.rs 
b/src/graphql/schema.rs deleted file mode 100644 index d27d855..0000000 --- a/src/graphql/schema.rs +++ /dev/null @@ -1,133 +0,0 @@ -use anyhow::{Error as AnyhowError, Result as AnyhowResult}; -use juniper::{FieldResult, GraphQLObject, Value, graphql_object}; - -use crate::{db, pkgs::package::PackageDefinition}; - -use super::context::{GraphQLContext, Joke}; - -#[derive(Clone, GraphQLObject)] -#[graphql(description = "Package metadata exposed via the GraphQL API")] -pub struct PackageType { - pub name: String, - pub version: String, - pub source: Option, - pub md5: Option, - pub configure_args: Vec, - pub build_commands: Vec, - pub install_commands: Vec, - pub dependencies: Vec, - pub enable_lto: bool, - pub enable_pgo: bool, - pub cflags: Vec, - pub ldflags: Vec, - pub profdata: Option, -} - -impl From for PackageType { - fn from(pkg: PackageDefinition) -> Self { - let optimizations = pkg.optimizations; - - Self { - name: pkg.name, - version: pkg.version, - source: pkg.source, - md5: pkg.md5, - configure_args: pkg.configure_args, - build_commands: pkg.build_commands, - install_commands: pkg.install_commands, - dependencies: pkg.dependencies, - enable_lto: optimizations.enable_lto, - enable_pgo: optimizations.enable_pgo, - cflags: optimizations.cflags, - ldflags: optimizations.ldflags, - profdata: optimizations.profdata, - } - } -} - -#[derive(Clone, GraphQLObject)] -#[graphql(description = "A light-hearted package-related joke")] -pub struct JokeType { - pub id: String, - pub package: Option, - pub text: String, -} - -impl From for JokeType { - fn from(joke: Joke) -> Self { - Self { - id: joke.id, - package: joke.package, - text: joke.text, - } - } -} - -#[derive(Default)] -pub struct QueryRoot; - -#[graphql_object(context = GraphQLContext)] -impl QueryRoot { - fn packages(context: &GraphQLContext, limit: Option) -> FieldResult> { - let limit = limit.unwrap_or(50).clamp(1, 200) as usize; - let definitions = - db::load_package_definitions_via_pool(&context.db_pool).map_err(field_error)?; - - Ok(definitions - .into_iter() - .take(limit) - .map(PackageType::from) - .collect()) - } - - fn package( - context: &GraphQLContext, - name: String, - version: Option, - ) -> FieldResult> { - let definition = - db::find_package_definition_via_pool(&context.db_pool, &name, version.as_deref()) - .map_err(field_error)?; - - Ok(definition.map(PackageType::from)) - } - - fn search( - context: &GraphQLContext, - query: String, - limit: Option, - ) -> FieldResult> { - let limit = limit.map(|value| i64::from(value.clamp(1, 200))); - let results = - db::search_packages_via_pool(&context.db_pool, &query, limit).map_err(field_error)?; - - let packages = results - .into_iter() - .map(|pkg| pkg.into_definition().map(PackageType::from)) - .collect::>>() - .map_err(field_error)?; - - Ok(packages) - } - - fn jokes(context: &GraphQLContext, package: Option) -> FieldResult> { - let catalog = context.joke_catalog(); - Ok(catalog - .all(package.as_deref()) - .into_iter() - .map(JokeType::from) - .collect()) - } - - fn random_joke( - context: &GraphQLContext, - package: Option, - ) -> FieldResult> { - let catalog = context.joke_catalog(); - Ok(catalog.random(package.as_deref()).map(JokeType::from)) - } -} - -fn field_error(err: AnyhowError) -> juniper::FieldError { - juniper::FieldError::new(err.to_string(), Value::null()) -} diff --git a/src/html.rs b/src/html.rs index 92a4e4a..ea3c880 100644 --- a/src/html.rs +++ b/src/html.rs @@ -1,12 +1,7 @@ -use anyhow::{Context, Result}; use scraper::{Html, Selector}; -pub fn 
fetch_pre_blocks(url: &str) -> Result> { - let body = ureq::get(url) - .call() - .with_context(|| format!("requesting {url}"))? - .into_string() - .with_context(|| format!("reading body from {url}"))?; +pub fn fetch_pre_blocks(url: &str) -> anyhow::Result> { + let body = reqwest::blocking::get(url)?.text()?; let document = Html::parse_document(&body); let selector = Selector::parse("pre").unwrap(); diff --git a/src/ingest/blfs.rs b/src/ingest/blfs.rs index ad7eeed..850b46d 100644 --- a/src/ingest/blfs.rs +++ b/src/ingest/blfs.rs @@ -1,5 +1,6 @@ use anyhow::{Context, Result}; use regex::Regex; +use reqwest::blocking::Client; use scraper::{Html, Selector}; use super::{BookPackage, FetchOptions}; @@ -9,10 +10,14 @@ pub fn fetch_book(options: &FetchOptions) -> Result> { let base = options.base_url.trim_end_matches('/'); let url = format!("{base}/book.html"); - let body = ureq::get(&url) - .call() - .with_context(|| format!("fetching {url}"))? - .into_string() + let client = Client::builder().build().context("building HTTP client")?; + let body = client + .get(&url) + .send() + .with_context(|| format!("fetching {}", url))? + .error_for_status() + .with_context(|| format!("request failed for {}", url))? + .text() .context("reading response body")?; parse_book_html(options, &url, &body) diff --git a/src/ingest/glfs.rs b/src/ingest/glfs.rs index 715f22a..3fb7dff 100644 --- a/src/ingest/glfs.rs +++ b/src/ingest/glfs.rs @@ -1,5 +1,6 @@ use anyhow::{Context, Result}; use regex::Regex; +use reqwest::blocking::Client; use scraper::{Html, Selector}; use super::{BookPackage, FetchOptions}; @@ -9,10 +10,14 @@ pub fn fetch_book(options: &FetchOptions) -> Result> { let base = options.base_url.trim_end_matches('/'); let url = format!("{base}/book.html"); - let body = ureq::get(&url) - .call() - .with_context(|| format!("fetching {url}"))? - .into_string() + let client = Client::builder().build().context("building HTTP client")?; + let body = client + .get(&url) + .send() + .with_context(|| format!("fetching {}", url))? + .error_for_status() + .with_context(|| format!("request failed for {}", url))? + .text() .context("reading response body")?; parse_book_html(options, &url, &body) diff --git a/src/ingest/lfs.rs b/src/ingest/lfs.rs index c5f796b..a9d2f37 100644 --- a/src/ingest/lfs.rs +++ b/src/ingest/lfs.rs @@ -1,5 +1,6 @@ use anyhow::{Context, Result}; use regex::Regex; +use reqwest::blocking::Client; use scraper::{Html, Selector}; use super::{BookPackage, FetchOptions}; @@ -8,10 +9,14 @@ pub fn fetch_book(options: &FetchOptions) -> Result> { let base = options.base_url.trim_end_matches('/'); let url = format!("{base}/book.html"); - let body = ureq::get(&url) - .call() - .with_context(|| format!("fetching {url}"))? - .into_string() + let client = Client::builder().build().context("building HTTP client")?; + let body = client + .get(&url) + .send() + .with_context(|| format!("fetching {}", url))? + .error_for_status() + .with_context(|| format!("request failed for {}", url))? 
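// Editorial sketch, not part of the patch: blfs.rs, glfs.rs, and lfs.rs now repeat the
// same Client-build / send / error_for_status / text sequence. A hypothetical shared
// helper the three fetch_book implementations could call instead:
use anyhow::{Context, Result};
use reqwest::blocking::Client;

fn fetch_html(url: &str) -> Result<String> {
    let client = Client::builder().build().context("building HTTP client")?;
    client
        .get(url)
        .send()
        .with_context(|| format!("fetching {url}"))?
        .error_for_status()
        .with_context(|| format!("request failed for {url}"))?
        .text()
        .context("reading response body")
}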
+ .text() .context("reading response body")?; parse_book_html(options, &url, &body) diff --git a/src/lib.rs b/src/lib.rs index 033ece4..e28f156 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,13 +1,10 @@ pub mod ai; pub mod db; -#[cfg(feature = "graphql")] -pub mod graphql; pub mod html; pub mod ingest; pub mod md5_utils; pub mod mirrors; pub mod pkgs; -pub mod svg_builder; pub mod version_check; pub mod wget_list; diff --git a/src/main.rs b/src/main.rs index eed6a50..44a7b0a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,7 +7,7 @@ use package_management::{ db, html, md5_utils, pkgs::{ by_name::bi::binutils::cross_toolchain::build_binutils_from_page, - generator, mlfs, + mlfs, scaffolder::{self, ScaffoldRequest}, }, version_check, wget_list, @@ -134,6 +134,8 @@ enum TuiCommand { } fn main() -> Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let cli = Cli::parse(); match cli.command { @@ -217,8 +219,10 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> { lfs_root, target, } => { - build_binutils_from_page(&url, &lfs_root, target) - .with_context(|| format!("Building Binutils using instructions from {url}"))?; + let runtime = tokio::runtime::Runtime::new().context("Creating async runtime")?; + runtime + .block_on(build_binutils_from_page(&url, &lfs_root, target)) + .map_err(|e| anyhow!("Building Binutils using instructions from {url}: {e}"))?; println!("Binutils workflow completed successfully"); } @@ -302,14 +306,6 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> { let mut created = 0usize; let mut skipped = Vec::new(); - let metadata_entries = match mlfs::load_metadata_index() { - Ok(entries) => Some(entries), - Err(err) => { - eprintln!("[mlfs] metadata index error: {err}"); - None - } - }; - let pool = if dry_run { None } else { @@ -317,6 +313,11 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> { }; for record in records { + let module_alias = record.module_alias(); + if !seen.insert(module_alias.clone()) { + continue; + } + if let Some(limit) = limit { if processed >= limit { break; @@ -324,85 +325,6 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> { } processed += 1; - let metadata_entry = metadata_entries - .as_ref() - .and_then(|entries| mlfs::match_metadata(&record, entries)); - - let mut request = if let Some(entry) = metadata_entry { - let path = PathBuf::from("ai/metadata").join(&entry.path); - match generator::request_from_metadata(&path) { - Ok(req) => req, - Err(err) => { - eprintln!( - "[mlfs] metadata apply error for {} {}: {}", - record.name, record.version, err - ); - ScaffoldRequest { - name: record.name.clone(), - version: record.version.clone(), - source: None, - md5: None, - configure_args: Vec::new(), - build_commands: Vec::new(), - install_commands: Vec::new(), - dependencies: Vec::new(), - enable_lto: true, - enable_pgo: true, - cflags: Vec::new(), - ldflags: Vec::new(), - profdata: None, - stage: record.stage.clone(), - variant: record.variant.clone(), - notes: record.notes.clone(), - module_override: None, - } - } - } - } else { - ScaffoldRequest { - name: record.name.clone(), - version: record.version.clone(), - source: None, - md5: None, - configure_args: Vec::new(), - build_commands: Vec::new(), - install_commands: Vec::new(), - dependencies: Vec::new(), - enable_lto: true, - enable_pgo: true, - cflags: Vec::new(), - ldflags: Vec::new(), - profdata: None, - stage: record.stage.clone(), - variant: record.variant.clone(), - notes: record.notes.clone(), - module_override: None, - } - }; - - if request.stage.is_none() { - 
request.stage = record.stage.clone(); - } - if request.variant.is_none() { - request.variant = record.variant.clone(); - } - if request.notes.is_none() { - request.notes = record.notes.clone(); - } - - let module_alias = request - .module_override - .clone() - .unwrap_or_else(|| record.module_alias()); - - if !seen.insert(module_alias.clone()) { - continue; - } - - if request.module_override.is_none() { - request.module_override = Some(module_alias.clone()); - } - if dry_run { println!( "Would scaffold {:<18} {:<12} -> {}", @@ -411,6 +333,26 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> { continue; } + let request = ScaffoldRequest { + name: record.name.clone(), + version: record.version.clone(), + source: None, + md5: None, + configure_args: Vec::new(), + build_commands: Vec::new(), + install_commands: Vec::new(), + dependencies: Vec::new(), + enable_lto: true, + enable_pgo: true, + cflags: Vec::new(), + ldflags: Vec::new(), + profdata: None, + stage: record.stage.clone(), + variant: record.variant.clone(), + notes: record.notes.clone(), + module_override: Some(module_alias.clone()), + }; + match scaffolder::scaffold_package(&base_dir, request) { Ok(result) => { if let Some(pool) = &pool { diff --git a/src/md5_utils.rs b/src/md5_utils.rs index 8299cd2..d851cff 100644 --- a/src/md5_utils.rs +++ b/src/md5_utils.rs @@ -1,15 +1,16 @@ -use anyhow::{Context, Result}; +use anyhow::Result; +use reqwest::blocking::Client; +use reqwest::redirect::Policy; pub fn get_md5sums() -> Result { - let agent = ureq::AgentBuilder::new().redirects(5).build(); - let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums"; + let client = Client::builder().redirect(Policy::limited(5)).build()?; + let res = client + .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums") + .send()?; - let response = agent.get(url).call().map_err(|err| match err { - ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch MD5sums: HTTP {code}"), - other => anyhow::anyhow!("Failed to fetch MD5sums: {other}"), - })?; + if !res.status().is_success() { + anyhow::bail!("Failed to fetch MD5sums: HTTP {}", res.status()); + } - response - .into_string() - .with_context(|| format!("reading body from {url}")) + Ok(res.text()?) } diff --git a/src/mirrors.rs b/src/mirrors.rs index 664c467..0b4dd5c 100644 --- a/src/mirrors.rs +++ b/src/mirrors.rs @@ -1,11 +1,14 @@ use console::Style; +use reqwest::blocking::Client; use scraper::{Html, Selector}; use std::io::{self, Write}; pub fn fetch_mirrors() -> Result, Box> { - let res = ureq::get("https://www.linuxfromscratch.org/lfs/mirrors.html#files") - .call()? - .into_string()?; + let client = Client::new(); + let res = client + .get("https://www.linuxfromscratch.org/lfs/mirrors.html#files") + .send()? + .text()?; let document = Html::parse_document(&res); let selector = Selector::parse("a[href^='http']").unwrap(); diff --git a/src/pkgs/by_name/bi/binutils/cross_toolchain.rs b/src/pkgs/by_name/bi/binutils/cross_toolchain.rs index 3ed8a86..7c1623e 100644 --- a/src/pkgs/by_name/bi/binutils/cross_toolchain.rs +++ b/src/pkgs/by_name/bi/binutils/cross_toolchain.rs @@ -1,13 +1,12 @@ -// Cross-toolchain runner that uses parser.rs info (no hardcoding). 
+// async cross-toolchain runner that uses parser.rs info (no hardcoding) use crate::pkgs::by_name::bi::binutils::parser::{BinutilsInfo, fetch_page, parse_binutils}; -use anyhow::{Context, Result, anyhow}; -use shell_words; +use reqwest::Client; use std::{ - fs::{self, File}, - io, + error::Error, path::{Path, PathBuf}, - process::Command, }; +use tokio::process::Command; +use tracing::{info, warn}; /// Configuration object - uses environment if values omitted. #[derive(Debug, Clone)] @@ -23,7 +22,14 @@ impl BinutilsConfig { let lfs_root = lfs_root.as_ref().to_path_buf(); let target = target .or_else(|| std::env::var("LFS_TGT").ok()) - .unwrap_or_else(|| "x86_64-lfs-linux-gnu".to_string()); + .unwrap_or_else(|| { + // fallback best-effort + if cfg!(target_os = "linux") { + "x86_64-lfs-linux-gnu".to_string() + } else { + "x86_64-lfs-linux-gnu".to_string() + } + }); Self { lfs_root, @@ -59,50 +65,122 @@ impl BinutilsConfig { } } -/// High-level orchestration. -pub fn build_binutils_from_page( +/// High-level orchestration. Async. +pub async fn build_binutils_from_page( page_url: &str, - lfs_root: impl AsRef, + lfs_root: impl AsRef, target: Option, -) -> Result<()> { - println!("Fetching page: {page_url}"); - let html = fetch_page(page_url).context("fetching binutils instructions")?; - let info = parse_binutils(&html).context("parsing binutils instructions")?; - println!("Parsed info: {:?}", info); +) -> Result<(), Box> { + // 1) fetch page + info!("Fetching page: {}", page_url); + let html = fetch_page(page_url).await?; + let info = parse_binutils(&html)?; + info!("Parsed info: {:?}", info); + // 2) build config let cfg = BinutilsConfig::new(lfs_root, target, info.clone()); + // 3) ensure source base dir exists let src_base = cfg.source_base_dir(); if !src_base.exists() { - println!("Creating source base dir: {:?}", src_base); - fs::create_dir_all(&src_base) - .with_context(|| format!("creating source base dir {:?}", src_base))?; + info!("Creating source base dir: {:?}", src_base); + tokio::fs::create_dir_all(&src_base).await?; } - let mut source_dir = locate_binutils_dir(&src_base)?; + // 4) find extracted source directory (binutils-*) + let mut source_dir: Option = None; + if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await { + while let Some(entry) = rd.next_entry().await? { + let ft = entry.file_type().await?; + if ft.is_dir() { + let name = entry.file_name().to_string_lossy().to_string(); + if name.to_lowercase().contains("binutils") { + source_dir = Some(entry.path()); + break; + } + } + } + } + + // 5) if not found, attempt to download & extract if source_dir.is_none() { - source_dir = download_and_extract(&cfg, &src_base)?; + if let Some(dl) = &cfg.info.download_url { + info!("No extracted source found; will download {}", dl); + + // download file into src_base + let client = Client::new(); + let resp = client.get(dl).send().await?; + if !resp.status().is_success() { + return Err(format!("Download failed: {}", resp.status()).into()); + } + + // pick a filename from URL + let url_path = url::Url::parse(dl)?; + let filename = url_path + .path_segments() + .and_then(|seg| seg.last()) + .and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }) + .ok_or("Cannot determine filename from URL")?; + + let outpath = src_base.join(&filename); + info!("Saving archive to {:?}", outpath); + let bytes = resp.bytes().await?; + tokio::fs::write(&outpath, &bytes).await?; + + // extract using tar (async spawn). 
Use absolute path to src_base + info!("Extracting archive {:?}", outpath); + let tar_path = outpath.clone(); + let mut tar_cmd = Command::new("tar"); + tar_cmd.arg("-xf").arg(&tar_path).arg("-C").arg(&src_base); + let status = tar_cmd.status().await?; + if !status.success() { + return Err("tar extraction failed".into()); + } + + // look for extracted dir again + if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await { + while let Some(entry) = rd.next_entry().await? { + let ft = entry.file_type().await?; + if ft.is_dir() { + let name = entry.file_name().to_string_lossy().to_string(); + if name.to_lowercase().contains("binutils") { + source_dir = Some(entry.path()); + break; + } + } + } + } + } else { + warn!("No download URL found on the page and no unpacked source present."); + } } - let source_dir = source_dir - .ok_or_else(|| anyhow!("Could not locate or download/extract Binutils source"))?; - println!("Using source dir: {:?}", source_dir); + let source_dir = match source_dir { + Some(p) => p, + None => return Err("Could not locate or download/extract Binutils source".into()), + }; + info!("Using source dir: {:?}", source_dir); + // 6) prepare build dir let build_dir = cfg.build_dir(); if !build_dir.exists() { - println!("Creating build dir {:?}", build_dir); - fs::create_dir_all(&build_dir) - .with_context(|| format!("creating build dir {:?}", build_dir))?; + info!("Creating build dir {:?}", build_dir); + tokio::fs::create_dir_all(&build_dir).await?; } + // 7) run configure: use absolute configure script path in source_dir let configure_path = source_dir.join("configure"); if !configure_path.exists() { - return Err(anyhow!( - "configure script not found at {:?}", - configure_path - )); + return Err(format!("configure script not found at {:?}", configure_path).into()); } + // If parser produced configure args tokens, use them; otherwise fallback to common flags let args = if !cfg.info.configure_args.is_empty() { cfg.info.configure_args.clone() } else { @@ -115,6 +193,7 @@ pub fn build_binutils_from_page( ] }; + // replace $LFS and $LFS_TGT in args let args: Vec = args .into_iter() .map(|a| { @@ -123,126 +202,83 @@ pub fn build_binutils_from_page( }) .collect(); - println!("Configuring with args: {:?}", args); - let mut configure_cmd = Command::new(&configure_path); - configure_cmd.current_dir(&build_dir); - configure_cmd.args(&args); - run_command(&mut configure_cmd).context("configure step failed")?; - println!("configure completed"); + info!("Configuring with args: {:?}", args); + // spawn configure + let mut conf_cmd = Command::new(&configure_path); + conf_cmd.current_dir(&build_dir); + for a in &args { + conf_cmd.arg(a); + } + conf_cmd.stdout(std::process::Stdio::inherit()); + conf_cmd.stderr(std::process::Stdio::inherit()); + let status = conf_cmd.status().await?; + if !status.success() { + return Err("configure step failed".into()); + } + info!("configure completed"); + + // 8) run build commands (make-like) if !cfg.info.build_cmds.is_empty() { - for raw in &cfg.info.build_cmds { - run_shell_command(raw, &build_dir) - .with_context(|| format!("build step failed: {raw}"))?; - } - } else { - let mut make_cmd = Command::new("make"); - make_cmd.current_dir(&build_dir); - run_command(&mut make_cmd).context("make failed")?; - } - println!("build completed"); - - if !cfg.info.install_cmds.is_empty() { - for raw in &cfg.info.install_cmds { - run_shell_command(raw, &build_dir) - .with_context(|| format!("install step failed: {raw}"))?; - } - } else { - let mut install_cmd = 
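// Editorial sketch, not part of the patch: the download step above derives the archive
// name from the final path segment of the download URL. A standalone illustration of
// that url-crate pattern (the example URL in the comment is made up):
fn archive_name(download_url: &str) -> Option<String> {
    let parsed = url::Url::parse(download_url).ok()?;
    parsed
        .path_segments()
        .and_then(|segments| segments.last())
        .filter(|segment| !segment.is_empty())
        .map(|segment| segment.to_string())
}
// archive_name("https://example.org/releases/binutils-2.43.tar.xz")
//     == Some("binutils-2.43.tar.xz".to_string())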
Command::new("make"); - install_cmd.arg("install"); - install_cmd.current_dir(&build_dir); - run_command(&mut install_cmd).context("make install failed")?; - } - println!("install completed"); - - Ok(()) -} - -fn locate_binutils_dir(base: &Path) -> Result> { - if !base.exists() { - return Ok(None); - } - for entry in fs::read_dir(base).with_context(|| format!("reading directory {:?}", base))? { - let entry = entry?; - if entry.file_type()?.is_dir() { - let name = entry.file_name().to_string_lossy().to_lowercase(); - if name.contains("binutils") { - return Ok(Some(entry.path())); + for b in &cfg.info.build_cmds { + // split into program + args + let mut parts = shell_words::split(b).unwrap_or_else(|_| vec![b.clone()]); + let prog = parts.remove(0); + let mut cmd = Command::new(prog); + if !parts.is_empty() { + cmd.args(parts); + } + cmd.current_dir(&build_dir); + cmd.stdout(std::process::Stdio::inherit()); + cmd.stderr(std::process::Stdio::inherit()); + let status = cmd.status().await?; + if !status.success() { + return Err(format!("build step failed: {:?}", b).into()); } } - } - Ok(None) -} - -fn download_and_extract(cfg: &BinutilsConfig, base: &Path) -> Result> { - let url = match cfg.info.download_url.as_deref() { - Some(url) => url, - None => { - eprintln!("No download URL found on the page and no unpacked source present."); - return Ok(None); + } else { + // fallback to running `make` + let mut m = Command::new("make"); + m.current_dir(&build_dir); + m.stdout(std::process::Stdio::inherit()); + m.stderr(std::process::Stdio::inherit()); + let status = m.status().await?; + if !status.success() { + return Err("make failed".into()); } - }; + } + info!("build completed"); - println!("Downloading {url}"); - let response = ureq::get(url).call().map_err(|err| match err { - ureq::Error::Status(code, _) => anyhow!("Download failed: HTTP {code}"), - other => anyhow!("Download failed: {other}"), - })?; - - let final_url = response.get_url().to_string(); - let parsed = url::Url::parse(&final_url) - .with_context(|| format!("parsing final download URL {final_url}"))?; - let filename = parsed - .path_segments() - .and_then(|segments| segments.last()) - .filter(|s| !s.is_empty()) - .map(|s| s.to_string()) - .ok_or_else(|| anyhow!("Cannot determine filename from URL"))?; - - let outpath = base.join(&filename); - let mut reader = response.into_reader(); - let mut file = - File::create(&outpath).with_context(|| format!("creating archive file {:?}", outpath))?; - io::copy(&mut reader, &mut file) - .with_context(|| format!("writing archive to {:?}", outpath))?; - - println!("Extracting archive {:?}", outpath); - let status = Command::new("tar") - .arg("-xf") - .arg(&outpath) - .arg("-C") - .arg(base) - .status() - .with_context(|| "spawning tar".to_string())?; - if !status.success() { - return Err(anyhow!("tar extraction failed")); + // 9) run install commands (make install) + if !cfg.info.install_cmds.is_empty() { + for inst in &cfg.info.install_cmds { + let mut parts = shell_words::split(inst).unwrap_or_else(|_| vec![inst.clone()]); + let prog = parts.remove(0); + let mut cmd = Command::new(prog); + if !parts.is_empty() { + cmd.args(parts); + } + cmd.current_dir(&build_dir); + cmd.stdout(std::process::Stdio::inherit()); + cmd.stderr(std::process::Stdio::inherit()); + let status = cmd.status().await?; + if !status.success() { + return Err(format!("install step failed: {:?}", inst).into()); + } + } + } else { + // fallback `make install` + let mut mi = Command::new("make"); + mi.arg("install"); + 
mi.current_dir(&build_dir); + mi.stdout(std::process::Stdio::inherit()); + mi.stderr(std::process::Stdio::inherit()); + let status = mi.status().await?; + if !status.success() { + return Err("make install failed".into()); + } } - locate_binutils_dir(base) -} - -fn run_command(cmd: &mut Command) -> Result<()> { - cmd.stdout(std::process::Stdio::inherit()); - cmd.stderr(std::process::Stdio::inherit()); - let status = cmd - .status() - .with_context(|| "spawning process".to_string())?; - if !status.success() { - return Err(anyhow!("command exited with status {status}")); - } + info!("install completed. Binutils Pass 1 done."); Ok(()) } - -fn run_shell_command(raw: &str, cwd: &Path) -> Result<()> { - let mut parts = shell_words::split(raw).unwrap_or_else(|_| vec![raw.to_string()]); - if parts.is_empty() { - return Ok(()); - } - let prog = parts.remove(0); - let mut cmd = Command::new(prog); - if !parts.is_empty() { - cmd.args(parts); - } - cmd.current_dir(cwd); - run_command(&mut cmd) -} diff --git a/src/pkgs/by_name/bi/binutils/parser.rs b/src/pkgs/by_name/bi/binutils/parser.rs index 494655f..5ec2540 100644 --- a/src/pkgs/by_name/bi/binutils/parser.rs +++ b/src/pkgs/by_name/bi/binutils/parser.rs @@ -1,6 +1,7 @@ -// Parser for Binutils Pass 1 page using lightweight HTTP fetching. -use anyhow::{Context, Result}; +// async parser for Binutils Pass 1 page +use reqwest::Client; use scraper::{Html, Selector}; +use std::error::Error; #[derive(Debug, Clone)] pub struct BinutilsInfo { @@ -33,16 +34,16 @@ impl Default for BinutilsInfo { } } -/// Fetch page content synchronously -pub fn fetch_page(url: &str) -> Result { - ureq::get(url) - .call() - .map_err(|err| match err { - ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch {url}: HTTP {code}"), - other => anyhow::anyhow!("Failed to fetch {url}: {other}"), - })? - .into_string() - .with_context(|| format!("reading body from {url}")) +/// Fetch page content (async) +pub async fn fetch_page(url: &str) -> Result> { + let client = Client::new(); + let res = client.get(url).send().await?; + let status = res.status(); + if !status.is_success() { + return Err(format!("Failed to fetch {}: {}", url, status).into()); + } + let text = res.text().await?; + Ok(text) } /// Parse the LFS Binutils pass1 page; robust to small formatting changes. @@ -50,7 +51,7 @@ pub fn fetch_page(url: &str) -> Result { /// - finds a download URL ending with .tar.xz/.tar.gz /// - finds configure pre block(s), builds token list /// - finds `make` / `make install` pre blocks -pub fn parse_binutils(html: &str) -> Result { +pub fn parse_binutils(html: &str) -> Result> { let document = Html::parse_document(html); let mut info = BinutilsInfo::default(); diff --git a/src/pkgs/generator.rs b/src/pkgs/generator.rs index 63a4548..a215b3c 100644 --- a/src/pkgs/generator.rs +++ b/src/pkgs/generator.rs @@ -34,7 +34,6 @@ struct HarvestedMetadata { #[derive(Debug, Deserialize)] struct HarvestedSource { #[serde(default)] - #[allow(dead_code)] archive: Option, #[serde(default)] urls: Vec, @@ -68,18 +67,14 @@ struct HarvestedOptimisations { #[derive(Debug, Deserialize)] struct CommandPhase { #[serde(default)] - #[allow(dead_code)] phase: Option, #[serde(default)] commands: Vec, #[serde(default)] - #[allow(dead_code)] cwd: Option, #[serde(default)] - #[allow(dead_code)] requires_root: Option, #[serde(default)] - #[allow(dead_code)] notes: Option, } @@ -102,12 +97,6 @@ pub fn generate_module( Ok(result.module_path) } -/// Build a scaffolding request directly from a metadata JSON file. 
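// Editorial sketch, not part of the patch: fetch_page is now async and parse_binutils
// returns boxed errors, so an ad-hoc caller has to supply a runtime, mirroring what
// main.rs does for the workflow. A minimal, hypothetical smoke check (the URL is
// illustrative and the call needs network access):
fn check_binutils_page() -> Result<(), Box<dyn std::error::Error>> {
    let url = "https://www.linuxfromscratch.org/lfs/view/stable/chapter05/binutils-pass1.html";
    let runtime = tokio::runtime::Runtime::new()?;
    let html = runtime.block_on(fetch_page(url))?;
    let info = parse_binutils(&html)?;
    println!("download: {:?}", info.download_url);
    println!("configure args: {:?}", info.configure_args);
    Ok(())
}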
-pub fn request_from_metadata(path: impl AsRef) -> Result { - let harvested = parse_metadata(path.as_ref())?; - build_request(&harvested) -} - /// Compute the directory for a module derived from the given metadata. pub fn module_directory( metadata_path: impl AsRef, diff --git a/src/pkgs/mlfs.rs b/src/pkgs/mlfs.rs index 64a6ece..214da30 100644 --- a/src/pkgs/mlfs.rs +++ b/src/pkgs/mlfs.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, fs, path::PathBuf}; +use std::borrow::Cow; use anyhow::{Context, Result, anyhow}; use serde::{Deserialize, Serialize}; @@ -83,30 +83,6 @@ impl MlfsPackageRecord { } } -#[derive(Debug, Deserialize)] -struct MetadataIndex { - packages: Vec, -} - -#[derive(Debug, Clone, Deserialize)] -struct MetadataPackage { - book: String, - id: String, - name: String, - path: String, - stage: Option, - variant: Option, -} - -#[derive(Debug, Clone)] -pub struct MlfsMetadataEntry { - pub id: String, - pub name: String, - pub stage: Option, - pub variant: Option, - pub path: PathBuf, -} - pub fn fetch_catalog(base_url: &str) -> Result> { let options = FetchOptions::new(base_url, BookKind::Mlfs); let packages = lfs::fetch_book(&options)?; @@ -133,75 +109,8 @@ pub fn load_or_fetch_catalog(base_url: Option<&str>) -> Result Ok(records), Err(err) => { - eprintln!("[mlfs] fetch error ({err}); falling back to cached MLFS package list"); + tracing::warn!("mlfs_fetch_error" = %err, "Falling back to cached MLFS package list"); load_cached_catalog() } } } - -pub fn load_metadata_index() -> Result> { - let raw = fs::read_to_string("ai/metadata/index.json").context("reading AI metadata index")?; - let index: MetadataIndex = - serde_json::from_str(&raw).context("parsing AI metadata index JSON")?; - - let entries = index - .packages - .into_iter() - .filter(|pkg| pkg.book.eq_ignore_ascii_case("mlfs")) - .map(|pkg| MlfsMetadataEntry { - id: pkg.id, - name: pkg.name, - stage: pkg.stage, - variant: pkg.variant, - path: PathBuf::from(pkg.path), - }) - .collect(); - - Ok(entries) -} - -pub fn match_metadata<'a>( - record: &MlfsPackageRecord, - entries: &'a [MlfsMetadataEntry], -) -> Option<&'a MlfsMetadataEntry> { - let target_name = normalize(&record.name); - let target_variant = normalize_opt(record.variant.as_deref()); - let target_stage = normalize_opt(record.stage.as_deref()); - - entries - .iter() - .filter(|entry| normalize(&entry.name) == target_name) - .max_by_key(|entry| { - let mut score = 0; - if let (Some(tv), Some(ev)) = (&target_variant, normalize_opt(entry.variant.as_deref())) - { - if *tv == ev { - score += 4; - } - } - if let (Some(ts), Some(es)) = (&target_stage, normalize_opt(entry.stage.as_deref())) { - if *ts == es { - score += 2; - } - } - if target_variant.is_none() && entry.variant.is_none() { - score += 1; - } - if target_stage.is_none() && entry.stage.is_none() { - score += 1; - } - score - }) -} - -fn normalize(value: &str) -> String { - value - .chars() - .filter(|c| c.is_ascii_alphanumeric()) - .collect::() - .to_ascii_lowercase() -} - -fn normalize_opt(value: Option<&str>) -> Option { - value.map(normalize) -} diff --git a/src/svg_builder.rs b/src/svg_builder.rs deleted file mode 100644 index 77a6a74..0000000 --- a/src/svg_builder.rs +++ /dev/null @@ -1,375 +0,0 @@ -#[derive(Default)] -pub struct Document { - width: u32, - height: u32, - view_box: Option, - role: Option, - aria_label: Option<(String, String)>, - title: Option, - desc: Option, - defs: Vec, - elements: Vec, -} - -impl Document { - pub fn new(width: u32, height: u32) -> Self { - Self { - width, - height, - 
..Default::default() - } - } - - pub fn view_box(mut self, value: &str) -> Self { - self.view_box = Some(value.to_string()); - self - } - - pub fn role(mut self, value: &str) -> Self { - self.role = Some(value.to_string()); - self - } - - pub fn aria_label(mut self, title_id: &str, desc_id: &str) -> Self { - self.aria_label = Some((title_id.to_string(), desc_id.to_string())); - self - } - - pub fn title(mut self, value: &str) -> Self { - self.title = Some(value.to_string()); - self - } - - pub fn desc(mut self, value: &str) -> Self { - self.desc = Some(value.to_string()); - self - } - - pub fn add_defs(mut self, defs: Defs) -> Self { - self.defs.push(defs.finish()); - self - } - - pub fn add_element(mut self, element: impl Into) -> Self { - self.elements.push(element.into()); - self - } - - pub fn finish(self) -> String { - let Document { - width, - height, - view_box, - role, - aria_label, - title, - desc, - defs, - elements, - } = self; - - let mut out = String::new(); - out.push_str(&format!( - ""); - out.push('\n'); - - if let Some(title) = title { - out.push_str(&format!(" {}\n", title_id, title)); - } - if let Some(desc) = desc { - out.push_str(&format!(" {}\n", desc_id, desc)); - } - - if !defs.is_empty() { - out.push_str(" \n"); - for block in &defs { - out.push_str(block); - } - out.push_str(" \n"); - } - - for element in &elements { - out.push_str(element); - out.push('\n'); - } - - out.push_str("\n"); - out - } -} - -pub struct Defs { - content: Vec, -} - -impl Defs { - pub fn new() -> Self { - Self { - content: Vec::new(), - } - } - - pub fn linear_gradient(mut self, id: &str, gradient: Gradient) -> Self { - self.content - .push(format!(" {}\n", gradient.render_linear(id))); - self - } - - pub fn radial_gradient(mut self, id: &str, gradient: RadialGradient) -> Self { - self.content.push(format!(" {}\n", gradient.render(id))); - self - } - - pub fn filter(mut self, id: &str, filter: Filter) -> Self { - self.content.push(format!(" {}\n", filter.render(id))); - self - } - - pub fn finish(self) -> String { - self.content.concat() - } -} - -pub struct Gradient { - x1: String, - y1: String, - x2: String, - y2: String, - stops: Vec, -} - -impl Gradient { - pub fn new(x1: &str, y1: &str, x2: &str, y2: &str) -> Self { - Self { - x1: x1.to_string(), - y1: y1.to_string(), - x2: x2.to_string(), - y2: y2.to_string(), - stops: Vec::new(), - } - } - - pub fn stop(mut self, offset: &str, attrs: &[(&str, &str)]) -> Self { - let mut tag = format!(""); - self.stops.push(tag); - self - } - - fn render_linear(&self, id: &str) -> String { - let mut out = format!( - "\n", - id, self.x1, self.y1, self.x2, self.y2 - ); - for stop in &self.stops { - out.push_str(" "); - out.push_str(stop); - out.push('\n'); - } - out.push_str(" "); - out - } -} - -pub struct RadialGradient { - cx: String, - cy: String, - r: String, - stops: Vec, -} - -impl RadialGradient { - pub fn new(cx: &str, cy: &str, r: &str) -> Self { - Self { - cx: cx.to_string(), - cy: cy.to_string(), - r: r.to_string(), - stops: Vec::new(), - } - } - - pub fn stop(mut self, offset: &str, attrs: &[(&str, &str)]) -> Self { - let mut tag = format!(""); - self.stops.push(tag); - self - } - - fn render(&self, id: &str) -> String { - let mut out = format!( - "\n", - id, self.cx, self.cy, self.r - ); - for stop in &self.stops { - out.push_str(" "); - out.push_str(stop); - out.push('\n'); - } - out.push_str(" "); - out - } -} - -pub struct Filter { - attrs: Vec<(String, String)>, - content: Vec, -} - -impl Filter { - pub fn new() -> Self { - Self { - 
attrs: Vec::new(), - content: Vec::new(), - } - } - - pub fn attr(mut self, key: &str, value: &str) -> Self { - self.attrs.push((key.to_string(), value.to_string())); - self - } - - pub fn raw(mut self, markup: &str) -> Self { - self.content.push(format!(" {}\n", markup)); - self - } - - fn render(&self, id: &str) -> String { - let attrs = self - .attrs - .iter() - .map(|(k, v)| format!(" {}=\"{}\"", k, v)) - .collect::(); - let mut out = format!("\n", id, attrs); - for child in &self.content { - out.push_str(child); - } - out.push_str(" "); - out - } -} - -pub struct Element { - tag: String, - attrs: Vec<(String, String)>, - content: Option, -} - -impl Element { - pub fn new(tag: &str) -> Self { - Self { - tag: tag.to_string(), - attrs: Vec::new(), - content: None, - } - } - - pub fn attr(mut self, key: &str, value: &str) -> Self { - self.attrs.push((key.to_string(), value.to_string())); - self - } - - pub fn text(mut self, text: &str) -> String { - self.content = Some(text.to_string()); - self.render() - } - - pub fn empty(mut self) -> String { - self.content = None; - self.render() - } - - fn render(&self) -> String { - let attrs = self - .attrs - .iter() - .map(|(k, v)| format!(" {}=\"{}\"", k, v)) - .collect::(); - if let Some(content) = &self.content { - format!( - " <{tag}{attrs}>{content}", - tag = self.tag, - attrs = attrs, - content = content - ) - } else { - format!(" <{tag}{attrs} />", tag = self.tag, attrs = attrs) - } - } -} - -pub struct Group { - attrs: Vec<(String, String)>, - children: Vec, -} - -impl Group { - pub fn new() -> Self { - Self { - attrs: Vec::new(), - children: Vec::new(), - } - } - - pub fn attr(mut self, key: &str, value: &str) -> Self { - self.attrs.push((key.to_string(), value.to_string())); - self - } - - pub fn child(mut self, element: impl Into) -> Self { - self.children.push(element.into()); - self - } - - pub fn render(&self) -> String { - let attrs = self - .attrs - .iter() - .map(|(k, v)| format!(" {}=\"{}\"", k, v)) - .collect::(); - let mut out = format!(" \n", attrs); - for child in &self.children { - out.push_str(child); - out.push('\n'); - } - out.push_str(" "); - out - } -} - -impl From for String { - fn from(group: Group) -> Self { - group.render() - } -} - -impl From for String { - fn from(element: Element) -> Self { - element.render() - } -} - -pub fn path(d: &str) -> String { - Element::new("path").attr("d", d).empty() -} diff --git a/src/tui/animations/donut.rs b/src/tui/animations/donut.rs deleted file mode 100644 index 531c4f3..0000000 --- a/src/tui/animations/donut.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::time::Duration; -use rsille::canvas::Canvas; -use super::Animation; - -const THETA_SPACING: f64 = 0.07; -const PHI_SPACING: f64 = 0.02; - -pub struct DonutAnimation { - a: f64, // rotation around X - b: f64, // rotation around Z - size: (u16, u16), -} - -impl DonutAnimation { - pub fn new(width: u16, height: u16) -> Self { - Self { - a: 0.0, - b: 0.0, - size: (width, height), - } - } -} - -impl Animation for DonutAnimation { - fn update(&mut self, delta: Duration) { - let delta_secs = delta.as_secs_f64(); - self.a += delta_secs; - self.b += delta_secs * 0.5; - } - - fn render(&self, canvas: &mut Canvas) { - let (width, height) = self.size; - let (width_f, height_f) = (width as f64, height as f64); - let k2 = 5.0; - let k1 = width_f * k2 * 3.0 / (8.0 * (height_f + width_f)); - - for theta in 0..((2.0 * std::f64::consts::PI / THETA_SPACING) as i32) { - let theta_f = theta as f64 * THETA_SPACING; - let cos_theta = theta_f.cos(); - 
let sin_theta = theta_f.sin(); - - for phi in 0..((2.0 * std::f64::consts::PI / PHI_SPACING) as i32) { - let phi_f = phi as f64 * PHI_SPACING; - let cos_phi = phi_f.cos(); - let sin_phi = phi_f.sin(); - - let cos_a = self.a.cos(); - let sin_a = self.a.sin(); - let cos_b = self.b.cos(); - let sin_b = self.b.sin(); - - let h = cos_theta + 2.0; - let d = 1.0 / (sin_phi * h * sin_a + sin_theta * cos_a + 5.0); - let t = sin_phi * h * cos_a - sin_theta * sin_a; - - let x = (width_f / 2.0 + 30.0 * d * (cos_phi * h * cos_b - t * sin_b)) as i32; - let y = (height_f / 2.0 + 15.0 * d * (cos_phi * h * sin_b + t * cos_b)) as i32; - let z = (1.0 / d) as u8; - - if x >= 0 && x < width as i32 && y >= 0 && y < height as i32 { - let luminance = if z > 0 { z } else { 1 }; - let c = match luminance { - 0..=31 => '.', - 32..=63 => '*', - 64..=95 => 'o', - 96..=127 => '&', - 128..=159 => '8', - 160..=191 => '#', - _ => '@', - }; - canvas.put_char(x as u16, y as u16, c); - } - } - } - } - - fn is_finished(&self) -> bool { - false // continuous animation - } -} \ No newline at end of file diff --git a/src/tui/animations/mod.rs b/src/tui/animations/mod.rs deleted file mode 100644 index 5eb3a7b..0000000 --- a/src/tui/animations/mod.rs +++ /dev/null @@ -1,13 +0,0 @@ -use rsille::canvas::Canvas; -use std::time::Duration; - -pub trait Animation { - fn update(&mut self, delta: Duration); - fn render(&self, canvas: &mut Canvas); - fn is_finished(&self) -> bool; -} - -pub trait ProgressAnimation: Animation { - fn set_progress(&mut self, progress: f64); - fn get_progress(&self) -> f64; -} diff --git a/src/tui/animations/progress.rs b/src/tui/animations/progress.rs deleted file mode 100644 index 5ac2377..0000000 --- a/src/tui/animations/progress.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::time::Duration; -use rsille::canvas::Canvas; -use super::{Animation, ProgressAnimation}; - -pub struct ProgressBarAnimation { - progress: f64, - width: u16, - height: u16, - animation_offset: f64, -} - -impl ProgressBarAnimation { - pub fn new(width: u16, height: u16) -> Self { - Self { - progress: 0.0, - width, - height, - animation_offset: 0.0, - } - } -} - -impl Animation for ProgressBarAnimation { - fn update(&mut self, delta: Duration) { - self.animation_offset += delta.as_secs_f64() * 2.0; - if self.animation_offset >= 1.0 { - self.animation_offset -= 1.0; - } - } - - fn render(&self, canvas: &mut Canvas) { - // Animated progress bar rendering will be implemented here - } - - fn is_finished(&self) -> bool { - self.progress >= 1.0 - } -} - -impl ProgressAnimation for ProgressBarAnimation { - fn set_progress(&mut self, progress: f64) { - self.progress = progress.clamp(0.0, 1.0); - } - - fn get_progress(&self) -> f64 { - self.progress - } -} \ No newline at end of file diff --git a/src/tui/disk_manager.rs b/src/tui/disk_manager.rs index 8654fa5..0a08278 100644 --- a/src/tui/disk_manager.rs +++ b/src/tui/disk_manager.rs @@ -9,6 +9,7 @@ use crossterm::event::{self, Event, KeyCode}; use crossterm::execute; use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen}; use gptman::{GPT, GPTPartitionEntry, PartitionName}; +use tracing::{info, warn}; use tui::{ Terminal, backend::CrosstermBackend, @@ -26,6 +27,7 @@ use uuid::Uuid; /// crossterm = "0.26" /// gptman = "2.0" /// uuid = { version = "1", features = ["v4"] } +/// tracing = "0.1" pub struct DiskManager; impl DiskManager { @@ -146,11 +148,11 @@ impl DiskManager { let path = devices[selected_idx].clone(); match Self::create_partition_tui(&path, &mut term) { Ok(m) => { - 
println!("[disk-manager] {}", m); + info!(target: "disk_manager", "{}", m); status_msg = m; } Err(e) => { - eprintln!("[disk-manager] create partition error: {e}"); + warn!(target: "disk_manager", "create partition error: {:?}", e); status_msg = format!("Create failed: {}", e); } } diff --git a/src/tui/downloader.rs b/src/tui/downloader.rs index fa536d3..83ddd5b 100644 --- a/src/tui/downloader.rs +++ b/src/tui/downloader.rs @@ -1,25 +1,20 @@ -use crate::tui::animations::{ - Animation, ProgressAnimation, donut::DonutAnimation, progress::ProgressBarAnimation, -}; -use rsille::canvas::Canvas; -use std::{io::Stdout, thread, time::Duration}; +use std::io::Stdout; +use tracing::instrument; use tui::{ Terminal, backend::CrosstermBackend, layout::{Constraint, Direction, Layout}, style::Style, text::Spans, - widgets::{Block, Borders, List, ListItem}, + widgets::{Block, Borders, Gauge, List, ListItem}, }; use crate::tui::settings::Settings; pub struct Downloader; -const TARGET_FPS: u64 = 30; -const FRAME_TIME: Duration = Duration::from_micros(1_000_000 / TARGET_FPS); - impl Downloader { + #[instrument(skip(terminal, settings))] pub fn show_downloader( terminal: &mut Terminal>, settings: &Settings, @@ -27,26 +22,14 @@ impl Downloader { let files = vec!["file1.tar.gz", "file2.tar.gz", "file3.tar.gz"]; let progress = vec![0.3, 0.5, 0.9]; - let mut last_update = std::time::Instant::now(); loop { - let frame_start = std::time::Instant::now(); - let delta = frame_start - last_update; - last_update = frame_start; - terminal.draw(|f| { let size = f.size(); let chunks = Layout::default() .direction(Direction::Vertical) .margin(2) - .constraints( - [ - Constraint::Percentage(50), - Constraint::Percentage(25), - Constraint::Percentage(25), - ] - .as_ref(), - ) + .constraints([Constraint::Percentage(70), Constraint::Percentage(30)].as_ref()) .split(size); let items: Vec = files @@ -61,42 +44,16 @@ impl Downloader { ); f.render_widget(list, chunks[0]); - // Progress bar - let mut progress_canvas = Canvas::new(chunks[1].width, chunks[1].height); - let mut progress_bar = ProgressBarAnimation::new(chunks[1].width, chunks[1].height); - for (i, prog) in progress.iter().enumerate() { - progress_bar.set_progress(*prog as f64); - progress_bar.render(&mut progress_canvas); + let gauge = Gauge::default() + .block(Block::default().title(files[i])) + .gauge_style(Style::default().fg(settings.theme.primary_color())) + .ratio(*prog as f64); + f.render_widget(gauge, chunks[1]); } - - // Render progress bar - let progress_block = Block::default() - .title(files[0]) - .borders(Borders::ALL) - .border_style(Style::default().fg(settings.theme.primary_color())); - - f.render_widget(progress_block, chunks[1]); - - // Donut animation - let mut donut_canvas = Canvas::new(chunks[2].width, chunks[2].height); - let mut donut = DonutAnimation::new(chunks[2].width, chunks[2].height); - donut.render(&mut donut_canvas); - - // Render donut - let donut_block = Block::default() - .title("Progress") - .borders(Borders::ALL) - .border_style(Style::default().fg(settings.theme.secondary_color())); - - f.render_widget(donut_block, chunks[2]); })?; - // Frame rate limiting - let frame_time = frame_start.elapsed(); - if frame_time < FRAME_TIME { - thread::sleep(FRAME_TIME - frame_time); - } + break; // remove in real async loop } Ok(()) diff --git a/src/tui/mod.rs b/src/tui/mod.rs index c371676..ad84ace 100644 --- a/src/tui/mod.rs +++ b/src/tui/mod.rs @@ -1,4 +1,3 @@ -pub mod animations; pub mod disk_manager; pub mod downloader; pub mod main_menu; 
diff --git a/src/tui/settings.rs b/src/tui/settings.rs
index be82287..8badd83 100644
--- a/src/tui/settings.rs
+++ b/src/tui/settings.rs
@@ -1,4 +1,5 @@
 use std::io::Stdout;
+use tracing::instrument;
 use tui::{Terminal, backend::CrosstermBackend};
 
 pub struct Settings {
@@ -17,6 +18,7 @@ impl Theme {
 }
 
 impl Settings {
+    #[instrument(skip(_terminal))]
     pub fn show_settings(
         _terminal: &mut Terminal<CrosstermBackend<Stdout>>,
     ) -> Result<(), Box<dyn std::error::Error>> {
diff --git a/src/wget_list.rs b/src/wget_list.rs
index 4b350d2..b8224ec 100644
--- a/src/wget_list.rs
+++ b/src/wget_list.rs
@@ -1,17 +1,16 @@
-use anyhow::{Context, Result};
+use anyhow::Result;
+use reqwest::blocking::Client;
+use reqwest::redirect::Policy;
 
 pub fn get_wget_list() -> Result<String> {
-    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv";
-    let agent = ureq::AgentBuilder::new().redirects(5).build();
-    agent
-        .get(url)
-        .call()
-        .map_err(|err| match err {
-            ureq::Error::Status(code, _) => {
-                anyhow::anyhow!("Failed to fetch wget-list: HTTP {code}")
-            }
-            other => anyhow::anyhow!("Failed to fetch wget-list: {other}"),
-        })?
-        .into_string()
-        .with_context(|| format!("reading body from {url}"))
+    let client = Client::builder().redirect(Policy::limited(5)).build()?;
+    let res = client
+        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv")
+        .send()?;
+
+    if !res.status().is_success() {
+        anyhow::bail!("Failed to fetch wget-list: HTTP {}", res.status());
+    }
+
+    Ok(res.text()?)
 }
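// Editorial sketch, not part of the patch: get_md5sums and get_wget_list now contain
// the same "GET with limited redirects, check status, return text" logic verbatim. A
// hypothetical shared helper both could delegate to:
use anyhow::Result;
use reqwest::blocking::Client;
use reqwest::redirect::Policy;

fn fetch_text(url: &str) -> Result<String> {
    let client = Client::builder().redirect(Policy::limited(5)).build()?;
    let res = client.get(url).send()?;
    if !res.status().is_success() {
        anyhow::bail!("Failed to fetch {url}: HTTP {}", res.status());
    }
    Ok(res.text()?)
}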