refactor: replace reqwest/tokio stack with ureq

m00d · 2025-10-01 12:28:04 +02:00
parent be84650502 · commit 0067162d04
19 changed files with 1136 additions and 1198 deletions
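Every touched call site follows the same mechanical rewrite: the blocking reqwest client (and the tokio runtime it dragged in) becomes a direct ureq call, and the old error_for_status() step becomes an explicit match, since ureq 2.x reports non-2xx responses as ureq::Error::Status. A minimal sketch of the shape, mirroring the fetch_text helper added below:

    use anyhow::{Context, Result, anyhow};

    // Before (reqwest, blocking):
    //     let body = reqwest::blocking::get(url)?.error_for_status()?.text()?;
    // After (ureq, synchronous; no async runtime required):
    fn fetch_text(url: &str) -> Result<String> {
        ureq::get(url)
            .call()
            // ureq surfaces HTTP-level failures as Error::Status(code, response)
            .map_err(|err| match err {
                ureq::Error::Status(code, _) => anyhow!("request failed: HTTP {code}"),
                other => anyhow!("request failed: {other}"),
            })?
            .into_string()
            .with_context(|| format!("reading response body from {url}"))
    }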

.cargo/config.toml (new file, 41 lines)

@@ -0,0 +1,41 @@
+[profile.dev]
+opt-level = 0
+debug = true
+lto = false
+codegen-units = 256
+panic = "unwind"
+incremental = true
+
+[profile.dev.package."*"]
+opt-level = 0
+
+[profile.release]
+opt-level = 3
+lto = "fat"
+codegen-units = 1
+panic = "abort"
+debug = false
+incremental = false
+
+[profile.release.package."*"]
+opt-level = 3
+
+[profile.release-pgo-instrument]
+inherits = "release"
+debug = true
+lto = false
+incremental = false
+
+[profile.release-pgo-instrument.package."*"]
+opt-level = 3
+
+[profile.release-pgo]
+inherits = "release"
+incremental = false
+
+[profile.release-pgo.package."*"]
+opt-level = 3
+
+[alias]
+pgo-instrument = "build --profile release-pgo-instrument"
+pgo-build = "build --profile release-pgo"
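The instrumented profile deliberately keeps debug = true and disables LTO so collected profiles stay attributable; the aliases then give a two-step flow, presumably `cargo pgo-instrument`, a representative training run, then `cargo pgo-build`. Note that neither profile sets `-Cprofile-generate`/`-Cprofile-use` itself, so those flags are presumably supplied through RUSTFLAGS outside this file.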

Cargo.lock (generated, 1419 lines) — diff suppressed because it is too large

Cargo.toml

@@ -15,6 +15,9 @@ console = "0.16.1"
 # Optional Terminal UI
 crossterm = { version = "0.29.0", optional = true }
 tui = { version = "0.19.0", optional = true }
+rsille = { version = "2.3", optional = true }
+gptman = { version = "2.0.1", optional = true }
+uuid = { version = "1.18.1", optional = true, features = ["v4"] }
 
 # Parsing & scraping
 html_parser = "0.7.0"
@@ -22,78 +25,42 @@ scraper = "0.19.0"
 regex = "1.11.3"
 serde = { version = "1.0.228", features = ["derive"] }
 serde_json = "1.0.145"
-jsonschema = "0.17.0"
+jsonschema = { version = "0.17.0", default-features = false, features = ["draft202012"] }
 walkdir = "2.5.0"
 chrono = { version = "0.4.38", default-features = false, features = ["clock"] }
 sha2 = "0.10.8"
 
 # Utilities
-indicatif = "0.18.0"
-spinners = "4.1.1"
 num_cpus = "1.17.0"
-rand = "0.9.2"
+rand = { version = "0.9.2", optional = true }
 md5 = "0.8.0"
 
 # HTTP
-reqwest = { version = "0.12.23", features = ["blocking", "json"] }
 semver = "1.0.27"
 inquire = "0.9.1"
-tracing = "0.1.41"
-tracing-appender = "0.2.3"
-tracing-subscriber = { version = "0.3.20", features = ["env-filter", "fmt"] }
-gptman = "2.0.1"
-dialoguer = "0.12.0"
-tokio = { version = "1.47.1", features = ["full"] }
+juniper = { version = "0.17", optional = true }
+actix-web = { version = "4.9", optional = true }
+juniper_actix = { version = "0.7", optional = true }
 shell-words = "1.1.0"
 url = "2.5.7"
-uuid = { version = "1.18.1", features = ["v4"] }
 hex = "0.4.3"
 diesel = { version = "2.1.6", features = ["sqlite", "r2d2", "returning_clauses_for_sqlite_3_35"] }
+
+# Networking
+ureq = { version = "2.9.7", features = ["tls", "json"] }
 
 [features]
 # TUI feature flag
-tui = ["dep:tui", "dep:crossterm"]
+tui = ["dep:tui", "dep:crossterm", "dep:rsille", "dep:gptman", "dep:uuid"]
+# GraphQL/HTTP server feature flag
+graphql = ["dep:juniper", "dep:actix-web", "dep:juniper_actix", "dep:rand"]
 # Optional default features
 default = []
 
-# -----------------------
-# Cargo-make tasks
-# -----------------------
-[tasks.format]
-description = "Format Rust code using rustfmt"
-install_crate = "rustfmt"
-command = "cargo"
-args = ["fmt", "--", "--emit=files"]
-
-[tasks.clean]
-description = "Clean build artifacts"
-command = "cargo"
-args = ["clean"]
-
-[tasks.build]
-description = "Build the project"
-command = "cargo"
-args = ["build"]
-dependencies = ["clean"]
-
-[tasks.test]
-description = "Run tests"
-command = "cargo"
-args = ["test"]
-dependencies = ["clean"]
-
-[tasks.my-flow]
-description = "Run full workflow: format, build, test"
-dependencies = ["format", "build", "test"]
-
-[tasks.dev-flow]
-description = "Full developer workflow: format, lint, build, test"
-dependencies = ["format", "clippy", "build", "test"]
-
-[tasks.release-build]
-description = "Build the project in release mode"
-command = "cargo"
-args = ["build", "--release", "--all-features"]
-dependencies = ["clean"]
+[[bin]]
+name = "graphql_server"
+path = "src/bin/graphql_server.rs"
+required-features = ["graphql"]

---

@@ -2,12 +2,11 @@ use std::collections::HashSet;
 use std::fs;
 use std::path::{Path, PathBuf};
 
-use anyhow::{Context, Result};
+use anyhow::{Context, Result, anyhow};
 use chrono::Utc;
 use clap::{Parser, Subcommand};
 use jsonschema::JSONSchema;
 use regex::Regex;
-use reqwest::{blocking::Client, redirect::Policy};
 use scraper::{ElementRef, Html, Selector};
 use serde_json::{Value, json};
 use sha2::{Digest, Sha256};
@@ -379,6 +378,16 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result<PackageSummary>
         .and_then(Value::as_str)
         .context("missing status.state")?
         .to_string();
+    let tags = status
+        .get("tags")
+        .and_then(Value::as_array)
+        .map(|array| {
+            array
+                .iter()
+                .filter_map(|value| value.as_str().map(|s| s.to_string()))
+                .collect::<Vec<_>>()
+        })
+        .unwrap_or_default();
 
     Ok(PackageSummary {
         schema_version,
@@ -393,6 +402,7 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result<PackageSummary>
             .to_str()
             .unwrap_or_default()
             .replace('\\', "/"),
+        tags,
     })
 }
@@ -409,18 +419,7 @@ fn harvest_metadata(
     override_base: Option<&str>,
 ) -> Result<HarvestResult> {
     let page_url = resolve_page_url(book, page, override_base)?;
-    let client = Client::builder()
-        .user_agent("lpkg-metadata-indexer/0.1")
-        .build()?;
-    let response = client
-        .get(&page_url)
-        .send()
-        .with_context(|| format!("fetching {}", page_url))?
-        .error_for_status()
-        .with_context(|| format!("non-success status for {}", page_url))?;
-    let html = response
-        .text()
-        .with_context(|| format!("reading response body from {}", page_url))?;
+    let html = fetch_text(&page_url).with_context(|| format!("fetching {page_url}"))?;
     let document = Html::parse_document(&html);
 
     let harvest = build_metadata_value(metadata_dir, book, &page_url, &document, &html)?;
@@ -637,6 +636,7 @@ fn build_metadata_value(
     };
 
     let status_state = "draft";
+    let stage_tag = stage.clone().unwrap_or_else(|| "base-system".to_string());
 
     let package_json = json!({
         "schema_version": "v0.1.0",
@@ -687,10 +687,7 @@
         "status": {
             "state": status_state,
             "issues": issues,
-            "tags": vec![
-                "25.10".to_string(),
-                stage.unwrap_or("base-system").to_string()
-            ]
+            "tags": vec!["25.10".to_string(), stage_tag.clone()]
         }
     });
@@ -940,15 +937,7 @@ fn refresh_manifest(
     let url = manifest_url(book, &kind)
         .with_context(|| format!("no manifest URL configured for book '{}'", book))?;
 
-    let client = Client::builder().redirect(Policy::limited(5)).build()?;
-    let body = client
-        .get(url)
-        .send()
-        .with_context(|| format!("fetching {}", url))?
-        .error_for_status()
-        .with_context(|| format!("request failed for {}", url))?
-        .text()
-        .with_context(|| format!("reading response body from {}", url))?;
+    let body = fetch_text(url).with_context(|| format!("fetching {url}"))?;
 
     fs::write(&cache_path, &body)
         .with_context(|| format!("caching manifest {}", cache_path.display()))?;
@@ -956,6 +945,17 @@
     Ok(cache_path)
 }
 
+fn fetch_text(url: &str) -> Result<String> {
+    ureq::get(url)
+        .call()
+        .map_err(|err| match err {
+            ureq::Error::Status(code, _) => anyhow!("request failed: HTTP {code}"),
+            other => anyhow!("request failed: {other}"),
+        })?
+        .into_string()
+        .with_context(|| format!("reading response body from {url}"))
+}
+
 fn manifest_url(book: &str, kind: &ManifestKind) -> Option<&'static str> {
     match (book, kind) {
         ("mlfs", ManifestKind::WgetList) => {

---

@@ -1,7 +1,12 @@
+use anyhow::{Context, Result};
 use scraper::{Html, Selector};
 
-pub fn fetch_pre_blocks(url: &str) -> anyhow::Result<Vec<String>> {
-    let body = reqwest::blocking::get(url)?.text()?;
+pub fn fetch_pre_blocks(url: &str) -> Result<Vec<String>> {
+    let body = ureq::get(url)
+        .call()
+        .with_context(|| format!("requesting {url}"))?
+        .into_string()
+        .with_context(|| format!("reading body from {url}"))?;
 
     let document = Html::parse_document(&body);
     let selector = Selector::parse("pre").unwrap();

---

@@ -1,6 +1,5 @@
 use anyhow::{Context, Result};
 use regex::Regex;
-use reqwest::blocking::Client;
 use scraper::{Html, Selector};
 
 use super::{BookPackage, FetchOptions};
@@ -10,14 +9,10 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
     let base = options.base_url.trim_end_matches('/');
     let url = format!("{base}/book.html");
 
-    let client = Client::builder().build().context("building HTTP client")?;
-    let body = client
-        .get(&url)
-        .send()
-        .with_context(|| format!("fetching {}", url))?
-        .error_for_status()
-        .with_context(|| format!("request failed for {}", url))?
-        .text()
+    let body = ureq::get(&url)
+        .call()
+        .with_context(|| format!("fetching {url}"))?
+        .into_string()
         .context("reading response body")?;
 
     parse_book_html(options, &url, &body)

---

@@ -1,6 +1,5 @@
 use anyhow::{Context, Result};
 use regex::Regex;
-use reqwest::blocking::Client;
 use scraper::{Html, Selector};
 
 use super::{BookPackage, FetchOptions};
@@ -10,14 +9,10 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
     let base = options.base_url.trim_end_matches('/');
     let url = format!("{base}/book.html");
 
-    let client = Client::builder().build().context("building HTTP client")?;
-    let body = client
-        .get(&url)
-        .send()
-        .with_context(|| format!("fetching {}", url))?
-        .error_for_status()
-        .with_context(|| format!("request failed for {}", url))?
-        .text()
+    let body = ureq::get(&url)
+        .call()
+        .with_context(|| format!("fetching {url}"))?
+        .into_string()
        .context("reading response body")?;
 
     parse_book_html(options, &url, &body)

---

@@ -1,6 +1,5 @@
 use anyhow::{Context, Result};
 use regex::Regex;
-use reqwest::blocking::Client;
 use scraper::{Html, Selector};
 
 use super::{BookPackage, FetchOptions};
@@ -9,14 +8,10 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
     let base = options.base_url.trim_end_matches('/');
     let url = format!("{base}/book.html");
 
-    let client = Client::builder().build().context("building HTTP client")?;
-    let body = client
-        .get(&url)
-        .send()
-        .with_context(|| format!("fetching {}", url))?
-        .error_for_status()
-        .with_context(|| format!("request failed for {}", url))?
-        .text()
+    let body = ureq::get(&url)
+        .call()
+        .with_context(|| format!("fetching {url}"))?
+        .into_string()
         .context("reading response body")?;
 
     parse_book_html(options, &url, &body)

src/main.rs

@@ -7,7 +7,7 @@ use package_management::{
     db, html, md5_utils,
     pkgs::{
         by_name::bi::binutils::cross_toolchain::build_binutils_from_page,
-        mlfs,
+        generator, mlfs,
         scaffolder::{self, ScaffoldRequest},
     },
     version_check, wget_list,
@@ -134,8 +134,6 @@ enum TuiCommand {
 }
 
 fn main() -> Result<()> {
-    let _ = tracing_subscriber::fmt::try_init();
-
     let cli = Cli::parse();
 
     match cli.command {
@@ -219,10 +217,8 @@
             lfs_root,
             target,
         } => {
-            let runtime = tokio::runtime::Runtime::new().context("Creating async runtime")?;
-            runtime
-                .block_on(build_binutils_from_page(&url, &lfs_root, target))
-                .map_err(|e| anyhow!("Building Binutils using instructions from {url}: {e}"))?;
+            build_binutils_from_page(&url, &lfs_root, target)
+                .with_context(|| format!("Building Binutils using instructions from {url}"))?;
             println!("Binutils workflow completed successfully");
         }
@@ -306,6 +302,14 @@
             let mut created = 0usize;
             let mut skipped = Vec::new();
 
+            let metadata_entries = match mlfs::load_metadata_index() {
+                Ok(entries) => Some(entries),
+                Err(err) => {
+                    eprintln!("[mlfs] metadata index error: {err}");
+                    None
+                }
+            };
+
             let pool = if dry_run {
                 None
             } else {
@@ -313,11 +317,6 @@
             };
 
             for record in records {
-                let module_alias = record.module_alias();
-                if !seen.insert(module_alias.clone()) {
-                    continue;
-                }
-
                 if let Some(limit) = limit {
                     if processed >= limit {
                         break;
@@ -325,15 +324,20 @@
                 }
                 processed += 1;
 
-                if dry_run {
-                    println!(
-                        "Would scaffold {:<18} {:<12} -> {}",
-                        record.name, record.version, module_alias
-                    );
-                    continue;
-                }
-
-                let request = ScaffoldRequest {
+                let metadata_entry = metadata_entries
+                    .as_ref()
+                    .and_then(|entries| mlfs::match_metadata(&record, entries));
+
+                let mut request = if let Some(entry) = metadata_entry {
+                    let path = PathBuf::from("ai/metadata").join(&entry.path);
+                    match generator::request_from_metadata(&path) {
+                        Ok(req) => req,
+                        Err(err) => {
+                            eprintln!(
+                                "[mlfs] metadata apply error for {} {}: {}",
+                                record.name, record.version, err
+                            );
+                            ScaffoldRequest {
                                 name: record.name.clone(),
                                 version: record.version.clone(),
                                 source: None,
@@ -350,9 +354,63 @@
                                 stage: record.stage.clone(),
                                 variant: record.variant.clone(),
                                 notes: record.notes.clone(),
-                    module_override: Some(module_alias.clone()),
+                                module_override: None,
+                            }
+                        }
+                    }
+                } else {
+                    ScaffoldRequest {
+                        name: record.name.clone(),
+                        version: record.version.clone(),
+                        source: None,
+                        md5: None,
+                        configure_args: Vec::new(),
+                        build_commands: Vec::new(),
+                        install_commands: Vec::new(),
+                        dependencies: Vec::new(),
+                        enable_lto: true,
+                        enable_pgo: true,
+                        cflags: Vec::new(),
+                        ldflags: Vec::new(),
+                        profdata: None,
+                        stage: record.stage.clone(),
+                        variant: record.variant.clone(),
+                        notes: record.notes.clone(),
+                        module_override: None,
+                    }
                 };
 
+                if request.stage.is_none() {
+                    request.stage = record.stage.clone();
+                }
+                if request.variant.is_none() {
+                    request.variant = record.variant.clone();
+                }
+                if request.notes.is_none() {
+                    request.notes = record.notes.clone();
+                }
+
+                let module_alias = request
+                    .module_override
+                    .clone()
+                    .unwrap_or_else(|| record.module_alias());
+                if !seen.insert(module_alias.clone()) {
+                    continue;
+                }
+                if request.module_override.is_none() {
+                    request.module_override = Some(module_alias.clone());
+                }
+
+                if dry_run {
+                    println!(
+                        "Would scaffold {:<18} {:<12} -> {}",
+                        record.name, record.version, module_alias
+                    );
+                    continue;
+                }
+
                 match scaffolder::scaffold_package(&base_dir, request) {
                     Ok(result) => {
                         if let Some(pool) = &pool {

md5_utils

@@ -1,16 +1,15 @@
-use anyhow::Result;
-use reqwest::blocking::Client;
-use reqwest::redirect::Policy;
+use anyhow::{Context, Result};
 
 pub fn get_md5sums() -> Result<String> {
-    let client = Client::builder().redirect(Policy::limited(5)).build()?;
-    let res = client
-        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums")
-        .send()?;
-
-    if !res.status().is_success() {
-        anyhow::bail!("Failed to fetch MD5sums: HTTP {}", res.status());
-    }
-
-    Ok(res.text()?)
+    let agent = ureq::AgentBuilder::new().redirects(5).build();
+    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums";
+
+    let response = agent.get(url).call().map_err(|err| match err {
+        ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch MD5sums: HTTP {code}"),
+        other => anyhow::anyhow!("Failed to fetch MD5sums: {other}"),
+    })?;
+
+    response
+        .into_string()
+        .with_context(|| format!("reading body from {url}"))
 }

---

@@ -1,14 +1,11 @@
 use console::Style;
-use reqwest::blocking::Client;
 use scraper::{Html, Selector};
 use std::io::{self, Write};
 
 pub fn fetch_mirrors() -> Result<Vec<String>, Box<dyn std::error::Error>> {
-    let client = Client::new();
-    let res = client
-        .get("https://www.linuxfromscratch.org/lfs/mirrors.html#files")
-        .send()?
-        .text()?;
+    let res = ureq::get("https://www.linuxfromscratch.org/lfs/mirrors.html#files")
+        .call()?
+        .into_string()?;
 
     let document = Html::parse_document(&res);
     let selector = Selector::parse("a[href^='http']").unwrap();

pkgs::by_name::bi::binutils::cross_toolchain

@@ -1,12 +1,13 @@
-// async cross-toolchain runner that uses parser.rs info (no hardcoding)
+// Cross-toolchain runner that uses parser.rs info (no hardcoding).
 use crate::pkgs::by_name::bi::binutils::parser::{BinutilsInfo, fetch_page, parse_binutils};
-use reqwest::Client;
-use shell_words;
+use anyhow::{Context, Result, anyhow};
 use std::{
-    error::Error,
+    fs::{self, File},
+    io,
     path::{Path, PathBuf},
+    process::Command,
 };
-use tokio::process::Command;
-use tracing::{info, warn};
 
 /// Configuration object - uses environment if values omitted.
 #[derive(Debug, Clone)]
@@ -22,14 +23,7 @@ impl BinutilsConfig {
         let lfs_root = lfs_root.as_ref().to_path_buf();
         let target = target
             .or_else(|| std::env::var("LFS_TGT").ok())
-            .unwrap_or_else(|| {
-                // fallback best-effort
-                if cfg!(target_os = "linux") {
-                    "x86_64-lfs-linux-gnu".to_string()
-                } else {
-                    "x86_64-lfs-linux-gnu".to_string()
-                }
-            });
+            .unwrap_or_else(|| "x86_64-lfs-linux-gnu".to_string());
 
         Self {
             lfs_root,
@@ -65,122 +59,50 @@
     }
 }
 
-/// High-level orchestration. Async.
-pub async fn build_binutils_from_page(
+/// High-level orchestration.
+pub fn build_binutils_from_page(
     page_url: &str,
-    lfs_root: impl AsRef<std::path::Path>,
+    lfs_root: impl AsRef<Path>,
     target: Option<String>,
-) -> Result<(), Box<dyn Error>> {
-    // 1) fetch page
-    info!("Fetching page: {}", page_url);
-    let html = fetch_page(page_url).await?;
-    let info = parse_binutils(&html)?;
-    info!("Parsed info: {:?}", info);
-
-    // 2) build config
+) -> Result<()> {
+    println!("Fetching page: {page_url}");
+    let html = fetch_page(page_url).context("fetching binutils instructions")?;
+    let info = parse_binutils(&html).context("parsing binutils instructions")?;
+    println!("Parsed info: {:?}", info);
+
     let cfg = BinutilsConfig::new(lfs_root, target, info.clone());
 
-    // 3) ensure source base dir exists
     let src_base = cfg.source_base_dir();
     if !src_base.exists() {
-        info!("Creating source base dir: {:?}", src_base);
-        tokio::fs::create_dir_all(&src_base).await?;
+        println!("Creating source base dir: {:?}", src_base);
+        fs::create_dir_all(&src_base)
+            .with_context(|| format!("creating source base dir {:?}", src_base))?;
    }
 
-    // 4) find extracted source directory (binutils-*)
-    let mut source_dir: Option<PathBuf> = None;
-    if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
-        while let Some(entry) = rd.next_entry().await? {
-            let ft = entry.file_type().await?;
-            if ft.is_dir() {
-                let name = entry.file_name().to_string_lossy().to_string();
-                if name.to_lowercase().contains("binutils") {
-                    source_dir = Some(entry.path());
-                    break;
-                }
-            }
-        }
-    }
-
-    // 5) if not found, attempt to download & extract
+    let mut source_dir = locate_binutils_dir(&src_base)?;
+
     if source_dir.is_none() {
-        if let Some(dl) = &cfg.info.download_url {
-            info!("No extracted source found; will download {}", dl);
-            // download file into src_base
-            let client = Client::new();
-            let resp = client.get(dl).send().await?;
-            if !resp.status().is_success() {
-                return Err(format!("Download failed: {}", resp.status()).into());
-            }
-            // pick a filename from URL
-            let url_path = url::Url::parse(dl)?;
-            let filename = url_path
-                .path_segments()
-                .and_then(|seg| seg.last())
-                .and_then(|s| {
-                    if !s.is_empty() {
-                        Some(s.to_string())
-                    } else {
-                        None
-                    }
-                })
-                .ok_or("Cannot determine filename from URL")?;
-            let outpath = src_base.join(&filename);
-            info!("Saving archive to {:?}", outpath);
-            let bytes = resp.bytes().await?;
-            tokio::fs::write(&outpath, &bytes).await?;
-            // extract using tar (async spawn). Use absolute path to src_base
-            info!("Extracting archive {:?}", outpath);
-            let tar_path = outpath.clone();
-            let mut tar_cmd = Command::new("tar");
-            tar_cmd.arg("-xf").arg(&tar_path).arg("-C").arg(&src_base);
-            let status = tar_cmd.status().await?;
-            if !status.success() {
-                return Err("tar extraction failed".into());
-            }
-            // look for extracted dir again
-            if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
-                while let Some(entry) = rd.next_entry().await? {
-                    let ft = entry.file_type().await?;
-                    if ft.is_dir() {
-                        let name = entry.file_name().to_string_lossy().to_string();
-                        if name.to_lowercase().contains("binutils") {
-                            source_dir = Some(entry.path());
-                            break;
-                        }
-                    }
-                }
-            }
-        } else {
-            warn!("No download URL found on the page and no unpacked source present.");
-        }
+        source_dir = download_and_extract(&cfg, &src_base)?;
     }
 
-    let source_dir = match source_dir {
-        Some(p) => p,
-        None => return Err("Could not locate or download/extract Binutils source".into()),
-    };
-    info!("Using source dir: {:?}", source_dir);
-
-    // 6) prepare build dir
+    let source_dir = source_dir
+        .ok_or_else(|| anyhow!("Could not locate or download/extract Binutils source"))?;
+    println!("Using source dir: {:?}", source_dir);
+
     let build_dir = cfg.build_dir();
     if !build_dir.exists() {
-        info!("Creating build dir {:?}", build_dir);
-        tokio::fs::create_dir_all(&build_dir).await?;
+        println!("Creating build dir {:?}", build_dir);
+        fs::create_dir_all(&build_dir)
+            .with_context(|| format!("creating build dir {:?}", build_dir))?;
    }
 
-    // 7) run configure: use absolute configure script path in source_dir
     let configure_path = source_dir.join("configure");
     if !configure_path.exists() {
-        return Err(format!("configure script not found at {:?}", configure_path).into());
+        return Err(anyhow!(
+            "configure script not found at {:?}",
+            configure_path
+        ));
    }
 
-    // If parser produced configure args tokens, use them; otherwise fallback to common flags
     let args = if !cfg.info.configure_args.is_empty() {
         cfg.info.configure_args.clone()
     } else {
@@ -193,7 +115,6 @@ pub async fn build_binutils_from_page(
         ]
     };
 
-    // replace $LFS and $LFS_TGT in args
     let args: Vec<String> = args
         .into_iter()
         .map(|a| {
@@ -202,83 +123,126 @@
         })
        .collect();
 
-    info!("Configuring with args: {:?}", args);
-
-    // spawn configure
-    let mut conf_cmd = Command::new(&configure_path);
-    conf_cmd.current_dir(&build_dir);
-    for a in &args {
-        conf_cmd.arg(a);
-    }
-    conf_cmd.stdout(std::process::Stdio::inherit());
-    conf_cmd.stderr(std::process::Stdio::inherit());
-    let status = conf_cmd.status().await?;
-    if !status.success() {
-        return Err("configure step failed".into());
-    }
-    info!("configure completed");
-
-    // 8) run build commands (make-like)
+    println!("Configuring with args: {:?}", args);
+
+    let mut configure_cmd = Command::new(&configure_path);
+    configure_cmd.current_dir(&build_dir);
+    configure_cmd.args(&args);
+    run_command(&mut configure_cmd).context("configure step failed")?;
+    println!("configure completed");
+
     if !cfg.info.build_cmds.is_empty() {
-        for b in &cfg.info.build_cmds {
-            // split into program + args
-            let mut parts = shell_words::split(b).unwrap_or_else(|_| vec![b.clone()]);
-            let prog = parts.remove(0);
-            let mut cmd = Command::new(prog);
-            if !parts.is_empty() {
-                cmd.args(parts);
-            }
-            cmd.current_dir(&build_dir);
-            cmd.stdout(std::process::Stdio::inherit());
-            cmd.stderr(std::process::Stdio::inherit());
-            let status = cmd.status().await?;
-            if !status.success() {
-                return Err(format!("build step failed: {:?}", b).into());
-            }
+        for raw in &cfg.info.build_cmds {
+            run_shell_command(raw, &build_dir)
+                .with_context(|| format!("build step failed: {raw}"))?;
         }
     } else {
-        // fallback to running `make`
-        let mut m = Command::new("make");
-        m.current_dir(&build_dir);
-        m.stdout(std::process::Stdio::inherit());
-        m.stderr(std::process::Stdio::inherit());
-        let status = m.status().await?;
-        if !status.success() {
-            return Err("make failed".into());
-        }
+        let mut make_cmd = Command::new("make");
+        make_cmd.current_dir(&build_dir);
+        run_command(&mut make_cmd).context("make failed")?;
     }
-    info!("build completed");
-
-    // 9) run install commands (make install)
+    println!("build completed");
+
     if !cfg.info.install_cmds.is_empty() {
-        for inst in &cfg.info.install_cmds {
-            let mut parts = shell_words::split(inst).unwrap_or_else(|_| vec![inst.clone()]);
-            let prog = parts.remove(0);
-            let mut cmd = Command::new(prog);
-            if !parts.is_empty() {
-                cmd.args(parts);
-            }
-            cmd.current_dir(&build_dir);
-            cmd.stdout(std::process::Stdio::inherit());
-            cmd.stderr(std::process::Stdio::inherit());
-            let status = cmd.status().await?;
-            if !status.success() {
-                return Err(format!("install step failed: {:?}", inst).into());
-            }
+        for raw in &cfg.info.install_cmds {
+            run_shell_command(raw, &build_dir)
+                .with_context(|| format!("install step failed: {raw}"))?;
         }
     } else {
-        // fallback `make install`
-        let mut mi = Command::new("make");
-        mi.arg("install");
-        mi.current_dir(&build_dir);
-        mi.stdout(std::process::Stdio::inherit());
-        mi.stderr(std::process::Stdio::inherit());
-        let status = mi.status().await?;
-        if !status.success() {
-            return Err("make install failed".into());
-        }
+        let mut install_cmd = Command::new("make");
+        install_cmd.arg("install");
+        install_cmd.current_dir(&build_dir);
+        run_command(&mut install_cmd).context("make install failed")?;
     }
-
-    info!("install completed. Binutils Pass 1 done.");
+    println!("install completed");
 
     Ok(())
 }
+
+fn locate_binutils_dir(base: &Path) -> Result<Option<PathBuf>> {
+    if !base.exists() {
+        return Ok(None);
+    }
+    for entry in fs::read_dir(base).with_context(|| format!("reading directory {:?}", base))? {
+        let entry = entry?;
+        if entry.file_type()?.is_dir() {
+            let name = entry.file_name().to_string_lossy().to_lowercase();
+            if name.contains("binutils") {
+                return Ok(Some(entry.path()));
+            }
+        }
+    }
+    Ok(None)
+}
+
+fn download_and_extract(cfg: &BinutilsConfig, base: &Path) -> Result<Option<PathBuf>> {
+    let url = match cfg.info.download_url.as_deref() {
+        Some(url) => url,
+        None => {
+            eprintln!("No download URL found on the page and no unpacked source present.");
+            return Ok(None);
+        }
+    };
+
+    println!("Downloading {url}");
+    let response = ureq::get(url).call().map_err(|err| match err {
+        ureq::Error::Status(code, _) => anyhow!("Download failed: HTTP {code}"),
+        other => anyhow!("Download failed: {other}"),
+    })?;
+
+    let final_url = response.get_url().to_string();
+    let parsed = url::Url::parse(&final_url)
+        .with_context(|| format!("parsing final download URL {final_url}"))?;
+    let filename = parsed
+        .path_segments()
+        .and_then(|segments| segments.last())
+        .filter(|s| !s.is_empty())
+        .map(|s| s.to_string())
+        .ok_or_else(|| anyhow!("Cannot determine filename from URL"))?;
+
+    let outpath = base.join(&filename);
+    let mut reader = response.into_reader();
+    let mut file =
+        File::create(&outpath).with_context(|| format!("creating archive file {:?}", outpath))?;
+    io::copy(&mut reader, &mut file)
+        .with_context(|| format!("writing archive to {:?}", outpath))?;
+
+    println!("Extracting archive {:?}", outpath);
+    let status = Command::new("tar")
+        .arg("-xf")
+        .arg(&outpath)
+        .arg("-C")
+        .arg(base)
+        .status()
+        .with_context(|| "spawning tar".to_string())?;
+    if !status.success() {
+        return Err(anyhow!("tar extraction failed"));
+    }
+
+    locate_binutils_dir(base)
+}
+
+fn run_command(cmd: &mut Command) -> Result<()> {
+    cmd.stdout(std::process::Stdio::inherit());
+    cmd.stderr(std::process::Stdio::inherit());
+    let status = cmd
+        .status()
+        .with_context(|| "spawning process".to_string())?;
+    if !status.success() {
+        return Err(anyhow!("command exited with status {status}"));
+    }
+    Ok(())
+}
+
+fn run_shell_command(raw: &str, cwd: &Path) -> Result<()> {
+    let mut parts = shell_words::split(raw).unwrap_or_else(|_| vec![raw.to_string()]);
+    if parts.is_empty() {
+        return Ok(());
+    }
+    let prog = parts.remove(0);
+    let mut cmd = Command::new(prog);
    if !parts.is_empty() {
        cmd.args(parts);
    }
    cmd.current_dir(cwd);
    run_command(&mut cmd)
}
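With the async plumbing gone, the orchestration is a plain function call; a hypothetical call site (the URL here is a placeholder, not one from this repo):

    // No tokio runtime needed; errors propagate through anyhow::Result.
    build_binutils_from_page(
        "https://example.org/mlfs/chapter05/binutils-pass1.html", // placeholder
        "/mnt/lfs",
        None, // target falls back to $LFS_TGT, then "x86_64-lfs-linux-gnu"
    )?;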

pkgs::by_name::bi::binutils::parser

@@ -1,7 +1,6 @@
-// async parser for Binutils Pass 1 page
-use reqwest::Client;
+// Parser for Binutils Pass 1 page using lightweight HTTP fetching.
+use anyhow::{Context, Result};
 use scraper::{Html, Selector};
-use std::error::Error;
 
 #[derive(Debug, Clone)]
 pub struct BinutilsInfo {
@@ -34,16 +33,16 @@
     }
 }
 
-/// Fetch page content (async)
-pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
-    let client = Client::new();
-    let res = client.get(url).send().await?;
-    let status = res.status();
-    if !status.is_success() {
-        return Err(format!("Failed to fetch {}: {}", url, status).into());
-    }
-    let text = res.text().await?;
-    Ok(text)
+/// Fetch page content synchronously
+pub fn fetch_page(url: &str) -> Result<String> {
+    ureq::get(url)
+        .call()
+        .map_err(|err| match err {
+            ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch {url}: HTTP {code}"),
+            other => anyhow::anyhow!("Failed to fetch {url}: {other}"),
+        })?
+        .into_string()
+        .with_context(|| format!("reading body from {url}"))
 }
 
@@ -51,7 +50,7 @@ pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
 /// Parse the LFS Binutils pass1 page; robust to small formatting changes.
 /// - finds a download URL ending with .tar.xz/.tar.gz
 /// - finds configure pre block(s), builds token list
 /// - finds `make` / `make install` pre blocks
-pub fn parse_binutils(html: &str) -> Result<BinutilsInfo, Box<dyn Error>> {
+pub fn parse_binutils(html: &str) -> Result<BinutilsInfo> {
     let document = Html::parse_document(html);
     let mut info = BinutilsInfo::default();

pkgs::generator

@@ -34,6 +34,7 @@ struct HarvestedMetadata {
 #[derive(Debug, Deserialize)]
 struct HarvestedSource {
     #[serde(default)]
+    #[allow(dead_code)]
     archive: Option<String>,
     #[serde(default)]
     urls: Vec<HarvestedUrl>,
@@ -67,14 +68,18 @@ struct HarvestedOptimisations {
 #[derive(Debug, Deserialize)]
 struct CommandPhase {
     #[serde(default)]
+    #[allow(dead_code)]
     phase: Option<String>,
     #[serde(default)]
     commands: Vec<String>,
     #[serde(default)]
+    #[allow(dead_code)]
     cwd: Option<String>,
     #[serde(default)]
+    #[allow(dead_code)]
     requires_root: Option<bool>,
     #[serde(default)]
+    #[allow(dead_code)]
     notes: Option<String>,
 }
@@ -97,6 +102,12 @@ pub fn generate_module(
     Ok(result.module_path)
 }
 
+/// Build a scaffolding request directly from a metadata JSON file.
+pub fn request_from_metadata(path: impl AsRef<Path>) -> Result<ScaffoldRequest> {
+    let harvested = parse_metadata(path.as_ref())?;
+    build_request(&harvested)
+}
+
 /// Compute the directory for a module derived from the given metadata.
 pub fn module_directory(
     metadata_path: impl AsRef<Path>,

pkgs::mlfs

@@ -1,4 +1,4 @@
-use std::borrow::Cow;
+use std::{borrow::Cow, fs, path::PathBuf};
 
 use anyhow::{Context, Result, anyhow};
 use serde::{Deserialize, Serialize};
@@ -83,6 +83,30 @@
     }
 }
 
+#[derive(Debug, Deserialize)]
+struct MetadataIndex {
+    packages: Vec<MetadataPackage>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+struct MetadataPackage {
+    book: String,
+    id: String,
+    name: String,
+    path: String,
+    stage: Option<String>,
+    variant: Option<String>,
+}
+
+#[derive(Debug, Clone)]
+pub struct MlfsMetadataEntry {
+    pub id: String,
+    pub name: String,
+    pub stage: Option<String>,
+    pub variant: Option<String>,
+    pub path: PathBuf,
+}
+
 pub fn fetch_catalog(base_url: &str) -> Result<Vec<MlfsPackageRecord>> {
     let options = FetchOptions::new(base_url, BookKind::Mlfs);
     let packages = lfs::fetch_book(&options)?;
@@ -109,8 +133,75 @@ pub fn load_or_fetch_catalog(base_url: Option<&str>) -> Result<Vec<MlfsPackageRecord>> {
     match fetch_catalog(base) {
         Ok(records) => Ok(records),
         Err(err) => {
-            tracing::warn!("mlfs_fetch_error" = %err, "Falling back to cached MLFS package list");
+            eprintln!("[mlfs] fetch error ({err}); falling back to cached MLFS package list");
             load_cached_catalog()
         }
     }
 }
+
+pub fn load_metadata_index() -> Result<Vec<MlfsMetadataEntry>> {
+    let raw = fs::read_to_string("ai/metadata/index.json").context("reading AI metadata index")?;
+    let index: MetadataIndex =
+        serde_json::from_str(&raw).context("parsing AI metadata index JSON")?;
+    let entries = index
+        .packages
+        .into_iter()
+        .filter(|pkg| pkg.book.eq_ignore_ascii_case("mlfs"))
+        .map(|pkg| MlfsMetadataEntry {
+            id: pkg.id,
+            name: pkg.name,
+            stage: pkg.stage,
+            variant: pkg.variant,
+            path: PathBuf::from(pkg.path),
+        })
+        .collect();
+    Ok(entries)
+}
+
+pub fn match_metadata<'a>(
+    record: &MlfsPackageRecord,
+    entries: &'a [MlfsMetadataEntry],
+) -> Option<&'a MlfsMetadataEntry> {
+    let target_name = normalize(&record.name);
+    let target_variant = normalize_opt(record.variant.as_deref());
+    let target_stage = normalize_opt(record.stage.as_deref());
+
+    entries
+        .iter()
+        .filter(|entry| normalize(&entry.name) == target_name)
+        .max_by_key(|entry| {
+            let mut score = 0;
+            if let (Some(tv), Some(ev)) = (&target_variant, normalize_opt(entry.variant.as_deref()))
+            {
+                if *tv == ev {
+                    score += 4;
+                }
+            }
+            if let (Some(ts), Some(es)) = (&target_stage, normalize_opt(entry.stage.as_deref())) {
+                if *ts == es {
+                    score += 2;
+                }
+            }
+            if target_variant.is_none() && entry.variant.is_none() {
+                score += 1;
+            }
+            if target_stage.is_none() && entry.stage.is_none() {
+                score += 1;
+            }
+            score
+        })
+}
+
+fn normalize(value: &str) -> String {
+    value
+        .chars()
+        .filter(|c| c.is_ascii_alphanumeric())
+        .collect::<String>()
+        .to_ascii_lowercase()
+}
+
+fn normalize_opt(value: Option<&str>) -> Option<String> {
+    value.map(normalize)
+}
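A quick check of the matcher's normalization rule (strip non-alphanumerics, lowercase), with hypothetical inputs:

    // "GCC-14.2 (pass 1)" and "gcc 14.2 pass1" collapse to the same key.
    assert_eq!(normalize("GCC-14.2 (pass 1)"), "gcc142pass1");

When several entries share a name, variant agreement (+4) outweighs stage agreement (+2), so a variant-exact match wins ties.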

---

@@ -9,7 +9,6 @@ use crossterm::event::{self, Event, KeyCode};
 use crossterm::execute;
 use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen};
 use gptman::{GPT, GPTPartitionEntry, PartitionName};
-use tracing::{info, warn};
 use tui::{
     Terminal,
     backend::CrosstermBackend,
@@ -27,7 +26,6 @@ use uuid::Uuid;
 /// crossterm = "0.26"
 /// gptman = "2.0"
 /// uuid = { version = "1", features = ["v4"] }
-/// tracing = "0.1"
 pub struct DiskManager;
 
 impl DiskManager {
@@ -148,11 +146,11 @@ impl DiskManager {
             let path = devices[selected_idx].clone();
             match Self::create_partition_tui(&path, &mut term) {
                 Ok(m) => {
-                    info!(target: "disk_manager", "{}", m);
+                    println!("[disk-manager] {}", m);
                     status_msg = m;
                 }
                 Err(e) => {
-                    warn!(target: "disk_manager", "create partition error: {:?}", e);
+                    eprintln!("[disk-manager] create partition error: {e}");
                     status_msg = format!("Create failed: {}", e);
                 }
             }

---

@@ -1,20 +1,25 @@
-use std::io::Stdout;
-use tracing::instrument;
+use crate::tui::animations::{
+    Animation, ProgressAnimation, donut::DonutAnimation, progress::ProgressBarAnimation,
+};
+use rsille::canvas::Canvas;
+use std::{io::Stdout, thread, time::Duration};
 use tui::{
     Terminal,
     backend::CrosstermBackend,
     layout::{Constraint, Direction, Layout},
     style::Style,
     text::Spans,
-    widgets::{Block, Borders, Gauge, List, ListItem},
+    widgets::{Block, Borders, List, ListItem},
 };
 
 use crate::tui::settings::Settings;
 
 pub struct Downloader;
 
+const TARGET_FPS: u64 = 30;
+const FRAME_TIME: Duration = Duration::from_micros(1_000_000 / TARGET_FPS);
+
 impl Downloader {
-    #[instrument(skip(terminal, settings))]
     pub fn show_downloader(
         terminal: &mut Terminal<CrosstermBackend<Stdout>>,
         settings: &Settings,
@@ -22,14 +27,26 @@ impl Downloader {
         let files = vec!["file1.tar.gz", "file2.tar.gz", "file3.tar.gz"];
         let progress = vec![0.3, 0.5, 0.9];
 
+        let mut last_update = std::time::Instant::now();
+
         loop {
+            let frame_start = std::time::Instant::now();
+            let delta = frame_start - last_update;
+            last_update = frame_start;
+
             terminal.draw(|f| {
                 let size = f.size();
                 let chunks = Layout::default()
                     .direction(Direction::Vertical)
                     .margin(2)
-                    .constraints([Constraint::Percentage(70), Constraint::Percentage(30)].as_ref())
+                    .constraints(
+                        [
+                            Constraint::Percentage(50),
+                            Constraint::Percentage(25),
+                            Constraint::Percentage(25),
+                        ]
+                        .as_ref(),
+                    )
                     .split(size);
 
                 let items: Vec<ListItem> = files
@@ -44,16 +61,42 @@
                 );
                 f.render_widget(list, chunks[0]);
 
-                // Progress bar
+                let mut progress_canvas = Canvas::new(chunks[1].width, chunks[1].height);
+                let mut progress_bar = ProgressBarAnimation::new(chunks[1].width, chunks[1].height);
                 for (i, prog) in progress.iter().enumerate() {
-                    let gauge = Gauge::default()
-                        .block(Block::default().title(files[i]))
-                        .gauge_style(Style::default().fg(settings.theme.primary_color()))
-                        .ratio(*prog as f64);
-                    f.render_widget(gauge, chunks[1]);
+                    progress_bar.set_progress(*prog as f64);
+                    progress_bar.render(&mut progress_canvas);
                 }
 
+                // Render progress bar
+                let progress_block = Block::default()
+                    .title(files[0])
+                    .borders(Borders::ALL)
+                    .border_style(Style::default().fg(settings.theme.primary_color()));
+                f.render_widget(progress_block, chunks[1]);
+
+                // Donut animation
+                let mut donut_canvas = Canvas::new(chunks[2].width, chunks[2].height);
+                let mut donut = DonutAnimation::new(chunks[2].width, chunks[2].height);
+                donut.render(&mut donut_canvas);
+
+                // Render donut
+                let donut_block = Block::default()
+                    .title("Progress")
+                    .borders(Borders::ALL)
+                    .border_style(Style::default().fg(settings.theme.secondary_color()));
+                f.render_widget(donut_block, chunks[2]);
             })?;
 
-            break; // remove in real async loop
+            // Frame rate limiting
+            let frame_time = frame_start.elapsed();
+            if frame_time < FRAME_TIME {
+                thread::sleep(FRAME_TIME - frame_time);
+            }
         }
 
         Ok(())

tui::settings

@@ -1,5 +1,4 @@
 use std::io::Stdout;
-use tracing::instrument;
 use tui::{Terminal, backend::CrosstermBackend};
 
 pub struct Settings {
@@ -18,7 +17,6 @@ impl Theme {
 }
 
 impl Settings {
-    #[instrument(skip(_terminal))]
     pub fn show_settings(
         _terminal: &mut Terminal<CrosstermBackend<Stdout>>,
     ) -> Result<(), Box<dyn std::error::Error>> {

wget_list

@@ -1,16 +1,17 @@
-use anyhow::Result;
-use reqwest::blocking::Client;
-use reqwest::redirect::Policy;
+use anyhow::{Context, Result};
 
 pub fn get_wget_list() -> Result<String> {
-    let client = Client::builder().redirect(Policy::limited(5)).build()?;
-    let res = client
-        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv")
-        .send()?;
-
-    if !res.status().is_success() {
-        anyhow::bail!("Failed to fetch wget-list: HTTP {}", res.status());
-    }
-
-    Ok(res.text()?)
+    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv";
+    let agent = ureq::AgentBuilder::new().redirects(5).build();
+
+    agent
+        .get(url)
+        .call()
+        .map_err(|err| match err {
+            ureq::Error::Status(code, _) => {
+                anyhow::anyhow!("Failed to fetch wget-list: HTTP {code}")
+            }
+            other => anyhow::anyhow!("Failed to fetch wget-list: {other}"),
+        })?
+        .into_string()
+        .with_context(|| format!("reading body from {url}"))
 }