refactor: replace reqwest/tokio stack with ureq

m00d 2025-10-01 12:28:04 +02:00
parent be84650502
commit 0067162d04
19 changed files with 1136 additions and 1198 deletions
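
The core of the change is swapping the async reqwest/tokio HTTP path for blocking ureq calls. A minimal sketch of the new fetch pattern, mirroring the fetch_page rewrite below (illustrative names, not code from the commit):

// Before: async reqwest inside a tokio runtime
// let text = reqwest::Client::new().get(url).send().await?.text().await?;

// After: a blocking ureq call, no runtime required
fn fetch(url: &str) -> anyhow::Result<String> {
    let body = ureq::get(url)
        .call()
        .map_err(|err| anyhow::anyhow!("Failed to fetch {url}: {err}"))?
        .into_string()?;
    Ok(body)
}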

View file

@@ -1,12 +1,13 @@
// async cross-toolchain runner that uses parser.rs info (no hardcoding)
// Cross-toolchain runner that uses parser.rs info (no hardcoding).
use crate::pkgs::by_name::bi::binutils::parser::{BinutilsInfo, fetch_page, parse_binutils};
use reqwest::Client;
use anyhow::{Context, Result, anyhow};
use shell_words;
use std::{
error::Error,
fs::{self, File},
io,
path::{Path, PathBuf},
process::Command,
};
use tokio::process::Command;
use tracing::{info, warn};
/// Configuration object - uses environment if values omitted.
#[derive(Debug, Clone)]
@@ -22,14 +23,7 @@ impl BinutilsConfig {
let lfs_root = lfs_root.as_ref().to_path_buf();
let target = target
.or_else(|| std::env::var("LFS_TGT").ok())
.unwrap_or_else(|| {
// fallback best-effort
if cfg!(target_os = "linux") {
"x86_64-lfs-linux-gnu".to_string()
} else {
"x86_64-lfs-linux-gnu".to_string()
}
});
.unwrap_or_else(|| "x86_64-lfs-linux-gnu".to_string());
Self {
lfs_root,
@@ -65,122 +59,50 @@ impl BinutilsConfig {
}
}
/// High-level orchestration. Async.
pub async fn build_binutils_from_page(
/// High-level orchestration.
pub fn build_binutils_from_page(
page_url: &str,
lfs_root: impl AsRef<std::path::Path>,
lfs_root: impl AsRef<Path>,
target: Option<String>,
) -> Result<(), Box<dyn Error>> {
// 1) fetch page
info!("Fetching page: {}", page_url);
let html = fetch_page(page_url).await?;
let info = parse_binutils(&html)?;
info!("Parsed info: {:?}", info);
) -> Result<()> {
println!("Fetching page: {page_url}");
let html = fetch_page(page_url).context("fetching binutils instructions")?;
let info = parse_binutils(&html).context("parsing binutils instructions")?;
println!("Parsed info: {:?}", info);
// 2) build config
let cfg = BinutilsConfig::new(lfs_root, target, info.clone());
// 3) ensure source base dir exists
let src_base = cfg.source_base_dir();
if !src_base.exists() {
info!("Creating source base dir: {:?}", src_base);
tokio::fs::create_dir_all(&src_base).await?;
println!("Creating source base dir: {:?}", src_base);
fs::create_dir_all(&src_base)
.with_context(|| format!("creating source base dir {:?}", src_base))?;
}
// 4) find extracted source directory (binutils-*)
let mut source_dir: Option<PathBuf> = None;
if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
while let Some(entry) = rd.next_entry().await? {
let ft = entry.file_type().await?;
if ft.is_dir() {
let name = entry.file_name().to_string_lossy().to_string();
if name.to_lowercase().contains("binutils") {
source_dir = Some(entry.path());
break;
}
}
}
}
// 5) if not found, attempt to download & extract
let mut source_dir = locate_binutils_dir(&src_base)?;
if source_dir.is_none() {
if let Some(dl) = &cfg.info.download_url {
info!("No extracted source found; will download {}", dl);
// download file into src_base
let client = Client::new();
let resp = client.get(dl).send().await?;
if !resp.status().is_success() {
return Err(format!("Download failed: {}", resp.status()).into());
}
// pick a filename from URL
let url_path = url::Url::parse(dl)?;
let filename = url_path
.path_segments()
.and_then(|seg| seg.last())
.and_then(|s| {
if !s.is_empty() {
Some(s.to_string())
} else {
None
}
})
.ok_or("Cannot determine filename from URL")?;
let outpath = src_base.join(&filename);
info!("Saving archive to {:?}", outpath);
let bytes = resp.bytes().await?;
tokio::fs::write(&outpath, &bytes).await?;
// extract using tar (async spawn). Use absolute path to src_base
info!("Extracting archive {:?}", outpath);
let tar_path = outpath.clone();
let mut tar_cmd = Command::new("tar");
tar_cmd.arg("-xf").arg(&tar_path).arg("-C").arg(&src_base);
let status = tar_cmd.status().await?;
if !status.success() {
return Err("tar extraction failed".into());
}
// look for extracted dir again
if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
while let Some(entry) = rd.next_entry().await? {
let ft = entry.file_type().await?;
if ft.is_dir() {
let name = entry.file_name().to_string_lossy().to_string();
if name.to_lowercase().contains("binutils") {
source_dir = Some(entry.path());
break;
}
}
}
}
} else {
warn!("No download URL found on the page and no unpacked source present.");
}
source_dir = download_and_extract(&cfg, &src_base)?;
}
let source_dir = match source_dir {
Some(p) => p,
None => return Err("Could not locate or download/extract Binutils source".into()),
};
info!("Using source dir: {:?}", source_dir);
let source_dir = source_dir
.ok_or_else(|| anyhow!("Could not locate or download/extract Binutils source"))?;
println!("Using source dir: {:?}", source_dir);
// 6) prepare build dir
let build_dir = cfg.build_dir();
if !build_dir.exists() {
info!("Creating build dir {:?}", build_dir);
tokio::fs::create_dir_all(&build_dir).await?;
println!("Creating build dir {:?}", build_dir);
fs::create_dir_all(&build_dir)
.with_context(|| format!("creating build dir {:?}", build_dir))?;
}
// 7) run configure: use absolute configure script path in source_dir
let configure_path = source_dir.join("configure");
if !configure_path.exists() {
return Err(format!("configure script not found at {:?}", configure_path).into());
return Err(anyhow!(
"configure script not found at {:?}",
configure_path
));
}
// If the parser produced configure args tokens, use them; otherwise fall back to common flags
let args = if !cfg.info.configure_args.is_empty() {
cfg.info.configure_args.clone()
} else {
@@ -193,7 +115,6 @@ pub async fn build_binutils_from_page(
]
};
// replace $LFS and $LFS_TGT in args
let args: Vec<String> = args
.into_iter()
.map(|a| {
@@ -202,83 +123,126 @@ pub async fn build_binutils_from_page(
})
.collect();
info!("Configuring with args: {:?}", args);
println!("Configuring with args: {:?}", args);
let mut configure_cmd = Command::new(&configure_path);
configure_cmd.current_dir(&build_dir);
configure_cmd.args(&args);
run_command(&mut configure_cmd).context("configure step failed")?;
println!("configure completed");
// spawn configure
let mut conf_cmd = Command::new(&configure_path);
conf_cmd.current_dir(&build_dir);
for a in &args {
conf_cmd.arg(a);
}
conf_cmd.stdout(std::process::Stdio::inherit());
conf_cmd.stderr(std::process::Stdio::inherit());
let status = conf_cmd.status().await?;
if !status.success() {
return Err("configure step failed".into());
}
info!("configure completed");
// 8) run build commands (make-like)
if !cfg.info.build_cmds.is_empty() {
for b in &cfg.info.build_cmds {
// split into program + args
let mut parts = shell_words::split(b).unwrap_or_else(|_| vec![b.clone()]);
let prog = parts.remove(0);
let mut cmd = Command::new(prog);
if !parts.is_empty() {
cmd.args(parts);
}
cmd.current_dir(&build_dir);
cmd.stdout(std::process::Stdio::inherit());
cmd.stderr(std::process::Stdio::inherit());
let status = cmd.status().await?;
if !status.success() {
return Err(format!("build step failed: {:?}", b).into());
}
for raw in &cfg.info.build_cmds {
run_shell_command(raw, &build_dir)
.with_context(|| format!("build step failed: {raw}"))?;
}
} else {
// fallback to running `make`
let mut m = Command::new("make");
m.current_dir(&build_dir);
m.stdout(std::process::Stdio::inherit());
m.stderr(std::process::Stdio::inherit());
let status = m.status().await?;
if !status.success() {
return Err("make failed".into());
}
let mut make_cmd = Command::new("make");
make_cmd.current_dir(&build_dir);
run_command(&mut make_cmd).context("make failed")?;
}
info!("build completed");
println!("build completed");
// 9) run install commands (make install)
if !cfg.info.install_cmds.is_empty() {
for inst in &cfg.info.install_cmds {
let mut parts = shell_words::split(inst).unwrap_or_else(|_| vec![inst.clone()]);
let prog = parts.remove(0);
let mut cmd = Command::new(prog);
if !parts.is_empty() {
cmd.args(parts);
}
cmd.current_dir(&build_dir);
cmd.stdout(std::process::Stdio::inherit());
cmd.stderr(std::process::Stdio::inherit());
let status = cmd.status().await?;
if !status.success() {
return Err(format!("install step failed: {:?}", inst).into());
}
for raw in &cfg.info.install_cmds {
run_shell_command(raw, &build_dir)
.with_context(|| format!("install step failed: {raw}"))?;
}
} else {
// fallback `make install`
let mut mi = Command::new("make");
mi.arg("install");
mi.current_dir(&build_dir);
mi.stdout(std::process::Stdio::inherit());
mi.stderr(std::process::Stdio::inherit());
let status = mi.status().await?;
if !status.success() {
return Err("make install failed".into());
}
let mut install_cmd = Command::new("make");
install_cmd.arg("install");
install_cmd.current_dir(&build_dir);
run_command(&mut install_cmd).context("make install failed")?;
}
println!("install completed");
info!("install completed. Binutils Pass 1 done.");
Ok(())
}
fn locate_binutils_dir(base: &Path) -> Result<Option<PathBuf>> {
if !base.exists() {
return Ok(None);
}
for entry in fs::read_dir(base).with_context(|| format!("reading directory {:?}", base))? {
let entry = entry?;
if entry.file_type()?.is_dir() {
let name = entry.file_name().to_string_lossy().to_lowercase();
if name.contains("binutils") {
return Ok(Some(entry.path()));
}
}
}
Ok(None)
}
fn download_and_extract(cfg: &BinutilsConfig, base: &Path) -> Result<Option<PathBuf>> {
let url = match cfg.info.download_url.as_deref() {
Some(url) => url,
None => {
eprintln!("No download URL found on the page and no unpacked source present.");
return Ok(None);
}
};
println!("Downloading {url}");
let response = ureq::get(url).call().map_err(|err| match err {
ureq::Error::Status(code, _) => anyhow!("Download failed: HTTP {code}"),
other => anyhow!("Download failed: {other}"),
})?;
let final_url = response.get_url().to_string();
let parsed = url::Url::parse(&final_url)
.with_context(|| format!("parsing final download URL {final_url}"))?;
let filename = parsed
.path_segments()
.and_then(|segments| segments.last())
.filter(|s| !s.is_empty())
.map(|s| s.to_string())
.ok_or_else(|| anyhow!("Cannot determine filename from URL"))?;
let outpath = base.join(&filename);
let mut reader = response.into_reader();
let mut file =
File::create(&outpath).with_context(|| format!("creating archive file {:?}", outpath))?;
io::copy(&mut reader, &mut file)
.with_context(|| format!("writing archive to {:?}", outpath))?;
println!("Extracting archive {:?}", outpath);
let status = Command::new("tar")
.arg("-xf")
.arg(&outpath)
.arg("-C")
.arg(base)
.status()
.with_context(|| "spawning tar".to_string())?;
if !status.success() {
return Err(anyhow!("tar extraction failed"));
}
locate_binutils_dir(base)
}
fn run_command(cmd: &mut Command) -> Result<()> {
cmd.stdout(std::process::Stdio::inherit());
cmd.stderr(std::process::Stdio::inherit());
let status = cmd
.status()
.with_context(|| "spawning process".to_string())?;
if !status.success() {
return Err(anyhow!("command exited with status {status}"));
}
Ok(())
}
fn run_shell_command(raw: &str, cwd: &Path) -> Result<()> {
let mut parts = shell_words::split(raw).unwrap_or_else(|_| vec![raw.to_string()]);
if parts.is_empty() {
return Ok(());
}
let prog = parts.remove(0);
let mut cmd = Command::new(prog);
if !parts.is_empty() {
cmd.args(parts);
}
cmd.current_dir(cwd);
run_command(&mut cmd)
}
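
With tokio gone, the orchestration entry point is a plain blocking call. A minimal usage sketch, assuming build_binutils_from_page is brought into scope and using placeholder values for the page URL and LFS root (neither is taken from this commit):

fn main() -> anyhow::Result<()> {
    // Placeholder URL and path; real values come from the caller.
    build_binutils_from_page(
        "https://www.linuxfromscratch.org/lfs/view/stable/chapter05/binutils-pass1.html",
        "/mnt/lfs",
        None, // target falls back to $LFS_TGT, then to the default triplet
    )
}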

View file

@@ -1,7 +1,6 @@
// async parser for Binutils Pass 1 page
use reqwest::Client;
// Parser for Binutils Pass 1 page using lightweight HTTP fetching.
use anyhow::{Context, Result};
use scraper::{Html, Selector};
use std::error::Error;
#[derive(Debug, Clone)]
pub struct BinutilsInfo {
@@ -34,16 +33,16 @@ impl Default for BinutilsInfo {
}
}
/// Fetch page content (async)
pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
let client = Client::new();
let res = client.get(url).send().await?;
let status = res.status();
if !status.is_success() {
return Err(format!("Failed to fetch {}: {}", url, status).into());
}
let text = res.text().await?;
Ok(text)
/// Fetch page content synchronously
pub fn fetch_page(url: &str) -> Result<String> {
ureq::get(url)
.call()
.map_err(|err| match err {
ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch {url}: HTTP {code}"),
other => anyhow::anyhow!("Failed to fetch {url}: {other}"),
})?
.into_string()
.with_context(|| format!("reading body from {url}"))
}
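
Both download_and_extract and fetch_page map ureq errors the same way: in ureq 2.x a non-2xx response surfaces as ureq::Error::Status, while transport failures (DNS, TLS, I/O) come through as a different variant. A sketch of factoring that mapping into one helper (the helper is hypothetical, not part of this commit):

use anyhow::{Result, anyhow};

// Hypothetical helper, not from this commit: perform a GET and normalize errors.
fn get_checked(url: &str) -> Result<ureq::Response> {
    ureq::get(url).call().map_err(|err| match err {
        // Non-2xx responses come back as Error::Status.
        ureq::Error::Status(code, _) => anyhow!("GET {url} failed: HTTP {code}"),
        // Anything else is a transport-level failure.
        other => anyhow!("GET {url} failed: {other}"),
    })
}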
/// Parse the LFS Binutils pass1 page; robust to small formatting changes.
@@ -51,7 +50,7 @@ pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
/// - finds a download URL ending with .tar.xz/.tar.gz
/// - finds configure pre block(s), builds token list
/// - finds `make` / `make install` pre blocks
pub fn parse_binutils(html: &str) -> Result<BinutilsInfo, Box<dyn Error>> {
pub fn parse_binutils(html: &str) -> Result<BinutilsInfo> {
let document = Html::parse_document(html);
let mut info = BinutilsInfo::default();