meow
This commit is contained in:
parent
b5dd2df0d3
commit
7424aba439
14 changed files with 1092 additions and 1110 deletions
285
src/pkgs/by_name/bi/binutils/cross_toolchain.rs
Normal file
285
src/pkgs/by_name/bi/binutils/cross_toolchain.rs
Normal file
|
|
@ -0,0 +1,285 @@
|
|||
// async cross-toolchain runner that uses parser.rs info (no hardcoding)
|
||||
use crate::pkgs::by_name::bi::binutils::parser::{BinutilsInfo, fetch_page, parse_binutils};
|
||||
use reqwest::Client;
|
||||
use std::{
|
||||
error::Error,
|
||||
ffi::OsStr,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use tokio::process::Command;
|
||||
use tracing::{error, info, warn};
|
||||
|
||||
/// Configuration object - uses environment if values omitted.
///
/// Built by [`BinutilsConfig::new`], which falls back to the `LFS_TGT`
/// environment variable (and then a constant) when `target` is omitted.
#[derive(Debug, Clone)]
pub struct BinutilsConfig {
    pub lfs_root: PathBuf, // where the LFS tree will be (used for $LFS)
    pub target: String, // LFS_TGT (e.g. x86_64-lfs-linux-gnu)
    pub info: BinutilsInfo, // parsed page info (version, URLs, commands)
}
|
||||
|
||||
impl BinutilsConfig {
|
||||
/// create from env or params. If target is None, tries $LFS_TGT env var.
|
||||
pub fn new(lfs_root: impl AsRef<Path>, target: Option<String>, info: BinutilsInfo) -> Self {
|
||||
let lfs_root = lfs_root.as_ref().to_path_buf();
|
||||
let target = target
|
||||
.or_else(|| std::env::var("LFS_TGT").ok())
|
||||
.unwrap_or_else(|| {
|
||||
// fallback best-effort
|
||||
if cfg!(target_os = "linux") {
|
||||
"x86_64-lfs-linux-gnu".to_string()
|
||||
} else {
|
||||
"x86_64-lfs-linux-gnu".to_string()
|
||||
}
|
||||
});
|
||||
|
||||
Self {
|
||||
lfs_root,
|
||||
target,
|
||||
info,
|
||||
}
|
||||
}
|
||||
|
||||
/// default places (non-hardcoded) where sources live.
|
||||
/// If env `BINUTILS_SRC_DIR` is set, use that; else try LFS layout:
|
||||
/// - $LFS/src/pkgs/by-name/bi/binutils
|
||||
pub fn source_base_dir(&self) -> PathBuf {
|
||||
if let Ok(s) = std::env::var("BINUTILS_SRC_DIR") {
|
||||
PathBuf::from(s)
|
||||
} else {
|
||||
self.lfs_root
|
||||
.join("src")
|
||||
.join("pkgs")
|
||||
.join("by-name")
|
||||
.join("bi")
|
||||
.join("binutils")
|
||||
}
|
||||
}
|
||||
|
||||
/// build directory inside LFS tree (following LFS style)
|
||||
pub fn build_dir(&self) -> PathBuf {
|
||||
self.lfs_root.join("build").join("binutils-pass1")
|
||||
}
|
||||
|
||||
/// install dir (tools)
|
||||
pub fn install_dir(&self) -> PathBuf {
|
||||
self.lfs_root.join("tools")
|
||||
}
|
||||
}
|
||||
|
||||
/// High-level orchestration. Async.
|
||||
pub async fn build_binutils_from_page(
|
||||
page_url: &str,
|
||||
lfs_root: impl AsRef<std::path::Path>,
|
||||
target: Option<String>,
|
||||
) -> Result<(), Box<dyn Error>> {
|
||||
// 1) fetch page
|
||||
info!("Fetching page: {}", page_url);
|
||||
let html = fetch_page(page_url).await?;
|
||||
let info = parse_binutils(&html)?;
|
||||
info!("Parsed info: {:?}", info);
|
||||
|
||||
// 2) build config
|
||||
let cfg = BinutilsConfig::new(lfs_root, target, info.clone());
|
||||
|
||||
// 3) ensure source base dir exists
|
||||
let src_base = cfg.source_base_dir();
|
||||
if !src_base.exists() {
|
||||
info!("Creating source base dir: {:?}", src_base);
|
||||
tokio::fs::create_dir_all(&src_base).await?;
|
||||
}
|
||||
|
||||
// 4) find extracted source directory (binutils-*)
|
||||
let mut source_dir: Option<PathBuf> = None;
|
||||
if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
|
||||
while let Some(entry) = rd.next_entry().await? {
|
||||
let ft = entry.file_type().await?;
|
||||
if ft.is_dir() {
|
||||
let name = entry.file_name().to_string_lossy().to_string();
|
||||
if name.to_lowercase().contains("binutils") {
|
||||
source_dir = Some(entry.path());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 5) if not found, attempt to download & extract
|
||||
if source_dir.is_none() {
|
||||
if let Some(dl) = &cfg.info.download_url {
|
||||
info!("No extracted source found; will download {}", dl);
|
||||
|
||||
// download file into src_base
|
||||
let client = Client::new();
|
||||
let resp = client.get(dl).send().await?;
|
||||
if !resp.status().is_success() {
|
||||
return Err(format!("Download failed: {}", resp.status()).into());
|
||||
}
|
||||
|
||||
// pick a filename from URL
|
||||
let url_path = url::Url::parse(dl)?;
|
||||
let filename = url_path
|
||||
.path_segments()
|
||||
.and_then(|seg| seg.last())
|
||||
.and_then(|s| {
|
||||
if !s.is_empty() {
|
||||
Some(s.to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.ok_or("Cannot determine filename from URL")?;
|
||||
|
||||
let outpath = src_base.join(&filename);
|
||||
info!("Saving archive to {:?}", outpath);
|
||||
let bytes = resp.bytes().await?;
|
||||
tokio::fs::write(&outpath, &bytes).await?;
|
||||
|
||||
// extract using tar (async spawn). Use absolute path to src_base
|
||||
info!("Extracting archive {:?}", outpath);
|
||||
let tar_path = outpath.clone();
|
||||
let mut tar_cmd = Command::new("tar");
|
||||
tar_cmd.arg("-xf").arg(&tar_path).arg("-C").arg(&src_base);
|
||||
let status = tar_cmd.status().await?;
|
||||
if !status.success() {
|
||||
return Err("tar extraction failed".into());
|
||||
}
|
||||
|
||||
// look for extracted dir again
|
||||
if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
|
||||
while let Some(entry) = rd.next_entry().await? {
|
||||
let ft = entry.file_type().await?;
|
||||
if ft.is_dir() {
|
||||
let name = entry.file_name().to_string_lossy().to_string();
|
||||
if name.to_lowercase().contains("binutils") {
|
||||
source_dir = Some(entry.path());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
warn!("No download URL found on the page and no unpacked source present.");
|
||||
}
|
||||
}
|
||||
|
||||
let source_dir = match source_dir {
|
||||
Some(p) => p,
|
||||
None => return Err("Could not locate or download/extract Binutils source".into()),
|
||||
};
|
||||
info!("Using source dir: {:?}", source_dir);
|
||||
|
||||
// 6) prepare build dir
|
||||
let build_dir = cfg.build_dir();
|
||||
if !build_dir.exists() {
|
||||
info!("Creating build dir {:?}", build_dir);
|
||||
tokio::fs::create_dir_all(&build_dir).await?;
|
||||
}
|
||||
|
||||
// 7) run configure: use absolute configure script path in source_dir
|
||||
let configure_path = source_dir.join("configure");
|
||||
if !configure_path.exists() {
|
||||
return Err(format!("configure script not found at {:?}", configure_path).into());
|
||||
}
|
||||
|
||||
// If parser produced configure args tokens, use them; otherwise fallback to common flags
|
||||
let args = if !cfg.info.configure_args.is_empty() {
|
||||
cfg.info.configure_args.clone()
|
||||
} else {
|
||||
vec![
|
||||
format!("--prefix={}", cfg.install_dir().display()),
|
||||
format!("--with-sysroot={}", cfg.lfs_root.display()),
|
||||
format!("--target={}", cfg.target),
|
||||
"--disable-nls".to_string(),
|
||||
"--disable-werror".to_string(),
|
||||
]
|
||||
};
|
||||
|
||||
// replace $LFS and $LFS_TGT in args
|
||||
let args: Vec<String> = args
|
||||
.into_iter()
|
||||
.map(|a| {
|
||||
a.replace("$LFS", &cfg.lfs_root.to_string_lossy())
|
||||
.replace("$LFS_TGT", &cfg.target)
|
||||
})
|
||||
.collect();
|
||||
|
||||
info!("Configuring with args: {:?}", args);
|
||||
|
||||
// spawn configure
|
||||
let mut conf_cmd = Command::new(&configure_path);
|
||||
conf_cmd.current_dir(&build_dir);
|
||||
for a in &args {
|
||||
conf_cmd.arg(a);
|
||||
}
|
||||
conf_cmd.stdout(std::process::Stdio::inherit());
|
||||
conf_cmd.stderr(std::process::Stdio::inherit());
|
||||
let status = conf_cmd.status().await?;
|
||||
if !status.success() {
|
||||
return Err("configure step failed".into());
|
||||
}
|
||||
info!("configure completed");
|
||||
|
||||
// 8) run build commands (make-like)
|
||||
if !cfg.info.build_cmds.is_empty() {
|
||||
for b in &cfg.info.build_cmds {
|
||||
// split into program + args
|
||||
let mut parts = shell_words::split(b).unwrap_or_else(|_| vec![b.clone()]);
|
||||
let prog = parts.remove(0);
|
||||
let mut cmd = Command::new(prog);
|
||||
if !parts.is_empty() {
|
||||
cmd.args(parts);
|
||||
}
|
||||
cmd.current_dir(&build_dir);
|
||||
cmd.stdout(std::process::Stdio::inherit());
|
||||
cmd.stderr(std::process::Stdio::inherit());
|
||||
let status = cmd.status().await?;
|
||||
if !status.success() {
|
||||
return Err(format!("build step failed: {:?}", b).into());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// fallback to running `make`
|
||||
let mut m = Command::new("make");
|
||||
m.current_dir(&build_dir);
|
||||
m.stdout(std::process::Stdio::inherit());
|
||||
m.stderr(std::process::Stdio::inherit());
|
||||
let status = m.status().await?;
|
||||
if !status.success() {
|
||||
return Err("make failed".into());
|
||||
}
|
||||
}
|
||||
info!("build completed");
|
||||
|
||||
// 9) run install commands (make install)
|
||||
if !cfg.info.install_cmds.is_empty() {
|
||||
for inst in &cfg.info.install_cmds {
|
||||
let mut parts = shell_words::split(inst).unwrap_or_else(|_| vec![inst.clone()]);
|
||||
let prog = parts.remove(0);
|
||||
let mut cmd = Command::new(prog);
|
||||
if !parts.is_empty() {
|
||||
cmd.args(parts);
|
||||
}
|
||||
cmd.current_dir(&build_dir);
|
||||
cmd.stdout(std::process::Stdio::inherit());
|
||||
cmd.stderr(std::process::Stdio::inherit());
|
||||
let status = cmd.status().await?;
|
||||
if !status.success() {
|
||||
return Err(format!("install step failed: {:?}", inst).into());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// fallback `make install`
|
||||
let mut mi = Command::new("make");
|
||||
mi.arg("install");
|
||||
mi.current_dir(&build_dir);
|
||||
mi.stdout(std::process::Stdio::inherit());
|
||||
mi.stderr(std::process::Stdio::inherit());
|
||||
let status = mi.status().await?;
|
||||
if !status.success() {
|
||||
return Err("make install failed".into());
|
||||
}
|
||||
}
|
||||
|
||||
info!("install completed. Binutils Pass 1 done.");
|
||||
Ok(())
|
||||
}
|
||||
2
src/pkgs/by_name/bi/binutils/mod.rs
Normal file
2
src/pkgs/by_name/bi/binutils/mod.rs
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
pub mod cross_toolchain;
|
||||
pub mod parser;
|
||||
220
src/pkgs/by_name/bi/binutils/parser.rs
Normal file
220
src/pkgs/by_name/bi/binutils/parser.rs
Normal file
|
|
@ -0,0 +1,220 @@
|
|||
// async parser for Binutils Pass 1 page
|
||||
use reqwest::Client;
|
||||
use scraper::{Html, Selector};
|
||||
use std::error::Error;
|
||||
|
||||
/// Everything extracted from the LFS Binutils Pass 1 page.
/// Missing data is represented as `None` / empty `Vec`, never a sentinel.
#[derive(Debug, Clone)]
pub struct BinutilsInfo {
    /// "2.45" or derived version text
    pub version: Option<String>,
    /// first archive download URL found (.tar.xz or .tar.gz)
    pub download_url: Option<String>,
    /// tokens for configure flags (everything after ../configure)
    pub configure_args: Vec<String>,
    /// build commands discovered (e.g. ["make"])
    pub build_cmds: Vec<String>,
    /// install commands discovered (e.g. ["make install"])
    pub install_cmds: Vec<String>,
    /// optional SBU (approximate build time) text from the page
    pub sbu: Option<String>,
    /// optional required-disk-space text from the page
    pub disk_space: Option<String>,
}
|
||||
|
||||
impl Default for BinutilsInfo {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
version: None,
|
||||
download_url: None,
|
||||
configure_args: Vec::new(),
|
||||
build_cmds: Vec::new(),
|
||||
install_cmds: Vec::new(),
|
||||
sbu: None,
|
||||
disk_space: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch page content (async)
|
||||
pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
|
||||
let client = Client::new();
|
||||
let res = client.get(url).send().await?;
|
||||
let status = res.status();
|
||||
if !status.is_success() {
|
||||
return Err(format!("Failed to fetch {}: {}", url, status).into());
|
||||
}
|
||||
let text = res.text().await?;
|
||||
Ok(text)
|
||||
}
|
||||
|
||||
/// Parse the LFS Binutils pass1 page; robust to small formatting changes.
|
||||
/// - extracts version (from <h1> text like "Binutils-2.45 - Pass 1")
|
||||
/// - finds a download URL ending with .tar.xz/.tar.gz
|
||||
/// - finds configure pre block(s), builds token list
|
||||
/// - finds `make` / `make install` pre blocks
|
||||
pub fn parse_binutils(html: &str) -> Result<BinutilsInfo, Box<dyn Error>> {
|
||||
let document = Html::parse_document(html);
|
||||
|
||||
let mut info = BinutilsInfo::default();
|
||||
|
||||
// 1) Version from h1.sect1 (contains "Binutils-2.45 - Pass 1")
|
||||
if let Ok(h1_sel) = Selector::parse("h1.sect1") {
|
||||
if let Some(h1) = document.select(&h1_sel).next() {
|
||||
let txt = h1.text().collect::<Vec<_>>().join(" ");
|
||||
// try to pick the token containing "Binutils-" or "binutils-"
|
||||
if let Some(tok) = txt
|
||||
.split_whitespace()
|
||||
.find(|s| s.to_lowercase().contains("binutils"))
|
||||
{
|
||||
// extract digits from token, e.g. "Binutils-2.45"
|
||||
if let Some(pos) = tok.find('-') {
|
||||
let ver = tok[pos + 1..]
|
||||
.trim()
|
||||
.trim_matches(|c: char| !c.is_ascii() && c != '.')
|
||||
.to_string();
|
||||
if !ver.is_empty() {
|
||||
info.version = Some(ver);
|
||||
}
|
||||
} else {
|
||||
// fallback: try to find "2.45" somewhere in the h1 string
|
||||
for part in txt.split_whitespace() {
|
||||
if part.chars().next().map(|c| c.is_digit(10)).unwrap_or(false) {
|
||||
info.version = Some(part.trim().to_string());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 2) Download URL: look for anchors with href ending .tar.xz/.tar.gz
|
||||
if let Ok(a_sel) = Selector::parse("a[href]") {
|
||||
for a in document.select(&a_sel) {
|
||||
if let Some(href) = a.value().attr("href") {
|
||||
let href = href.trim();
|
||||
if href.ends_with(".tar.xz") || href.ends_with(".tar.gz") || href.ends_with(".tgz")
|
||||
{
|
||||
// Make absolute if relative to page; the typical LFS pages use relative links like ../../... or ../..
|
||||
// If it's already absolute (starts with http), keep it.
|
||||
let url = href.to_string();
|
||||
info.download_url = Some(url);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 3) Parse "segmentedlist" entries for SBU and disk space
|
||||
if let Ok(segtitle_sel) =
|
||||
Selector::parse("div.package .segmentedlist .seglistitem .seg strong.segtitle")
|
||||
{
|
||||
if let Ok(segbody_sel) =
|
||||
Selector::parse("div.package .segmentedlist .seglistitem .seg span.segbody")
|
||||
{
|
||||
for (t, b) in document
|
||||
.select(&segtitle_sel)
|
||||
.zip(document.select(&segbody_sel))
|
||||
{
|
||||
let title = t.text().collect::<String>().to_lowercase();
|
||||
let body = b.text().collect::<String>().trim().to_string();
|
||||
if title.contains("approximate build time") {
|
||||
info.sbu = Some(body.clone());
|
||||
} else if title.contains("required disk space") {
|
||||
info.disk_space = Some(body.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 4) `pre.kbd.command` blocks for configure & make lines
|
||||
if let Ok(pre_sel) = Selector::parse("div.installation pre.kbd.command, pre.kbd.command") {
|
||||
for pre in document.select(&pre_sel) {
|
||||
let text = pre.text().collect::<Vec<_>>().join("\n");
|
||||
let trimmed = text.trim();
|
||||
|
||||
// handle configure block (starts with ../configure or ./configure)
|
||||
if trimmed.starts_with("../configure")
|
||||
|| trimmed.starts_with("./configure")
|
||||
|| trimmed.starts_with(".. /configure")
|
||||
{
|
||||
// normalize: remove trailing backslashes and join lines
|
||||
let mut joined = String::new();
|
||||
for line in trimmed.lines() {
|
||||
let line = line.trim_end();
|
||||
if line.ends_with('\\') {
|
||||
joined.push_str(line.trim_end_matches('\\').trim());
|
||||
joined.push(' ');
|
||||
} else {
|
||||
joined.push_str(line.trim());
|
||||
joined.push(' ');
|
||||
}
|
||||
}
|
||||
// remove leading "../configure" token and split into args
|
||||
let pieces: Vec<&str> = joined.split_whitespace().collect();
|
||||
let mut args = Vec::new();
|
||||
let mut started = false;
|
||||
for p in pieces {
|
||||
if !started {
|
||||
if p.ends_with("configure")
|
||||
|| p.ends_with("configure")
|
||||
|| p.contains("configure")
|
||||
{
|
||||
started = true;
|
||||
continue;
|
||||
}
|
||||
// skip until configure found
|
||||
continue;
|
||||
} else {
|
||||
args.push(p.to_string());
|
||||
}
|
||||
}
|
||||
// fallback: if no tokens parsed, try chopping first token
|
||||
if args.is_empty() {
|
||||
// attempt to remove the first token (../configure) by index
|
||||
if let Some(pos) = joined.find("configure") {
|
||||
let after = &joined[pos + "configure".len()..];
|
||||
for t in after.split_whitespace() {
|
||||
args.push(t.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
info.configure_args = args
|
||||
.into_iter()
|
||||
.map(|s| s.trim().to_string())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect();
|
||||
continue;
|
||||
}
|
||||
|
||||
// handle make / make install lines
|
||||
// consider blocks that are exactly "make" or "make install" or lines containing them
|
||||
for line in trimmed.lines().map(|l| l.trim()) {
|
||||
if line == "make" {
|
||||
if !info.build_cmds.contains(&"make".to_string()) {
|
||||
info.build_cmds.push("make".to_string());
|
||||
}
|
||||
} else if line == "make install" {
|
||||
if !info.install_cmds.contains(&"make install".to_string()) {
|
||||
info.install_cmds.push("make install".to_string());
|
||||
}
|
||||
} else if line.starts_with("make ") {
|
||||
// e.g., "make -j2"
|
||||
let t = line.to_string();
|
||||
if !info.build_cmds.contains(&t) {
|
||||
info.build_cmds.push(t);
|
||||
}
|
||||
} else if line.starts_with("time {") && line.contains("make") {
|
||||
// handle the time wrapper line in the note; ignore
|
||||
// skip
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// final sanity: if build_cmds empty but install_cmds contains "make install", add "make"
|
||||
if info.build_cmds.is_empty() && !info.install_cmds.is_empty() {
|
||||
info.build_cmds.push("make".to_string());
|
||||
}
|
||||
|
||||
Ok(info)
|
||||
}
|
||||
1
src/pkgs/by_name/bi/mod.rs
Normal file
1
src/pkgs/by_name/bi/mod.rs
Normal file
|
|
@ -0,0 +1 @@
|
|||
pub mod binutils;
|
||||
1
src/pkgs/by_name/mod.rs
Normal file
1
src/pkgs/by_name/mod.rs
Normal file
|
|
@ -0,0 +1 @@
|
|||
pub mod bi;
|
||||
1
src/pkgs/mod.rs
Normal file
1
src/pkgs/mod.rs
Normal file
|
|
@ -0,0 +1 @@
|
|||
pub mod by_name;
|
||||
Loading…
Add table
Add a link
Reference in a new issue