refactor: replace reqwest/tokio stack with ureq

parent be84650502
commit 0067162d04

19 changed files with 1136 additions and 1198 deletions
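The change is mechanical across the call sites below: every blocking reqwest client (and the tokio runtime it dragged in) becomes a direct ureq call. A minimal sketch of the recurring before/after pattern, with an illustrative url variable:

    // Before: reqwest's blocking facade, which still builds on tokio.
    // let body = reqwest::blocking::get(url)?.text()?;

    // After: ureq is synchronous end to end; call() sends the request and
    // into_string() reads the body, with non-2xx statuses surfacing as Err.
    let body = ureq::get(url).call()?.into_string()?;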
.cargo/config.toml (new file, 41 lines)

@@ -0,0 +1,41 @@
[profile.dev]
opt-level = 0
debug = true
lto = false
codegen-units = 256
panic = "unwind"
incremental = true

[profile.dev.package."*"]
opt-level = 0

[profile.release]
opt-level = 3
lto = "fat"
codegen-units = 1
panic = "abort"
debug = false
incremental = false

[profile.release.package."*"]
opt-level = 3

[profile.release-pgo-instrument]
inherits = "release"
debug = true
lto = false
incremental = false

[profile.release-pgo-instrument.package."*"]
opt-level = 3

[profile.release-pgo]
inherits = "release"
incremental = false

[profile.release-pgo.package."*"]
opt-level = 3

[alias]
pgo-instrument = "build --profile release-pgo-instrument"
pgo-build = "build --profile release-pgo"
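The two release-pgo profiles split profile-guided optimization into an instrumented build and a final optimized build, and the [alias] table names the steps: cargo pgo-instrument expands to build --profile release-pgo-instrument, and cargo pgo-build to build --profile release-pgo. Presumably the matching -Cprofile-generate/-Cprofile-use RUSTFLAGS are supplied by whatever drives these aliases, since the profiles themselves only toggle debug, LTO, and incremental settings.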
Cargo.lock (generated, 1419 lines changed) — file diff suppressed because it is too large.
Cargo.toml (71 lines changed)

@@ -15,6 +15,9 @@ console = "0.16.1"
# Optional Terminal UI
crossterm = { version = "0.29.0", optional = true }
tui = { version = "0.19.0", optional = true }
rsille = { version = "2.3", optional = true }
gptman = { version = "2.0.1", optional = true }
uuid = { version = "1.18.1", optional = true, features = ["v4"] }

# Parsing & scraping
html_parser = "0.7.0"

@@ -22,78 +25,42 @@ scraper = "0.19.0"
regex = "1.11.3"
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
jsonschema = "0.17.0"
jsonschema = { version = "0.17.0", default-features = false, features = ["draft202012"] }
walkdir = "2.5.0"
chrono = { version = "0.4.38", default-features = false, features = ["clock"] }
sha2 = "0.10.8"

# Utilities
indicatif = "0.18.0"
spinners = "4.1.1"
num_cpus = "1.17.0"
rand = "0.9.2"
rand = { version = "0.9.2", optional = true }
md5 = "0.8.0"

# HTTP
reqwest = { version = "0.12.23", features = ["blocking", "json"] }
semver = "1.0.27"
inquire = "0.9.1"
tracing = "0.1.41"
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.20", features = ["env-filter", "fmt"] }
gptman = "2.0.1"
dialoguer = "0.12.0"
tokio = { version = "1.47.1", features = ["full"] }
juniper = { version = "0.17", optional = true }
actix-web = { version = "4.9", optional = true }
juniper_actix = { version = "0.7", optional = true }
shell-words = "1.1.0"
url = "2.5.7"
uuid = { version = "1.18.1", features = ["v4"] }
hex = "0.4.3"
diesel = { version = "2.1.6", features = ["sqlite", "r2d2", "returning_clauses_for_sqlite_3_35"] }

# Networking
ureq = { version = "2.9.7", features = ["tls", "json"] }


[features]
# TUI feature flag
tui = ["dep:tui", "dep:crossterm"]
tui = ["dep:tui", "dep:crossterm", "dep:rsille", "dep:gptman", "dep:uuid"]

# GraphQL/HTTP server feature flag
graphql = ["dep:juniper", "dep:actix-web", "dep:juniper_actix", "dep:rand"]

# Optional default features
default = []

# -----------------------
# Cargo-make tasks
# -----------------------
[tasks.format]
description = "Format Rust code using rustfmt"
install_crate = "rustfmt"
command = "cargo"
args = ["fmt", "--", "--emit=files"]

[tasks.clean]
description = "Clean build artifacts"
command = "cargo"
args = ["clean"]

[tasks.build]
description = "Build the project"
command = "cargo"
args = ["build"]
dependencies = ["clean"]

[tasks.test]
description = "Run tests"
command = "cargo"
args = ["test"]
dependencies = ["clean"]

[tasks.my-flow]
description = "Run full workflow: format, build, test"
dependencies = ["format", "build", "test"]

[tasks.dev-flow]
description = "Full developer workflow: format, lint, build, test"
dependencies = ["format", "clippy", "build", "test"]

[tasks.release-build]
description = "Build the project in release mode"
command = "cargo"
args = ["build", "--release", "--all-features"]
dependencies = ["clean"]
[[bin]]
name = "graphql_server"
path = "src/bin/graphql_server.rs"
required-features = ["graphql"]
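Net effect on the manifest: reqwest and tokio are gone entirely, with ureq taking over under a new # Networking section. gptman, uuid, rsille, and rand become optional = true and are pulled in through the dep: feature syntax, so a plain cargo build no longer compiles them; cargo build --features tui (or --features graphql) opts back in.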
@@ -2,12 +2,11 @@ use std::collections::HashSet;
use std::fs;
use std::path::{Path, PathBuf};

use anyhow::{Context, Result};
use anyhow::{Context, Result, anyhow};
use chrono::Utc;
use clap::{Parser, Subcommand};
use jsonschema::JSONSchema;
use regex::Regex;
use reqwest::{blocking::Client, redirect::Policy};
use scraper::{ElementRef, Html, Selector};
use serde_json::{Value, json};
use sha2::{Digest, Sha256};

@@ -379,6 +378,16 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result<PackageSummary
        .and_then(Value::as_str)
        .context("missing status.state")?
        .to_string();
    let tags = status
        .get("tags")
        .and_then(Value::as_array)
        .map(|array| {
            array
                .iter()
                .filter_map(|value| value.as_str().map(|s| s.to_string()))
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();

    Ok(PackageSummary {
        schema_version,

@@ -393,6 +402,7 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result<PackageSummary
            .to_str()
            .unwrap_or_default()
            .replace('\\', "/"),
        tags,
    })
}

@@ -409,18 +419,7 @@ fn harvest_metadata(
    override_base: Option<&str>,
) -> Result<HarvestResult> {
    let page_url = resolve_page_url(book, page, override_base)?;
    let client = Client::builder()
        .user_agent("lpkg-metadata-indexer/0.1")
        .build()?;
    let response = client
        .get(&page_url)
        .send()
        .with_context(|| format!("fetching {}", page_url))?
        .error_for_status()
        .with_context(|| format!("non-success status for {}", page_url))?;
    let html = response
        .text()
        .with_context(|| format!("reading response body from {}", page_url))?;
    let html = fetch_text(&page_url).with_context(|| format!("fetching {page_url}"))?;

    let document = Html::parse_document(&html);
    let harvest = build_metadata_value(metadata_dir, book, &page_url, &document, &html)?;

@@ -637,6 +636,7 @@ fn build_metadata_value(
    };

    let status_state = "draft";
    let stage_tag = stage.clone().unwrap_or_else(|| "base-system".to_string());

    let package_json = json!({
        "schema_version": "v0.1.0",

@@ -687,10 +687,7 @@
        "status": {
            "state": status_state,
            "issues": issues,
            "tags": vec![
                "25.10".to_string(),
                stage.unwrap_or("base-system").to_string()
            ]
            "tags": vec!["25.10".to_string(), stage_tag.clone()]
        }
    });

@@ -940,15 +937,7 @@ fn refresh_manifest(
    let url = manifest_url(book, &kind)
        .with_context(|| format!("no manifest URL configured for book '{}'", book))?;

    let client = Client::builder().redirect(Policy::limited(5)).build()?;
    let body = client
        .get(url)
        .send()
        .with_context(|| format!("fetching {}", url))?
        .error_for_status()
        .with_context(|| format!("request failed for {}", url))?
        .text()
        .with_context(|| format!("reading response body from {}", url))?;
    let body = fetch_text(url).with_context(|| format!("fetching {url}"))?;

    fs::write(&cache_path, &body)
        .with_context(|| format!("caching manifest {}", cache_path.display()))?;

@@ -956,6 +945,17 @@ fn refresh_manifest(
    Ok(cache_path)
}

fn fetch_text(url: &str) -> Result<String> {
    ureq::get(url)
        .call()
        .map_err(|err| match err {
            ureq::Error::Status(code, _) => anyhow!("request failed: HTTP {code}"),
            other => anyhow!("request failed: {other}"),
        })?
        .into_string()
        .with_context(|| format!("reading response body from {url}"))
}

fn manifest_url(book: &str, kind: &ManifestKind) -> Option<&'static str> {
    match (book, kind) {
        ("mlfs", ManifestKind::WgetList) => {
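The new fetch_text helper centralizes what reqwest's error_for_status() used to do: in ureq 2.x, call() already returns Err(ureq::Error::Status(code, response)) for 4xx/5xx responses, so matching on Error::Status is the synchronous stand-in. A hypothetical call site, only to show the contract (the URL is illustrative):

    // Ok(body) for 2xx; Err("request failed: HTTP 404") and the like otherwise.
    let html = fetch_text("https://example.org/lfs/book.html")?;
    println!("fetched {} bytes", html.len());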
@@ -1,7 +1,12 @@
use anyhow::{Context, Result};
use scraper::{Html, Selector};

pub fn fetch_pre_blocks(url: &str) -> anyhow::Result<Vec<String>> {
    let body = reqwest::blocking::get(url)?.text()?;
pub fn fetch_pre_blocks(url: &str) -> Result<Vec<String>> {
    let body = ureq::get(url)
        .call()
        .with_context(|| format!("requesting {url}"))?
        .into_string()
        .with_context(|| format!("reading body from {url}"))?;
    let document = Html::parse_document(&body);
    let selector = Selector::parse("pre").unwrap();
@@ -1,6 +1,5 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};

use super::{BookPackage, FetchOptions};

@@ -10,14 +9,10 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
    let base = options.base_url.trim_end_matches('/');
    let url = format!("{base}/book.html");

    let client = Client::builder().build().context("building HTTP client")?;
    let body = client
        .get(&url)
        .send()
        .with_context(|| format!("fetching {}", url))?
        .error_for_status()
        .with_context(|| format!("request failed for {}", url))?
        .text()
    let body = ureq::get(&url)
        .call()
        .with_context(|| format!("fetching {url}"))?
        .into_string()
        .context("reading response body")?;

    parse_book_html(options, &url, &body)
@@ -1,6 +1,5 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};

use super::{BookPackage, FetchOptions};

@@ -10,14 +9,10 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
    let base = options.base_url.trim_end_matches('/');
    let url = format!("{base}/book.html");

    let client = Client::builder().build().context("building HTTP client")?;
    let body = client
        .get(&url)
        .send()
        .with_context(|| format!("fetching {}", url))?
        .error_for_status()
        .with_context(|| format!("request failed for {}", url))?
        .text()
    let body = ureq::get(&url)
        .call()
        .with_context(|| format!("fetching {url}"))?
        .into_string()
        .context("reading response body")?;

    parse_book_html(options, &url, &body)
@@ -1,6 +1,5 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};

use super::{BookPackage, FetchOptions};

@@ -9,14 +8,10 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
    let base = options.base_url.trim_end_matches('/');
    let url = format!("{base}/book.html");

    let client = Client::builder().build().context("building HTTP client")?;
    let body = client
        .get(&url)
        .send()
        .with_context(|| format!("fetching {}", url))?
        .error_for_status()
        .with_context(|| format!("request failed for {}", url))?
        .text()
    let body = ureq::get(&url)
        .call()
        .with_context(|| format!("fetching {url}"))?
        .into_string()
        .context("reading response body")?;

    parse_book_html(options, &url, &body)
src/main.rs (100 lines changed)

@@ -7,7 +7,7 @@ use package_management::{
    db, html, md5_utils,
    pkgs::{
        by_name::bi::binutils::cross_toolchain::build_binutils_from_page,
        mlfs,
        generator, mlfs,
        scaffolder::{self, ScaffoldRequest},
    },
    version_check, wget_list,

@@ -134,8 +134,6 @@ enum TuiCommand {
}

fn main() -> Result<()> {
    let _ = tracing_subscriber::fmt::try_init();

    let cli = Cli::parse();

    match cli.command {

@@ -219,10 +217,8 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
            lfs_root,
            target,
        } => {
            let runtime = tokio::runtime::Runtime::new().context("Creating async runtime")?;
            runtime
                .block_on(build_binutils_from_page(&url, &lfs_root, target))
                .map_err(|e| anyhow!("Building Binutils using instructions from {url}: {e}"))?;
            build_binutils_from_page(&url, &lfs_root, target)
                .with_context(|| format!("Building Binutils using instructions from {url}"))?;

            println!("Binutils workflow completed successfully");
        }

@@ -306,6 +302,14 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
    let mut created = 0usize;
    let mut skipped = Vec::new();

    let metadata_entries = match mlfs::load_metadata_index() {
        Ok(entries) => Some(entries),
        Err(err) => {
            eprintln!("[mlfs] metadata index error: {err}");
            None
        }
    };

    let pool = if dry_run {
        None
    } else {

@@ -313,11 +317,6 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
    };

    for record in records {
        let module_alias = record.module_alias();
        if !seen.insert(module_alias.clone()) {
            continue;
        }

        if let Some(limit) = limit {
            if processed >= limit {
                break;

@@ -325,15 +324,20 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
        }
        processed += 1;

        if dry_run {
            println!(
                "Would scaffold {:<18} {:<12} -> {}",
                record.name, record.version, module_alias
            );
            continue;
        }
        let metadata_entry = metadata_entries
            .as_ref()
            .and_then(|entries| mlfs::match_metadata(&record, entries));

        let request = ScaffoldRequest {
        let mut request = if let Some(entry) = metadata_entry {
            let path = PathBuf::from("ai/metadata").join(&entry.path);
            match generator::request_from_metadata(&path) {
                Ok(req) => req,
                Err(err) => {
                    eprintln!(
                        "[mlfs] metadata apply error for {} {}: {}",
                        record.name, record.version, err
                    );
                    ScaffoldRequest {
                        name: record.name.clone(),
                        version: record.version.clone(),
                        source: None,

@@ -350,9 +354,63 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
            stage: record.stage.clone(),
            variant: record.variant.clone(),
            notes: record.notes.clone(),
            module_override: Some(module_alias.clone()),
                        module_override: None,
                    }
                }
            }
        } else {
            ScaffoldRequest {
                name: record.name.clone(),
                version: record.version.clone(),
                source: None,
                md5: None,
                configure_args: Vec::new(),
                build_commands: Vec::new(),
                install_commands: Vec::new(),
                dependencies: Vec::new(),
                enable_lto: true,
                enable_pgo: true,
                cflags: Vec::new(),
                ldflags: Vec::new(),
                profdata: None,
                stage: record.stage.clone(),
                variant: record.variant.clone(),
                notes: record.notes.clone(),
                module_override: None,
            }
        };

        if request.stage.is_none() {
            request.stage = record.stage.clone();
        }
        if request.variant.is_none() {
            request.variant = record.variant.clone();
        }
        if request.notes.is_none() {
            request.notes = record.notes.clone();
        }

        let module_alias = request
            .module_override
            .clone()
            .unwrap_or_else(|| record.module_alias());

        if !seen.insert(module_alias.clone()) {
            continue;
        }

        if request.module_override.is_none() {
            request.module_override = Some(module_alias.clone());
        }

        if dry_run {
            println!(
                "Would scaffold {:<18} {:<12} -> {}",
                record.name, record.version, module_alias
            );
            continue;
        }

        match scaffolder::scaffold_package(&base_dir, request) {
            Ok(result) => {
                if let Some(pool) = &pool {
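Beyond the HTTP swap, the scaffold loop in src/main.rs now prefers harvested metadata over bare catalog records: it loads ai/metadata/index.json once up front (errors degrade to None rather than aborting), matches each record with mlfs::match_metadata, and builds the ScaffoldRequest via generator::request_from_metadata, falling back to a default-initialized request when metadata is missing or unparseable. Stage, variant, and notes are backfilled from the record, and deduplication on the module alias happens only after any metadata-supplied module_override is known, so overrides are respected.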
@@ -1,16 +1,15 @@
use anyhow::Result;
use reqwest::blocking::Client;
use reqwest::redirect::Policy;
use anyhow::{Context, Result};

pub fn get_md5sums() -> Result<String> {
    let client = Client::builder().redirect(Policy::limited(5)).build()?;
    let res = client
        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums")
        .send()?;
    let agent = ureq::AgentBuilder::new().redirects(5).build();
    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums";

    if !res.status().is_success() {
        anyhow::bail!("Failed to fetch MD5sums: HTTP {}", res.status());
    }
    let response = agent.get(url).call().map_err(|err| match err {
        ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch MD5sums: HTTP {code}"),
        other => anyhow::anyhow!("Failed to fetch MD5sums: {other}"),
    })?;

    Ok(res.text()?)
    response
        .into_string()
        .with_context(|| format!("reading body from {url}"))
}
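One behavioral detail worth noting here: reqwest's Policy::limited(5) is reproduced with ureq::AgentBuilder::new().redirects(5), which caps redirect following at five hops rather than relying on ureq's more permissive default, so the redirect budget of the old client is preserved exactly.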
@@ -1,14 +1,11 @@
use console::Style;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use std::io::{self, Write};

pub fn fetch_mirrors() -> Result<Vec<String>, Box<dyn std::error::Error>> {
    let client = Client::new();
    let res = client
        .get("https://www.linuxfromscratch.org/lfs/mirrors.html#files")
        .send()?
        .text()?;
    let res = ureq::get("https://www.linuxfromscratch.org/lfs/mirrors.html#files")
        .call()?
        .into_string()?;

    let document = Html::parse_document(&res);
    let selector = Selector::parse("a[href^='http']").unwrap();
@@ -1,12 +1,13 @@
// async cross-toolchain runner that uses parser.rs info (no hardcoding)
// Cross-toolchain runner that uses parser.rs info (no hardcoding).
use crate::pkgs::by_name::bi::binutils::parser::{BinutilsInfo, fetch_page, parse_binutils};
use reqwest::Client;
use anyhow::{Context, Result, anyhow};
use shell_words;
use std::{
    error::Error,
    fs::{self, File},
    io,
    path::{Path, PathBuf},
    process::Command,
};
use tokio::process::Command;
use tracing::{info, warn};

/// Configuration object - uses environment if values omitted.
#[derive(Debug, Clone)]

@@ -22,14 +23,7 @@ impl BinutilsConfig {
        let lfs_root = lfs_root.as_ref().to_path_buf();
        let target = target
            .or_else(|| std::env::var("LFS_TGT").ok())
            .unwrap_or_else(|| {
                // fallback best-effort
                if cfg!(target_os = "linux") {
                    "x86_64-lfs-linux-gnu".to_string()
                } else {
                    "x86_64-lfs-linux-gnu".to_string()
                }
            });
            .unwrap_or_else(|| "x86_64-lfs-linux-gnu".to_string());

        Self {
            lfs_root,

@@ -65,122 +59,50 @@ impl BinutilsConfig {
    }
}

/// High-level orchestration. Async.
pub async fn build_binutils_from_page(
/// High-level orchestration.
pub fn build_binutils_from_page(
    page_url: &str,
    lfs_root: impl AsRef<std::path::Path>,
    lfs_root: impl AsRef<Path>,
    target: Option<String>,
) -> Result<(), Box<dyn Error>> {
    // 1) fetch page
    info!("Fetching page: {}", page_url);
    let html = fetch_page(page_url).await?;
    let info = parse_binutils(&html)?;
    info!("Parsed info: {:?}", info);
) -> Result<()> {
    println!("Fetching page: {page_url}");
    let html = fetch_page(page_url).context("fetching binutils instructions")?;
    let info = parse_binutils(&html).context("parsing binutils instructions")?;
    println!("Parsed info: {:?}", info);

    // 2) build config
    let cfg = BinutilsConfig::new(lfs_root, target, info.clone());

    // 3) ensure source base dir exists
    let src_base = cfg.source_base_dir();
    if !src_base.exists() {
        info!("Creating source base dir: {:?}", src_base);
        tokio::fs::create_dir_all(&src_base).await?;
        println!("Creating source base dir: {:?}", src_base);
        fs::create_dir_all(&src_base)
            .with_context(|| format!("creating source base dir {:?}", src_base))?;
    }

    // 4) find extracted source directory (binutils-*)
    let mut source_dir: Option<PathBuf> = None;
    if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
        while let Some(entry) = rd.next_entry().await? {
            let ft = entry.file_type().await?;
            if ft.is_dir() {
                let name = entry.file_name().to_string_lossy().to_string();
                if name.to_lowercase().contains("binutils") {
                    source_dir = Some(entry.path());
                    break;
                }
            }
        }
    }

    // 5) if not found, attempt to download & extract
    let mut source_dir = locate_binutils_dir(&src_base)?;
    if source_dir.is_none() {
        if let Some(dl) = &cfg.info.download_url {
            info!("No extracted source found; will download {}", dl);

            // download file into src_base
            let client = Client::new();
            let resp = client.get(dl).send().await?;
            if !resp.status().is_success() {
                return Err(format!("Download failed: {}", resp.status()).into());
        source_dir = download_and_extract(&cfg, &src_base)?;
    }

            // pick a filename from URL
            let url_path = url::Url::parse(dl)?;
            let filename = url_path
                .path_segments()
                .and_then(|seg| seg.last())
                .and_then(|s| {
                    if !s.is_empty() {
                        Some(s.to_string())
                    } else {
                        None
                    }
                })
                .ok_or("Cannot determine filename from URL")?;
    let source_dir = source_dir
        .ok_or_else(|| anyhow!("Could not locate or download/extract Binutils source"))?;
    println!("Using source dir: {:?}", source_dir);

            let outpath = src_base.join(&filename);
            info!("Saving archive to {:?}", outpath);
            let bytes = resp.bytes().await?;
            tokio::fs::write(&outpath, &bytes).await?;

            // extract using tar (async spawn). Use absolute path to src_base
            info!("Extracting archive {:?}", outpath);
            let tar_path = outpath.clone();
            let mut tar_cmd = Command::new("tar");
            tar_cmd.arg("-xf").arg(&tar_path).arg("-C").arg(&src_base);
            let status = tar_cmd.status().await?;
            if !status.success() {
                return Err("tar extraction failed".into());
            }

            // look for extracted dir again
            if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
                while let Some(entry) = rd.next_entry().await? {
                    let ft = entry.file_type().await?;
                    if ft.is_dir() {
                        let name = entry.file_name().to_string_lossy().to_string();
                        if name.to_lowercase().contains("binutils") {
                            source_dir = Some(entry.path());
                            break;
                        }
                    }
                }
            }
        } else {
            warn!("No download URL found on the page and no unpacked source present.");
        }
    }

    let source_dir = match source_dir {
        Some(p) => p,
        None => return Err("Could not locate or download/extract Binutils source".into()),
    };
    info!("Using source dir: {:?}", source_dir);

    // 6) prepare build dir
    let build_dir = cfg.build_dir();
    if !build_dir.exists() {
        info!("Creating build dir {:?}", build_dir);
        tokio::fs::create_dir_all(&build_dir).await?;
        println!("Creating build dir {:?}", build_dir);
        fs::create_dir_all(&build_dir)
            .with_context(|| format!("creating build dir {:?}", build_dir))?;
    }

    // 7) run configure: use absolute configure script path in source_dir
    let configure_path = source_dir.join("configure");
    if !configure_path.exists() {
        return Err(format!("configure script not found at {:?}", configure_path).into());
        return Err(anyhow!(
            "configure script not found at {:?}",
            configure_path
        ));
    }

    // If parser produced configure args tokens, use them; otherwise fallback to common flags
    let args = if !cfg.info.configure_args.is_empty() {
        cfg.info.configure_args.clone()
    } else {

@@ -193,7 +115,6 @@ pub async fn build_binutils_from_page(
        ]
    };

    // replace $LFS and $LFS_TGT in args
    let args: Vec<String> = args
        .into_iter()
        .map(|a| {

@@ -202,83 +123,126 @@ pub async fn build_binutils_from_page(
        })
        .collect();

    info!("Configuring with args: {:?}", args);
    println!("Configuring with args: {:?}", args);
    let mut configure_cmd = Command::new(&configure_path);
    configure_cmd.current_dir(&build_dir);
    configure_cmd.args(&args);
    run_command(&mut configure_cmd).context("configure step failed")?;
    println!("configure completed");

    // spawn configure
    let mut conf_cmd = Command::new(&configure_path);
    conf_cmd.current_dir(&build_dir);
    for a in &args {
        conf_cmd.arg(a);
    }
    conf_cmd.stdout(std::process::Stdio::inherit());
    conf_cmd.stderr(std::process::Stdio::inherit());
    let status = conf_cmd.status().await?;
    if !status.success() {
        return Err("configure step failed".into());
    }
    info!("configure completed");

    // 8) run build commands (make-like)
    if !cfg.info.build_cmds.is_empty() {
        for b in &cfg.info.build_cmds {
            // split into program + args
            let mut parts = shell_words::split(b).unwrap_or_else(|_| vec![b.clone()]);
            let prog = parts.remove(0);
            let mut cmd = Command::new(prog);
            if !parts.is_empty() {
                cmd.args(parts);
            }
            cmd.current_dir(&build_dir);
            cmd.stdout(std::process::Stdio::inherit());
            cmd.stderr(std::process::Stdio::inherit());
            let status = cmd.status().await?;
            if !status.success() {
                return Err(format!("build step failed: {:?}", b).into());
            }
        for raw in &cfg.info.build_cmds {
            run_shell_command(raw, &build_dir)
                .with_context(|| format!("build step failed: {raw}"))?;
        }
    } else {
        // fallback to running `make`
        let mut m = Command::new("make");
        m.current_dir(&build_dir);
        m.stdout(std::process::Stdio::inherit());
        m.stderr(std::process::Stdio::inherit());
        let status = m.status().await?;
        if !status.success() {
            return Err("make failed".into());
        let mut make_cmd = Command::new("make");
        make_cmd.current_dir(&build_dir);
        run_command(&mut make_cmd).context("make failed")?;
        }
    }
    info!("build completed");
    println!("build completed");

    // 9) run install commands (make install)
    if !cfg.info.install_cmds.is_empty() {
        for inst in &cfg.info.install_cmds {
            let mut parts = shell_words::split(inst).unwrap_or_else(|_| vec![inst.clone()]);
            let prog = parts.remove(0);
            let mut cmd = Command::new(prog);
            if !parts.is_empty() {
                cmd.args(parts);
            }
            cmd.current_dir(&build_dir);
            cmd.stdout(std::process::Stdio::inherit());
            cmd.stderr(std::process::Stdio::inherit());
            let status = cmd.status().await?;
            if !status.success() {
                return Err(format!("install step failed: {:?}", inst).into());
            }
        for raw in &cfg.info.install_cmds {
            run_shell_command(raw, &build_dir)
                .with_context(|| format!("install step failed: {raw}"))?;
        }
    } else {
        // fallback `make install`
        let mut mi = Command::new("make");
        mi.arg("install");
        mi.current_dir(&build_dir);
        mi.stdout(std::process::Stdio::inherit());
        mi.stderr(std::process::Stdio::inherit());
        let status = mi.status().await?;
        if !status.success() {
            return Err("make install failed".into());
        }
        let mut install_cmd = Command::new("make");
        install_cmd.arg("install");
        install_cmd.current_dir(&build_dir);
        run_command(&mut install_cmd).context("make install failed")?;
    }
    println!("install completed");

    info!("install completed. Binutils Pass 1 done.");
    Ok(())
}

fn locate_binutils_dir(base: &Path) -> Result<Option<PathBuf>> {
    if !base.exists() {
        return Ok(None);
    }
    for entry in fs::read_dir(base).with_context(|| format!("reading directory {:?}", base))? {
        let entry = entry?;
        if entry.file_type()?.is_dir() {
            let name = entry.file_name().to_string_lossy().to_lowercase();
            if name.contains("binutils") {
                return Ok(Some(entry.path()));
            }
        }
    }
    Ok(None)
}

fn download_and_extract(cfg: &BinutilsConfig, base: &Path) -> Result<Option<PathBuf>> {
    let url = match cfg.info.download_url.as_deref() {
        Some(url) => url,
        None => {
            eprintln!("No download URL found on the page and no unpacked source present.");
            return Ok(None);
        }
    };

    println!("Downloading {url}");
    let response = ureq::get(url).call().map_err(|err| match err {
        ureq::Error::Status(code, _) => anyhow!("Download failed: HTTP {code}"),
        other => anyhow!("Download failed: {other}"),
    })?;

    let final_url = response.get_url().to_string();
    let parsed = url::Url::parse(&final_url)
        .with_context(|| format!("parsing final download URL {final_url}"))?;
    let filename = parsed
        .path_segments()
        .and_then(|segments| segments.last())
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .ok_or_else(|| anyhow!("Cannot determine filename from URL"))?;

    let outpath = base.join(&filename);
    let mut reader = response.into_reader();
    let mut file =
        File::create(&outpath).with_context(|| format!("creating archive file {:?}", outpath))?;
    io::copy(&mut reader, &mut file)
        .with_context(|| format!("writing archive to {:?}", outpath))?;

    println!("Extracting archive {:?}", outpath);
    let status = Command::new("tar")
        .arg("-xf")
        .arg(&outpath)
        .arg("-C")
        .arg(base)
        .status()
        .with_context(|| "spawning tar".to_string())?;
    if !status.success() {
        return Err(anyhow!("tar extraction failed"));
    }

    locate_binutils_dir(base)
}

fn run_command(cmd: &mut Command) -> Result<()> {
    cmd.stdout(std::process::Stdio::inherit());
    cmd.stderr(std::process::Stdio::inherit());
    let status = cmd
        .status()
        .with_context(|| "spawning process".to_string())?;
    if !status.success() {
        return Err(anyhow!("command exited with status {status}"));
    }
    Ok(())
}

fn run_shell_command(raw: &str, cwd: &Path) -> Result<()> {
    let mut parts = shell_words::split(raw).unwrap_or_else(|_| vec![raw.to_string()]);
    if parts.is_empty() {
        return Ok(());
    }
    let prog = parts.remove(0);
    let mut cmd = Command::new(prog);
    if !parts.is_empty() {
        cmd.args(parts);
    }
    cmd.current_dir(cwd);
    run_command(&mut cmd)
}
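The download path also changes shape: where the async version buffered the whole archive with resp.bytes().await before writing it out, download_and_extract streams response.into_reader() straight into the file with io::copy, so the tarball never has to fit in memory. The same pattern in isolation, as a hedged sketch (URL and destination path are illustrative):

    use std::{fs::File, io};

    // Stream a response body to disk without buffering it in memory:
    // into_reader() yields an impl Read over the body, and io::copy
    // moves it to the file in chunks.
    fn download(url: &str, dest: &str) -> anyhow::Result<()> {
        let response = ureq::get(url).call()?;
        let mut reader = response.into_reader();
        let mut file = File::create(dest)?;
        io::copy(&mut reader, &mut file)?;
        Ok(())
    }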
@@ -1,7 +1,6 @@
// async parser for Binutils Pass 1 page
use reqwest::Client;
// Parser for Binutils Pass 1 page using lightweight HTTP fetching.
use anyhow::{Context, Result};
use scraper::{Html, Selector};
use std::error::Error;

#[derive(Debug, Clone)]
pub struct BinutilsInfo {

@@ -34,16 +33,16 @@ impl Default for BinutilsInfo {
    }
}

/// Fetch page content (async)
pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
    let client = Client::new();
    let res = client.get(url).send().await?;
    let status = res.status();
    if !status.is_success() {
        return Err(format!("Failed to fetch {}: {}", url, status).into());
    }
    let text = res.text().await?;
    Ok(text)
/// Fetch page content synchronously
pub fn fetch_page(url: &str) -> Result<String> {
    ureq::get(url)
        .call()
        .map_err(|err| match err {
            ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch {url}: HTTP {code}"),
            other => anyhow::anyhow!("Failed to fetch {url}: {other}"),
        })?
        .into_string()
        .with_context(|| format!("reading body from {url}"))
}

/// Parse the LFS Binutils pass1 page; robust to small formatting changes.

@@ -51,7 +50,7 @@ pub fn fetch_page(url: &str) -> Result<String> {
/// - finds a download URL ending with .tar.xz/.tar.gz
/// - finds configure pre block(s), builds token list
/// - finds `make` / `make install` pre blocks
pub fn parse_binutils(html: &str) -> Result<BinutilsInfo, Box<dyn Error>> {
pub fn parse_binutils(html: &str) -> Result<BinutilsInfo> {
    let document = Html::parse_document(html);

    let mut info = BinutilsInfo::default();
@@ -34,6 +34,7 @@ struct HarvestedMetadata {
#[derive(Debug, Deserialize)]
struct HarvestedSource {
    #[serde(default)]
    #[allow(dead_code)]
    archive: Option<String>,
    #[serde(default)]
    urls: Vec<HarvestedUrl>,

@@ -67,14 +68,18 @@ struct HarvestedOptimisations {
#[derive(Debug, Deserialize)]
struct CommandPhase {
    #[serde(default)]
    #[allow(dead_code)]
    phase: Option<String>,
    #[serde(default)]
    commands: Vec<String>,
    #[serde(default)]
    #[allow(dead_code)]
    cwd: Option<String>,
    #[serde(default)]
    #[allow(dead_code)]
    requires_root: Option<bool>,
    #[serde(default)]
    #[allow(dead_code)]
    notes: Option<String>,
}

@@ -97,6 +102,12 @@ pub fn generate_module(
    Ok(result.module_path)
}

/// Build a scaffolding request directly from a metadata JSON file.
pub fn request_from_metadata(path: impl AsRef<Path>) -> Result<ScaffoldRequest> {
    let harvested = parse_metadata(path.as_ref())?;
    build_request(&harvested)
}

/// Compute the directory for a module derived from the given metadata.
pub fn module_directory(
    metadata_path: impl AsRef<Path>,
@@ -1,4 +1,4 @@
use std::borrow::Cow;
use std::{borrow::Cow, fs, path::PathBuf};

use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};

@@ -83,6 +83,30 @@ impl MlfsPackageRecord {
    }
}

#[derive(Debug, Deserialize)]
struct MetadataIndex {
    packages: Vec<MetadataPackage>,
}

#[derive(Debug, Clone, Deserialize)]
struct MetadataPackage {
    book: String,
    id: String,
    name: String,
    path: String,
    stage: Option<String>,
    variant: Option<String>,
}

#[derive(Debug, Clone)]
pub struct MlfsMetadataEntry {
    pub id: String,
    pub name: String,
    pub stage: Option<String>,
    pub variant: Option<String>,
    pub path: PathBuf,
}

pub fn fetch_catalog(base_url: &str) -> Result<Vec<MlfsPackageRecord>> {
    let options = FetchOptions::new(base_url, BookKind::Mlfs);
    let packages = lfs::fetch_book(&options)?;

@@ -109,8 +133,75 @@ pub fn load_or_fetch_catalog(base_url: Option<&str>) -> Result<Vec<MlfsPackageRe
    match fetch_catalog(base) {
        Ok(records) => Ok(records),
        Err(err) => {
            tracing::warn!("mlfs_fetch_error" = %err, "Falling back to cached MLFS package list");
            eprintln!("[mlfs] fetch error ({err}); falling back to cached MLFS package list");
            load_cached_catalog()
        }
    }
}

pub fn load_metadata_index() -> Result<Vec<MlfsMetadataEntry>> {
    let raw = fs::read_to_string("ai/metadata/index.json").context("reading AI metadata index")?;
    let index: MetadataIndex =
        serde_json::from_str(&raw).context("parsing AI metadata index JSON")?;

    let entries = index
        .packages
        .into_iter()
        .filter(|pkg| pkg.book.eq_ignore_ascii_case("mlfs"))
        .map(|pkg| MlfsMetadataEntry {
            id: pkg.id,
            name: pkg.name,
            stage: pkg.stage,
            variant: pkg.variant,
            path: PathBuf::from(pkg.path),
        })
        .collect();

    Ok(entries)
}

pub fn match_metadata<'a>(
    record: &MlfsPackageRecord,
    entries: &'a [MlfsMetadataEntry],
) -> Option<&'a MlfsMetadataEntry> {
    let target_name = normalize(&record.name);
    let target_variant = normalize_opt(record.variant.as_deref());
    let target_stage = normalize_opt(record.stage.as_deref());

    entries
        .iter()
        .filter(|entry| normalize(&entry.name) == target_name)
        .max_by_key(|entry| {
            let mut score = 0;
            if let (Some(tv), Some(ev)) = (&target_variant, normalize_opt(entry.variant.as_deref()))
            {
                if *tv == ev {
                    score += 4;
                }
            }
            if let (Some(ts), Some(es)) = (&target_stage, normalize_opt(entry.stage.as_deref())) {
                if *ts == es {
                    score += 2;
                }
            }
            if target_variant.is_none() && entry.variant.is_none() {
                score += 1;
            }
            if target_stage.is_none() && entry.stage.is_none() {
                score += 1;
            }
            score
        })
}

fn normalize(value: &str) -> String {
    value
        .chars()
        .filter(|c| c.is_ascii_alphanumeric())
        .collect::<String>()
        .to_ascii_lowercase()
}

fn normalize_opt(value: Option<&str>) -> Option<String> {
    value.map(normalize)
}
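match_metadata scores each same-named entry additively: a matching variant is worth 4, a matching stage 2, and mutually absent variant or stage 1 each, so the most specifically tagged metadata wins and ties resolve deterministically via max_by_key. normalize() strips non-alphanumerics and lowercases before comparison, so for example "Binutils" and "binutils", or "pass-1" and "pass1", compare equal.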
@@ -9,7 +9,6 @@ use crossterm::event::{self, Event, KeyCode};
use crossterm::execute;
use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen};
use gptman::{GPT, GPTPartitionEntry, PartitionName};
use tracing::{info, warn};
use tui::{
    Terminal,
    backend::CrosstermBackend,

@@ -27,7 +26,6 @@ use uuid::Uuid;
/// crossterm = "0.26"
/// gptman = "2.0"
/// uuid = { version = "1", features = ["v4"] }
/// tracing = "0.1"
pub struct DiskManager;

impl DiskManager {

@@ -148,11 +146,11 @@ impl DiskManager {
                let path = devices[selected_idx].clone();
                match Self::create_partition_tui(&path, &mut term) {
                    Ok(m) => {
                        info!(target: "disk_manager", "{}", m);
                        println!("[disk-manager] {}", m);
                        status_msg = m;
                    }
                    Err(e) => {
                        warn!(target: "disk_manager", "create partition error: {:?}", e);
                        eprintln!("[disk-manager] create partition error: {e}");
                        status_msg = format!("Create failed: {}", e);
                    }
                }
@@ -1,20 +1,25 @@
use std::io::Stdout;
use tracing::instrument;
use crate::tui::animations::{
    Animation, ProgressAnimation, donut::DonutAnimation, progress::ProgressBarAnimation,
};
use rsille::canvas::Canvas;
use std::{io::Stdout, thread, time::Duration};
use tui::{
    Terminal,
    backend::CrosstermBackend,
    layout::{Constraint, Direction, Layout},
    style::Style,
    text::Spans,
    widgets::{Block, Borders, Gauge, List, ListItem},
    widgets::{Block, Borders, List, ListItem},
};

use crate::tui::settings::Settings;

pub struct Downloader;

const TARGET_FPS: u64 = 30;
const FRAME_TIME: Duration = Duration::from_micros(1_000_000 / TARGET_FPS);

impl Downloader {
    #[instrument(skip(terminal, settings))]
    pub fn show_downloader(
        terminal: &mut Terminal<CrosstermBackend<Stdout>>,
        settings: &Settings,

@@ -22,14 +27,26 @@ impl Downloader {
        let files = vec!["file1.tar.gz", "file2.tar.gz", "file3.tar.gz"];
        let progress = vec![0.3, 0.5, 0.9];

        let mut last_update = std::time::Instant::now();
        loop {
            let frame_start = std::time::Instant::now();
            let delta = frame_start - last_update;
            last_update = frame_start;

            terminal.draw(|f| {
                let size = f.size();

                let chunks = Layout::default()
                    .direction(Direction::Vertical)
                    .margin(2)
                    .constraints([Constraint::Percentage(70), Constraint::Percentage(30)].as_ref())
                    .constraints(
                        [
                            Constraint::Percentage(50),
                            Constraint::Percentage(25),
                            Constraint::Percentage(25),
                        ]
                        .as_ref(),
                    )
                    .split(size);

                let items: Vec<ListItem> = files

@@ -44,16 +61,42 @@ impl Downloader {
                );
                f.render_widget(list, chunks[0]);

                // Progress bar
                let mut progress_canvas = Canvas::new(chunks[1].width, chunks[1].height);
                let mut progress_bar = ProgressBarAnimation::new(chunks[1].width, chunks[1].height);

                for (i, prog) in progress.iter().enumerate() {
                    let gauge = Gauge::default()
                        .block(Block::default().title(files[i]))
                        .gauge_style(Style::default().fg(settings.theme.primary_color()))
                        .ratio(*prog as f64);
                    f.render_widget(gauge, chunks[1]);
                    progress_bar.set_progress(*prog as f64);
                    progress_bar.render(&mut progress_canvas);
                }

                // Render progress bar
                let progress_block = Block::default()
                    .title(files[0])
                    .borders(Borders::ALL)
                    .border_style(Style::default().fg(settings.theme.primary_color()));

                f.render_widget(progress_block, chunks[1]);

                // Donut animation
                let mut donut_canvas = Canvas::new(chunks[2].width, chunks[2].height);
                let mut donut = DonutAnimation::new(chunks[2].width, chunks[2].height);
                donut.render(&mut donut_canvas);

                // Render donut
                let donut_block = Block::default()
                    .title("Progress")
                    .borders(Borders::ALL)
                    .border_style(Style::default().fg(settings.theme.secondary_color()));

                f.render_widget(donut_block, chunks[2]);
            })?;

            break; // remove in real async loop
            // Frame rate limiting
            let frame_time = frame_start.elapsed();
            if frame_time < FRAME_TIME {
                thread::sleep(FRAME_TIME - frame_time);
            }
        }

        Ok(())
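The downloader loop is now frame-rate limited rather than drawing once and breaking out: FRAME_TIME works out to 1_000_000 / 30 ≈ 33,333 µs per frame, and whatever time remains after drawing is slept off with thread::sleep, capping the TUI at roughly 30 redraws per second.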
@@ -1,5 +1,4 @@
use std::io::Stdout;
use tracing::instrument;
use tui::{Terminal, backend::CrosstermBackend};

pub struct Settings {

@@ -18,7 +17,6 @@ impl Theme {
}

impl Settings {
    #[instrument(skip(_terminal))]
    pub fn show_settings(
        _terminal: &mut Terminal<CrosstermBackend<Stdout>>,
    ) -> Result<(), Box<dyn std::error::Error>> {
@@ -1,16 +1,17 @@
use anyhow::Result;
use reqwest::blocking::Client;
use reqwest::redirect::Policy;
use anyhow::{Context, Result};

pub fn get_wget_list() -> Result<String> {
    let client = Client::builder().redirect(Policy::limited(5)).build()?;
    let res = client
        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv")
        .send()?;

    if !res.status().is_success() {
        anyhow::bail!("Failed to fetch wget-list: HTTP {}", res.status());
    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv";
    let agent = ureq::AgentBuilder::new().redirects(5).build();
    agent
        .get(url)
        .call()
        .map_err(|err| match err {
            ureq::Error::Status(code, _) => {
                anyhow::anyhow!("Failed to fetch wget-list: HTTP {code}")
            }

    Ok(res.text()?)
            other => anyhow::anyhow!("Failed to fetch wget-list: {other}"),
        })?
        .into_string()
        .with_context(|| format!("reading body from {url}"))
}