initial commit

Signed-off-by: Fabian Grünbichler <f.gruenbichler@proxmox.com>
Fabian Grünbichler 2022-02-17 17:09:40 +01:00
commit f4f8dff05a
6 changed files with 495 additions and 0 deletions

.cargo/config Normal file (+5 lines)

@@ -0,0 +1,5 @@
[source]
[source.debian-packages]
directory = "/usr/share/cargo/registry"
[source.crates-io]
replace-with = "debian-packages"

Cargo.toml Normal file (+28 lines)

@@ -0,0 +1,28 @@
[package]
name = "proxmox-apt-mirror"
version = "0.1.0"
authors = ["Fabian Grünbichler <f.gruenbichler@proxmox.com>"]
edition = "2021"
license = "AGPL-3"
description = "Proxmox APT repository mirror"
exclude = ["debian"]

[dependencies]
anyhow = "1.0"
bzip2 = "0.4"
flate2 = "1.0.22"
hex = "0.4.3"
hyper = "0.14"
openssl = "0.10"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sequoia-openpgp = { version = "1.7" }
ureq = { version = "2.4.0", features = ["native-certs"] }
xz2 = "0.1"

proxmox-apt = { path = "../proxmox-apt", version = "0.8.0" }
proxmox-async = "0.3"
proxmox-router = { version = "1.1", features = [ "cli" ] }
proxmox-schema = { version = "1.1", features = [ "api-macro" ] }
proxmox-sys = { version = "0.2.2" }

@@ -0,0 +1,83 @@
use anyhow::Error;
use serde_json::Value;
use proxmox_apt::repositories::APTRepository;
use proxmox_router::cli::{
run_cli_command, CliCommand, CliCommandMap, CliEnvironment, OUTPUT_FORMAT,
};
use proxmox_schema::api;
use proxmox_sys::fs::file_get_contents;

#[api(
input: {
properties: {
repository: {
type: String,
description: "Repository string to parse.",
},
key: {
type: String,
description: "Path to repository key."
},
architectures: {
type: Array,
items: {
type: String,
description: "Architecture string (e.g., 'all', 'amd64', ..)",
},
description: "Architectures to mirror (default: 'all' and 'amd64')",
optional: true,
},
path: {
type: String,
description: "Output path. Contents will be re-used if still valid.",
},
"output-format": {
schema: OUTPUT_FORMAT,
optional: true,
},
}
},
returns: {
type: APTRepository,
},
)]
/// Mirror a repository.
async fn mirror(
repository: String,
key: String,
architectures: Option<Vec<String>>,
path: String,
_param: Value,
) -> Result<Value, Error> {
//let output_format = get_output_format(&param);
let repository = proxmox_apt_mirror::parse_repo(repository)?;
let key = file_get_contents(&key)?;
let architectures = architectures.unwrap_or_else(|| vec!["amd64".to_owned(), "all".to_owned()]);
let config = proxmox_apt_mirror::config::MirrorConfig {
repository,
key,
path,
architectures,
};
proxmox_apt_mirror::mirror(&config)?;
Ok(Value::Null)
}

fn main() {
let rpcenv = CliEnvironment::new();
let mirror_cmd_def = CliCommand::new(&API_METHOD_MIRROR);
let cmd_def = CliCommandMap::new().insert("mirror", mirror_cmd_def);
run_cli_command(
cmd_def,
rpcenv,
Some(|future| proxmox_async::runtime::main(future)),
);
}
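
For context, a sketch of how the resulting command line could be driven from Rust (for example in an integration test). The binary name ("proxmox-apt-mirror", taken from the package name), the --<name> option syntax, and all paths and the repository line below are assumptions and placeholders, not part of this commit:

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Assumed binary name and option syntax; repository line, keyring and
    // output path are placeholders.
    let status = Command::new("proxmox-apt-mirror")
        .arg("mirror")
        .args(["--repository", "deb http://deb.debian.org/debian bullseye main"])
        .args(["--key", "/usr/share/keyrings/debian-archive-keyring.gpg"])
        .args(["--path", "/tmp/debian-mirror"])
        .status()?;

    assert!(status.success(), "mirror invocation failed");
    Ok(())
}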

src/config.rs Normal file (+11 lines)

@@ -0,0 +1,11 @@
use std::fmt::Debug;
use proxmox_apt::repositories::APTRepository;

#[derive(Debug)]
pub struct MirrorConfig {
pub repository: APTRepository,
pub architectures: Vec<String>,
pub path: String,
pub key: Vec<u8>,
}
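
The struct maps one-to-one onto the CLI parameters. A minimal library-usage sketch, assuming the crate is consumed directly and using the helpers added in src/lib.rs below; the repository line, keyring path and output directory are placeholders:

use anyhow::Error;
use proxmox_apt_mirror::{config::MirrorConfig, mirror, parse_repo};
use proxmox_sys::fs::file_get_contents;

fn main() -> Result<(), Error> {
    // Placeholder repository line, keyring and target directory.
    let repository = parse_repo("deb http://deb.debian.org/debian bullseye main".to_string())?;
    let key = file_get_contents("/usr/share/keyrings/debian-archive-keyring.gpg")?;

    let config = MirrorConfig {
        repository,
        key,
        path: "/tmp/debian-mirror".to_string(),
        architectures: vec!["amd64".to_string(), "all".to_string()],
    };

    // Fetches and verifies the (In)Release files, then the referenced index files.
    mirror(&config)
}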

src/lib.rs Normal file (+274 lines)

@@ -0,0 +1,274 @@
use std::{collections::HashMap, io::Read, path::PathBuf};
use anyhow::{bail, Error};
use config::MirrorConfig;
use flate2::bufread::GzDecoder;
use proxmox_apt::{
deb822::{CompressionType, FileReference, FileReferenceType, PackagesFile, ReleaseFile},
repositories::{
APTRepository, APTRepositoryFile, APTRepositoryFileType, APTRepositoryPackageType,
},
};
use proxmox_sys::fs::{create_path, file_get_contents, replace_file, CreateOptions};

pub mod config;
mod verifier;

/// Parse a single line in sources.list format
pub fn parse_repo(repo: String) -> Result<APTRepository, Error> {
let mut repo = APTRepositoryFile::with_content(repo, APTRepositoryFileType::List);
repo.parse()?;
Ok(repo.repositories[0].clone())
}

fn get_repo_url(repo: &APTRepository, path: &str) -> String {
let repo_root = format!("{}/dists/{}", repo.uris[0], repo.suites[0]);
format!("{}/{}", repo_root, path)
}

fn fetch_repo_file(uri: &str, max_size: Option<u64>) -> Result<Vec<u8>, Error> {
println!("-> GET '{}'..", uri);
let response = ureq::get(uri).call()?.into_reader();
let mut content = Vec::new();
let bytes = response
.take(max_size.unwrap_or(10_000_000))
.read_to_end(&mut content)?;
println!("<- GOT {} bytes", bytes);
Ok(content)
}

pub fn fetch_release(
repo: &APTRepository,
key: &[u8],
output_dir: Option<&PathBuf>,
detached: bool,
) -> Result<ReleaseFile, Error> {
let (name, content, sig) = if detached {
println!("Fetching Release/Release.gpg files");
let sig = fetch_repo_file(&get_repo_url(repo, "Release.gpg"), None)?;
let content = fetch_repo_file(&get_repo_url(repo, "Release"), Some(32_000_000))?;
("Release(.gpg)", content, Some(sig))
} else {
println!("Fetching InRelease file");
let content = fetch_repo_file(&get_repo_url(repo, "InRelease"), Some(32_000_000))?;
("InRelease", content, None)
};
println!("Verifying '{name}' signature using provided repository key..");
let verified = verifier::verify_signature(&content[..], key, sig.as_deref())?;
println!("Success");
println!("Parsing '{name}'..");
let parsed: ReleaseFile = verified[..].try_into()?;
println!(
"'{name}' file has {} referenced files..",
parsed.files.len()
);
if let Some(output_dir) = output_dir {
if detached {
let mut release_file = output_dir.clone();
release_file.push("Release");
replace_file(release_file, &content, CreateOptions::default(), true)?;
let mut release_sig = output_dir.clone();
release_sig.push("Release.gpg");
replace_file(release_sig, &sig.unwrap(), CreateOptions::default(), true)?;
} else {
let mut in_release = output_dir.clone();
in_release.push("InRelease");
replace_file(in_release, &content, CreateOptions::default(), true)?;
}
}
Ok(parsed)
}

pub fn fetch_referenced_file(
repo: &APTRepository,
output_dir: Option<&PathBuf>,
reference: &FileReference,
) -> Result<Vec<u8>, Error> {
let mut output = None;
let existing = if let Some(output_dir) = output_dir {
let mut path = output_dir.clone();
path.push(&reference.path);
create_path(&path.parent().unwrap(), None, None)?;
output = Some(path.clone());
if let Ok(raw) = file_get_contents(&path) {
if let Ok(()) = reference.checksums.verify(&raw) {
output = None;
Some(raw)
} else {
None
}
} else {
None
}
} else {
None
};
let raw = if let Some(existing) = existing {
println!("Reused existing file '{}'", reference.path);
existing
} else {
let new = fetch_repo_file(&get_repo_url(repo, &reference.path), Some(100_000_000))?;
reference.checksums.verify(&new)?;
new
};
let mut buf = Vec::new();
let decompressed = match reference.file_type.compression() {
None => &raw[..],
Some(CompressionType::Gzip) => {
let mut gz = GzDecoder::new(&raw[..]);
gz.read_to_end(&mut buf)?;
&buf[..]
}
Some(CompressionType::Bzip2) => {
let mut bz = bzip2::read::BzDecoder::new(&raw[..]);
bz.read_to_end(&mut buf)?;
&buf[..]
}
Some(CompressionType::Lzma) | Some(CompressionType::Xz) => {
let mut xz = xz2::read::XzDecoder::new(&raw[..]);
xz.read_to_end(&mut buf)?;
&buf[..]
}
};
if let Some(path) = output {
replace_file(path, &raw[..], CreateOptions::default(), true)?;
}
Ok(decompressed.to_owned())
}

pub fn mirror(config: &MirrorConfig) -> Result<(), Error> {
let repo = &config.repository;
let output_dir = PathBuf::from(&config.path);
if !output_dir.exists() {
proxmox_sys::fs::create_dir(&output_dir, CreateOptions::default())?;
}
let release = fetch_release(repo, &config.key[..], Some(&output_dir), true)?;
let _release2 = fetch_release(repo, &config.key[..], Some(&output_dir), false)?;
let mut per_component = HashMap::new();
let mut others = Vec::new();
let binary = repo.types.contains(&APTRepositoryPackageType::Deb);
let source = repo.types.contains(&APTRepositoryPackageType::DebSrc);
for (basename, references) in &release.files {
let reference = references.first();
let reference = if let Some(reference) = reference {
reference.clone()
} else {
continue;
};
let skip_components = !repo.components.contains(&reference.component);
// TODO make arch filtering some proper thing
let skip = skip_components
|| match &reference.file_type {
FileReferenceType::Contents(arch, _)
| FileReferenceType::ContentsUdeb(arch, _)
| FileReferenceType::Packages(arch, _) => {
!binary || !config.architectures.contains(arch)
}
FileReferenceType::Sources(_) => !source,
_ => false,
};
if skip {
println!("Skipping {}", reference.path);
others.push(reference);
} else {
let list = per_component
.entry(reference.component)
.or_insert_with(Vec::new);
list.push(basename);
}
}
println!();
let mut indices_size = 0_usize;
let mut total_count = 0;
for (component, references) in &per_component {
println!("Component '{component}'");
let mut component_indices_size = 0;
for basename in references {
for reference in release.files.get(*basename).unwrap() {
println!("\t{:?}: {:?}", reference.path, reference.file_type);
component_indices_size += reference.size;
}
}
indices_size += component_indices_size;
let component_count = references.len();
total_count += component_count;
println!("Component references count: {component_count}");
println!("Component indices size: {component_indices_size}");
if references.is_empty() {
println!("\tNo references found..");
}
}
println!("Total indices count: {total_count}");
println!("Total indices size: {indices_size}");
if !others.is_empty() {
println!("Skipped {} references", others.len());
}
println!();
let mut packages_size = 0_usize;
for (component, references) in per_component {
println!("Fetching indices for component '{component}'");
let mut component_deb_size = 0;
for basename in &references {
let mut wrote_decompressed = false;
for reference in release.files.get(*basename).unwrap() {
match fetch_referenced_file(repo, Some(&output_dir), reference) {
Ok(data) => {
if !wrote_decompressed {
let mut path = output_dir.clone();
path.push(basename);
replace_file(path, &data[..], CreateOptions::default(), true)?;
wrote_decompressed = true;
}
if matches!(
reference.file_type,
FileReferenceType::Packages(_, Some(CompressionType::Gzip))
) {
let packages: PackagesFile = data[..].try_into()?;
let size: usize = packages.files.iter().map(|p| p.size).sum();
println!("\t{} packages totalling {size}", packages.files.len());
component_deb_size += size;
}
}
Err(err) => {
eprintln!("Failed to fetch {} - {}", reference.path, err);
}
};
}
if !wrote_decompressed {
bail!("Failed to write raw file..");
}
}
println!("Total deb size for component: {component_deb_size}");
packages_size += component_deb_size;
}
println!("Total deb size: {packages_size}");
Ok(())
}
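
To inspect what a repository references without writing anything to disk, fetch_release can also be called on its own. A sketch assuming the inline-signed InRelease variant, with placeholder repository line and keyring path:

use anyhow::Error;
use proxmox_apt_mirror::{fetch_release, parse_repo};
use proxmox_sys::fs::file_get_contents;

fn main() -> Result<(), Error> {
    // Placeholder repository line and keyring path.
    let repo = parse_repo("deb http://deb.debian.org/debian bullseye main".to_string())?;
    let key = file_get_contents("/usr/share/keyrings/debian-archive-keyring.gpg")?;

    // No output directory, so nothing is persisted; `detached = false` fetches
    // and verifies the inline-signed InRelease file.
    let release = fetch_release(&repo, &key, None, false)?;

    for (basename, references) in &release.files {
        for reference in references {
            println!("{basename}: {} ({} bytes)", reference.path, reference.size);
        }
    }

    Ok(())
}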

src/verifier.rs Normal file (+94 lines)

@@ -0,0 +1,94 @@
use anyhow::{bail, Error};
use sequoia_openpgp::{
parse::{
stream::{
DetachedVerifierBuilder, MessageLayer, MessageStructure, VerificationHelper,
VerifierBuilder,
},
Parse,
},
policy::StandardPolicy,
Cert, KeyHandle,
};
use std::io;

struct Helper<'a> {
cert: &'a Cert,
}

impl<'a> VerificationHelper for Helper<'a> {
fn get_certs(&mut self, _ids: &[KeyHandle]) -> sequoia_openpgp::Result<Vec<Cert>> {
// Return public keys for signature verification here.
Ok(vec![self.cert.clone()])
}
fn check(&mut self, structure: MessageStructure) -> sequoia_openpgp::Result<()> {
// In this function, we implement our signature verification policy.
let mut good = false;
// we don't want compression and/or encryption
if structure.len() > 1 || structure.is_empty() {
bail!(
"unexpected GPG message structure - expected plain signed data, got {} layers!",
structure.len()
);
}
let layer = &structure[0];
let mut errors = Vec::new();
match layer {
MessageLayer::SignatureGroup { results } => {
// We possibly have multiple signatures, but not all keys, so `or` all the individual results.
for result in results {
match result {
Ok(_) => good = true,
Err(e) => errors.push(e),
}
}
}
_ => return Err(anyhow::anyhow!("Unexpected message structure")),
}
if good {
Ok(()) // Good signature.
} else if errors.len() > 1 {
Err(anyhow::anyhow!("encountered {} errors", errors.len()))
} else {
Err(anyhow::anyhow!("Signature verification failed"))
}
}
}

pub(crate) fn verify_signature<'msg>(
msg: &'msg [u8],
key: &[u8],
detached_sig: Option<&[u8]>,
) -> Result<Vec<u8>, Error> {
let cert = Cert::from_bytes(key)?;
let policy = StandardPolicy::new();
let helper = Helper { cert: &cert };
let verified = if let Some(sig) = detached_sig {
let mut verifier =
DetachedVerifierBuilder::from_bytes(sig)?.with_policy(&policy, None, helper)?;
verifier.verify_bytes(msg)?;
msg.to_vec()
} else {
let mut verified = Vec::new();
let mut verifier = VerifierBuilder::from_bytes(msg)?
.with_policy(&policy, None, helper)
.map_err(|err| {
println!("{:#?}", err);
err
})?;
let bytes = io::copy(&mut verifier, &mut verified)?;
println!("{bytes} bytes verified");
if !verifier.message_processed() {
bail!("Failed to verify message!");
}
verified
};
Ok(verified)
}
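
verify_signature is pub(crate), so callers have to live inside the crate. A minimal sketch of the detached case (the layout fetch_release uses for Release/Release.gpg), assuming a hypothetical helper in some module of this crate; all file paths are placeholders:

use anyhow::Error;
use proxmox_sys::fs::file_get_contents;

// Hypothetical crate-internal helper: verify a detached Release.gpg signature
// over a previously downloaded Release file and get back the verified payload.
fn verify_detached_release(key_path: &str) -> Result<Vec<u8>, Error> {
    let key = file_get_contents(key_path)?;
    let release = file_get_contents("Release")?;
    let sig = file_get_contents("Release.gpg")?;

    // `Some(..)` selects the DetachedVerifierBuilder path above; on success the
    // unmodified Release content is returned.
    crate::verifier::verify_signature(&release, &key, Some(&sig[..]))
}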