mono commit of first work
This commit is contained in:
parent
7e34961d82
commit
ba36e3b7fc
5 changed files with 886 additions and 0 deletions
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -20,3 +20,7 @@ Cargo.lock
|
|||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
# Used for debugging
|
||||
/index/
|
||||
/core/
|
3
.vscode/settings.json
vendored
Normal file
3
.vscode/settings.json
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"rust-analyzer.check.command": "clippy",
|
||||
}
|
23
Cargo.toml
Normal file
23
Cargo.toml
Normal file
|
@ -0,0 +1,23 @@
|
|||
[package]
|
||||
name = "pacwoman"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[[bin]]
|
||||
name = "pacwoman-bin"
|
||||
path = "src/main.rs"
|
||||
|
||||
[lib]
|
||||
name = "pacwoman"
|
||||
path = "src/lib/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
base64 = "0.22.1"
|
||||
bytes = "1.10.1"
|
||||
flate2 = "1.1.0"
|
||||
reqwest = { version = "0.12.12", features = ["blocking"] }
|
||||
sha256 = "1.6.0"
|
||||
tar = "0.4.44"
|
||||
time = { version = "0.3.39", features = ["formatting"] }
|
||||
url = "2.5.4"
|
||||
users = "0.11.0"
|
839
src/lib/lib.rs
Normal file
839
src/lib/lib.rs
Normal file
|
@ -0,0 +1,839 @@
|
|||
//! Pacwoman library.
|
||||
#![warn(
|
||||
missing_docs,
|
||||
clippy::missing_docs_in_private_items,
|
||||
clippy::empty_docs,
|
||||
clippy::missing_panics_doc
|
||||
)]
|
||||
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
io::{Read, Write},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use base64::Engine;
|
||||
use bytes::Buf;
|
||||
|
||||
/// A descriptor of a repository: which repo, for which architecture, and
/// optionally which snapshot (db-file hash) of that repo.
#[derive(Clone, Default, Debug)]
pub struct RepoDescriptor {
    /// The repo name (i.e. core, extra, multilib).
    repo: String,
    /// The architecture to use (i.e. x86_64).
    arch: String,
    /// The hash of the (repo).db file, when pinned to a specific snapshot.
    /// `None` means "whichever snapshot the un-hashed symlink points at".
    hash: Option<Vec<u8>>
}
|
||||
|
||||
impl RepoDescriptor {
|
||||
/// Creates a new [RepoDescriptor].
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// The repo(i.e. core, extra, multilib)
|
||||
pub fn repo(&self) -> String {
|
||||
self.repo.clone()
|
||||
}
|
||||
/// Sets the repo.
|
||||
pub fn set_repo(&mut self, repo: String) -> &mut Self {
|
||||
self.repo = repo;
|
||||
self
|
||||
}
|
||||
|
||||
/// The architecture to use.
|
||||
pub fn arch(&self) -> String {
|
||||
self.arch.clone()
|
||||
}
|
||||
/// Sets [`arch`].
|
||||
/// [`arch`]: [RepoDescriptor::arch]
|
||||
pub fn set_arch(&mut self, arch: String) -> &mut Self {
|
||||
self.arch = arch;
|
||||
self
|
||||
}
|
||||
|
||||
/// The hash of the (repo).db file.
|
||||
pub fn hash(&self) -> Option<Vec<u8>> {
|
||||
self.hash.clone()
|
||||
}
|
||||
/// Sets [`hash`].
|
||||
/// [`hash`]: [RepoDescriptor::hash]
|
||||
pub fn set_hash(&mut self, hash: Option<Vec<u8>>) -> &mut Self {
|
||||
self.hash = hash;
|
||||
self
|
||||
}
|
||||
|
||||
/// Formats the repo descriptor into a string.
|
||||
pub fn format(&self) -> String {
|
||||
if self.hash().is_some() {
|
||||
format!("{:x?}-{}-{}", self.hash().unwrap(), self.repo(), self.arch())
|
||||
} else {
|
||||
format!("{}-{}", self.repo(), self.arch())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The identifier for a package: the repository it lives in plus its filename
/// within that repository. Used to locate the package.
#[derive(Clone)]
pub struct Package {
    /// The repository the package belongs to (i.e. core, extra, multilib).
    repo: RepoDescriptor,
    /// The filename of the package within the repository.
    filename: String,
}
|
||||
|
||||
impl Default for Package {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Package {
|
||||
/// Creates a new [Package].
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
repo: RepoDescriptor::new(),
|
||||
filename: String::new(),
|
||||
}
|
||||
}
|
||||
/// The repo(i.e. core, extra, multilib)
|
||||
pub fn repo(&self) -> RepoDescriptor {
|
||||
self.repo.clone()
|
||||
}
|
||||
/// Sets the repo.
|
||||
pub fn set_repo(&mut self, repo: RepoDescriptor) -> &mut Self {
|
||||
self.repo = repo;
|
||||
self
|
||||
}
|
||||
|
||||
/// The filename of the package.
|
||||
pub fn filename(&self) -> String {
|
||||
self.filename.clone()
|
||||
}
|
||||
/// Sets [`filename`].
|
||||
/// [`filename`]: [Package::filename]
|
||||
pub fn set_filename(&mut self, filename: String) -> &mut Self {
|
||||
self.filename = filename;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A mirror to use for downloading repository databases and packages.
/// Wraps the mirror's base URL, which may contain pacman-style `$repo`/`$arch`
/// placeholders — see [Mirror::substitute].
pub struct Mirror(url::Url);
|
||||
|
||||
impl Mirror {
|
||||
/// Creates a new [Mirror].
|
||||
pub fn new(base_url: url::Url) -> Self {
|
||||
Self(base_url)
|
||||
}
|
||||
/// Returns the base URL of the mirror and, similar to pacman, replacements for the repo and arch.
|
||||
/// (i.e. https://geo.mirror.pkgbuild.com/$repo/os/$arch).
|
||||
pub fn base(&self) -> url::Url {
|
||||
self.0.clone()
|
||||
}
|
||||
/// Sets the base URL of the mirror.
|
||||
pub fn set_base(&mut self, url: url::Url) -> &mut Self {
|
||||
self.0 = url;
|
||||
self
|
||||
}
|
||||
/// Substitutes a repo and architecture into the base url.
|
||||
#[allow(clippy::missing_panics_doc)] // never will panic
|
||||
pub fn substitute(&self, repo: &str, arch: &str, path: &str) -> url::Url {
|
||||
url::Url::parse(
|
||||
&(self
|
||||
.base()
|
||||
.as_str()
|
||||
.replace("$repo", repo)
|
||||
.replace("$arch", arch)
|
||||
+ "/"
|
||||
+ path),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
/// Makes an HTTP request to see if the server is reachable. Specifically, it attempts to reach
|
||||
/// the core.db file in the core repository.
|
||||
#[allow(clippy::missing_panics_doc)] // never will panic
|
||||
pub fn is_reachable(&self, arch: &str) -> bool {
|
||||
let url = self.substitute("core", arch, "core.db");
|
||||
if let Ok(res) = reqwest::blocking::get(url) {
|
||||
res.status().is_success()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the current user. It will first try the effective username. If the effective user has been
|
||||
/// deleted, then it will try the current user. If the current user has also been deleted, then
|
||||
/// it will as a last resort create a unique string in the format `deleted_user_e{e}_c{c}`
|
||||
/// where {e} is the effective user uid and {c} is the current user uid.
|
||||
pub fn get_current_user() -> OsString {
|
||||
let username = users::get_effective_username();
|
||||
if let Some(user) = username {
|
||||
user
|
||||
} else {
|
||||
let username = users::get_current_username();
|
||||
if let Some(user) = username {
|
||||
user
|
||||
} else {
|
||||
format!(
|
||||
"deleted_user_e{}_c{}",
|
||||
users::get_effective_uid(),
|
||||
users::get_current_uid()
|
||||
)
|
||||
.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the config directory. If for the entire system, then it will be /pacwoman/config, otherwise
|
||||
/// it will be /pacwomand/user/{user name from [get_current_user]}/config.
|
||||
///
|
||||
/// This directory is not guaranteed to exist! If you need it to, use [config_directory]!
|
||||
pub fn get_config_directory(system: bool) -> PathBuf {
|
||||
if system {
|
||||
PathBuf::from("/pacwoman/config")
|
||||
} else {
|
||||
PathBuf::from(format!(
|
||||
"/pacwoman/user/{}/config",
|
||||
get_current_user().to_string_lossy()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Same as [get_config_directory]; however, this function will create the directory if it doesn't
|
||||
/// exist. If [std::fs::create_dir_all] returns an error, it will be propagated.
|
||||
pub fn config_directory(system: bool) -> std::io::Result<PathBuf> {
|
||||
let dir = get_config_directory(system);
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Get the store directory. If for the entire system, then it will be /pacwoman/store, otherwise
|
||||
/// it will be /pacwomand/user/{user name from [get_current_user]}/store.
|
||||
///
|
||||
/// This directory is not guaranteed to exist! If you need it to, use [store_directory]!
|
||||
pub fn get_store_directory(system: bool) -> PathBuf {
|
||||
if system {
|
||||
PathBuf::from("/home/arthur/pacwoman/store")
|
||||
} else {
|
||||
PathBuf::from(format!(
|
||||
"/home/arthur/pacwoman/user/{}/store",
|
||||
get_current_user().to_string_lossy()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Same as [get_store_directory]; however, this function will create the directory if it doesn't
|
||||
/// exist. If [std::fs::create_dir_all] returns an error, it will be propagated.
|
||||
pub fn store_directory(system: bool) -> std::io::Result<PathBuf> {
|
||||
let dir = get_store_directory(system);
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Gets the store directory for a repo.
|
||||
pub fn get_repo_store_directory(system: bool, repo: RepoDescriptor) -> PathBuf {
|
||||
get_store_directory(system).join(repo.format())
|
||||
}
|
||||
|
||||
/// Same as [get_repo_store_directory] but creates the directory(s) if they don't exist.
|
||||
pub fn repo_store_directory(system: bool, repo: RepoDescriptor) -> std::io::Result<PathBuf> {
|
||||
let dir = get_repo_store_directory(system, repo);
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Get the index directory. This is always a system-wide directory of /pacwoman/index.
/// This directory is subject to change; because of this, use this method instead of
/// hard-coding the directory.
///
/// This directory is not guaranteed to exist! If you need it to, use [index_directory]!
pub fn get_index_directory() -> PathBuf {
    // Previously a hard-coded developer home directory
    // (/home/arthur/pacwoman/index) that contradicted this doc comment.
    PathBuf::from("/pacwoman/index")
}
|
||||
|
||||
/// Same as [get_index_directory]; however, this function will create the directory if it doesn't
|
||||
/// exist. If [std::fs::create_dir_all] returns an error, it will be propagated.
|
||||
pub fn index_directory() -> std::io::Result<PathBuf> {
|
||||
let dir = get_index_directory();
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Returns the index directory of a repository. This does not include the hash; this should be
|
||||
/// assumed to symlink to the correct, hashed directory.
|
||||
///
|
||||
/// This directory is not guaranteed to exist! See [repo_index_dir] if you need it too.
|
||||
pub fn get_repo_index_dir(repo: RepoDescriptor) -> PathBuf {
|
||||
get_index_directory().join(repo.format())
|
||||
}
|
||||
|
||||
/// Same as [get_repo_index_dir], except it will create the directory if it doesn't exist.
|
||||
/// If [std::fs::create_dir_all] returns an error, it will be propagated.
|
||||
pub fn repo_index_dir(repo: RepoDescriptor) -> std::io::Result<PathBuf> {
|
||||
let dir = get_repo_index_dir(repo);
|
||||
std::fs::create_dir_all(&dir)?;
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Populates the index with information for a certain repo from a certain mirror. Will
|
||||
/// overwrite any symlink for the repo.
|
||||
///
|
||||
/// # Panics
|
||||
/// Should never panic as .unwrap() is only used on things that should always be
|
||||
/// Some/Ok.
|
||||
pub fn populate_index(mirror: Mirror, repo: RepoDescriptor) -> std::io::Result<PathBuf> {
|
||||
if !mirror.is_reachable(&repo.arch) {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::HostUnreachable,
|
||||
"mirror is unreachable",
|
||||
));
|
||||
}
|
||||
let url = mirror.substitute(&repo.repo(), &repo.arch(), &format!("{}.db", repo.repo()));
|
||||
|
||||
if let Ok(res) = reqwest::blocking::get(url) {
|
||||
let bytes = res.bytes().unwrap().to_vec();
|
||||
let bytes_reader_bytes = bytes.clone();
|
||||
let bytes_reader = bytes_reader_bytes.reader();
|
||||
let tar = flate2::read::GzDecoder::new(bytes_reader);
|
||||
let mut archive = tar::Archive::new(tar);
|
||||
|
||||
let digest = sha256::digest(bytes);
|
||||
|
||||
let index_dir =
|
||||
index_directory()?.join(format!("{}-{}-{}", digest, repo.repo(), repo.arch()));
|
||||
|
||||
if std::fs::exists(&index_dir)? {
|
||||
let _ = std::fs::remove_dir_all(get_repo_index_dir(repo.clone()));
|
||||
|
||||
std::os::unix::fs::symlink(index_dir, get_repo_index_dir(repo.clone()))?;
|
||||
|
||||
return repo_index_dir(repo);
|
||||
}
|
||||
|
||||
std::fs::create_dir_all(&index_dir)?;
|
||||
|
||||
archive.unpack(&index_dir)?;
|
||||
|
||||
let mut package_list: Vec<OsString> = vec![];
|
||||
|
||||
for item in std::fs::read_dir(&index_dir)? {
|
||||
if item.is_err() {
|
||||
continue;
|
||||
}
|
||||
let item = item.unwrap();
|
||||
if item.file_type().is_err() {
|
||||
continue;
|
||||
}
|
||||
if item.file_type().unwrap().is_dir() {
|
||||
package_list.push(item.file_name());
|
||||
}
|
||||
}
|
||||
|
||||
let mut packages = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.truncate(false)
|
||||
.open(index_directory()?.join("PACKAGES"))?;
|
||||
|
||||
packages.write_fmt(format_args!(
|
||||
"{}-{}-{}: {}",
|
||||
digest,
|
||||
repo.repo(),
|
||||
repo.arch(),
|
||||
package_list.join(&OsString::from(" ")).to_string_lossy()
|
||||
))?;
|
||||
drop(packages);
|
||||
|
||||
std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(true)
|
||||
.open(index_dir.join("SOURCE"))?
|
||||
.write_fmt(format_args!(
|
||||
"{}\n{}\n{}",
|
||||
mirror.substitute(&repo.repo(), &repo.arch(), ""),
|
||||
time::OffsetDateTime::now_utc()
|
||||
.format(&time::format_description::well_known::Iso8601::DATE_TIME_OFFSET)
|
||||
.unwrap(),
|
||||
digest
|
||||
))?;
|
||||
|
||||
if std::fs::exists(index_directory()?.join("REPOS"))? {
|
||||
let mut repos = std::fs::OpenOptions::new()
|
||||
.read(true)
|
||||
.open(index_directory()?.join("REPOS"))?;
|
||||
|
||||
let mut repos_data = String::new();
|
||||
let mut repos_data_out = String::new();
|
||||
|
||||
repos.read_to_string(&mut repos_data)?;
|
||||
|
||||
drop(repos);
|
||||
|
||||
let mut need_append = true;
|
||||
for line in repos_data.split("\n") {
|
||||
if line.starts_with(&format!("{}-{}: ", repo.repo(), repo.arch())) {
|
||||
need_append = false;
|
||||
repos_data_out += &format!("{} {}\n", line, digest);
|
||||
} else {
|
||||
repos_data_out += &(line.to_owned() + "\n");
|
||||
}
|
||||
}
|
||||
if need_append {
|
||||
repos_data_out += &format!("{}-{}: {}\n", repo.repo(), repo.arch(), digest);
|
||||
}
|
||||
|
||||
let mut repos = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(true)
|
||||
.open(index_directory()?.join("REPOS"))?;
|
||||
repos.write_all(repos_data_out.as_bytes())?;
|
||||
drop(repos);
|
||||
} else {
|
||||
std::fs::write(
|
||||
index_directory()?.join("REPOS"),
|
||||
format!("{}-{}: {}\n", repo.repo(), repo.arch(), digest),
|
||||
)?;
|
||||
}
|
||||
|
||||
let _ = std::fs::remove_dir_all(get_repo_index_dir(repo.clone()));
|
||||
|
||||
std::os::unix::fs::symlink(index_dir, get_repo_index_dir(repo.clone()))?;
|
||||
|
||||
Ok(repo_index_dir(repo)?)
|
||||
} else {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::HostUnreachable,
|
||||
"mirror does not have repo",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a list of hashes provided by the repo.
|
||||
#[allow(clippy::missing_panics_doc)] // Shouldn't ever panic, but could be wrong.
|
||||
pub fn read_repos(repo: RepoDescriptor) -> std::io::Result<Vec<PathBuf>> {
|
||||
let mut repos = std::fs::OpenOptions::new()
|
||||
.read(true)
|
||||
.open(index_directory()?.join("REPOS"))?;
|
||||
|
||||
let mut repos_data = String::new();
|
||||
|
||||
repos.read_to_string(&mut repos_data)?;
|
||||
|
||||
drop(repos);
|
||||
|
||||
let repos = repos_data;
|
||||
for line in repos.split("\n") {
|
||||
if line.starts_with(&format!("{}-{}: ", repo.repo(), repo.arch())) {
|
||||
let mut strings: Vec<String> = vec![];
|
||||
for hash in line.split_once(": ").unwrap().1.split(" ") {
|
||||
strings.push(hash.to_owned() + &repo.format());
|
||||
}
|
||||
let mut out: Vec<PathBuf> = vec![];
|
||||
for ele in strings {
|
||||
out.push(index_directory()?.join(PathBuf::from(ele)));
|
||||
}
|
||||
return Ok(out);
|
||||
}
|
||||
}
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"repo not found in REPOS file",
|
||||
))
|
||||
}
|
||||
|
||||
/// Get a list of (Package name and version, repo, path to index directory with the package)
|
||||
/// for a certain package name. This function uses [read_desc] internally after performing
|
||||
/// preliminary checks to make sure that only packages that have the exact name of
|
||||
/// package_name will be returned. If this was not used, then this would create false
|
||||
/// positives of any package that is in the format `(package_name)-.*` in a psuedo-regex.
|
||||
#[allow(clippy::missing_panics_doc)] // will never panic
|
||||
pub fn locate_package(
|
||||
package_name: String,
|
||||
) -> std::io::Result<Vec<(String, RepoDescriptor, PathBuf)>> {
|
||||
let mut packages = std::fs::OpenOptions::new()
|
||||
.read(true)
|
||||
.open(index_directory()?.join("PACKAGES"))?;
|
||||
|
||||
let mut packages_data = String::new();
|
||||
|
||||
packages.read_to_string(&mut packages_data)?;
|
||||
|
||||
drop(packages);
|
||||
|
||||
let packages = packages_data.split("\n");
|
||||
|
||||
let mut out: Vec<(String, RepoDescriptor, PathBuf)> = vec![];
|
||||
|
||||
for line in packages {
|
||||
let segments = line.split_once(": ").unwrap();
|
||||
let prefix = segments.0;
|
||||
let suffix = segments.1;
|
||||
if suffix.contains(&(" ".to_string() + &package_name + "-")) {
|
||||
let arch = prefix.split("-").last().unwrap().to_string();
|
||||
let repo = prefix
|
||||
.strip_suffix(&("-".to_string() + &arch))
|
||||
.unwrap()
|
||||
.to_string();
|
||||
|
||||
let descriptor = RepoDescriptor::new()
|
||||
.set_arch(arch.clone())
|
||||
.set_repo(repo.clone())
|
||||
.clone();
|
||||
|
||||
let mut package = String::new();
|
||||
let start = suffix
|
||||
.find(&(" ".to_string() + &package_name + "-"))
|
||||
.unwrap()
|
||||
+ 1; // add one because otherwise that would be the space
|
||||
|
||||
for char in suffix[start..].chars() {
|
||||
if char == ' ' {
|
||||
break;
|
||||
}
|
||||
package += &char.to_string();
|
||||
}
|
||||
|
||||
if read_desc(package.clone(), descriptor.clone())?.name != package_name {
|
||||
continue;
|
||||
}
|
||||
|
||||
out.push((
|
||||
package.clone(),
|
||||
descriptor,
|
||||
index_directory()?
|
||||
.join(format!("{}-{}", repo, arch))
|
||||
.join(package),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if !out.is_empty() {
|
||||
return Ok(out);
|
||||
}
|
||||
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"cannot find package",
|
||||
))
|
||||
}
|
||||
|
||||
/// Information provided by a package's desc file (one field per `%MARKER%`
/// block; see [read_desc] for the parsing rules).
#[derive(Clone, Default, Debug)]
pub struct PackageDesc {
    /// The filename of the package in the repo. Required.
    pub filename: String,
    /// The name of the package. Required.
    pub name: String,
    /// Base name for split packages. Unknown how common?
    pub base: Option<String>,
    /// The version of the package. Required. Generally semver, unknown if must be?
    pub version: String,
    /// Description of the package.
    pub description: Option<String>,
    /// Download (compressed) size in bytes.
    pub csize: Option<usize>,
    /// Installed size in bytes.
    pub isize: Option<usize>,
    /// MD5 checksum of the package. Recommended not to use and to instead use [`sha256sum`].
    /// Stored as hex in the desc file; kept here as the raw bytes of that hex text.
    /// [`sha256sum`]: [PackageDesc::sha256sum]
    pub md5sum: Vec<u8>,
    /// SHA256 checksum of the package.
    /// Stored as hex in the desc file; kept here as the raw bytes of that hex text.
    pub sha256sum: Vec<u8>,
    /// PGP signature of the package.
    /// Stored as base64 in the desc file; decoded to raw signature bytes here.
    pub pgpsig: Vec<u8>,
    /// URL to a website for the package.
    pub url: Option<String>,
    /// An array of licenses for the package.
    pub license: Vec<String>,
    /// The architecture of the package. Not always the same as the repos; if not, generally
    /// something like `any`.
    pub arch: Option<String>,
    /// The build date of the package as a unix timestamp.
    pub builddate: Option<u64>,
    /// The packager of the package. Generally in the format `(name) <(email)>` (may be enforced?
    /// unclear).
    pub packager: Option<String>,
    /// Any packages the package depends on.
    pub depends: Vec<String>,
    /// Packages required for the package's testing suite. Unsure if tests can be performed even?
    pub checkdepends: Vec<String>,
    /// Packages optionally required. The first field is the package name, and the second is
    /// the human-readable reason.
    pub optdepends: Vec<(String, String)>,
    /// Packages required to build the package. May be useless.
    pub makedepends: Vec<String>,
}
|
||||
|
||||
/// Reads the desc file of the package into a [PackageDesc]. Will return an error if required
|
||||
/// fields don't exist. If they have an empty value, it will work. An error will also be
|
||||
/// returned if any calls such as opening a file fail.
|
||||
#[allow(clippy::missing_panics_doc)] // as far as i know shouldn't ever panic
|
||||
pub fn read_desc(package: String, repo: RepoDescriptor) -> std::io::Result<PackageDesc> {
|
||||
let path = index_directory()?
|
||||
.join(repo.format())
|
||||
.join(package)
|
||||
.join("desc");
|
||||
|
||||
let mut desc = std::fs::OpenOptions::new().read(true).open(path)?;
|
||||
|
||||
let mut desc_data = String::new();
|
||||
|
||||
desc.read_to_string(&mut desc_data)?;
|
||||
|
||||
drop(desc);
|
||||
|
||||
let desc_data = desc_data.split("\n").collect::<Vec<&str>>();
|
||||
|
||||
let mut out = PackageDesc::default();
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%FILENAME%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.filename = desc_data[loc].to_string();
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%NAME%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.name = desc_data[loc].to_string();
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%VERSION%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.version = desc_data[loc].to_string();
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%BASE%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.base = Some(desc_data[loc].to_string());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%DESC%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.description = Some(desc_data[loc].to_string());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%CSIZE%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let val = desc_data[loc].parse();
|
||||
if val.is_err() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"csize is invalid value",
|
||||
));
|
||||
}
|
||||
out.csize = Some(val.unwrap());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%ISIZE%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let val = desc_data[loc].parse();
|
||||
if val.is_err() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"isize is invalid value",
|
||||
));
|
||||
}
|
||||
out.isize = Some(val.unwrap());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%MD5SUM%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.md5sum = desc_data[loc].as_bytes().to_vec();
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%SHA256SUM%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.sha256sum = desc_data[loc].as_bytes().to_vec();
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%PGPSIG%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let val =
|
||||
base64::engine::general_purpose::STANDARD.decode(desc_data[loc].as_bytes());
|
||||
if val.is_err() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"pgpsig is invalid value",
|
||||
));
|
||||
}
|
||||
out.pgpsig = val.unwrap();
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%URL%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.url = Some(desc_data[loc].to_string());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%LICENSE%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let mut val: Vec<String> = vec![];
|
||||
|
||||
for line in &desc_data[loc..] {
|
||||
let line = *line;
|
||||
if line.is_empty() {
|
||||
break;
|
||||
}
|
||||
val.push(line.to_string());
|
||||
}
|
||||
|
||||
out.license = val;
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%ARCH%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.arch = Some(desc_data[loc].to_string());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%BUILDDATE%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let val = desc_data[loc].parse();
|
||||
if val.is_err() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"builddate is invalid value",
|
||||
));
|
||||
}
|
||||
out.builddate = Some(val.unwrap());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%PACKAGER%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
out.packager = Some(desc_data[loc].to_string());
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%DEPENDS%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let mut val: Vec<String> = vec![];
|
||||
|
||||
for line in &desc_data[loc..] {
|
||||
let line = *line;
|
||||
if line.is_empty() {
|
||||
break;
|
||||
}
|
||||
val.push(line.to_string());
|
||||
}
|
||||
|
||||
out.depends = val;
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data
|
||||
.iter()
|
||||
.position(|item| (*item) == "%CHECKDEPENDS%")
|
||||
{
|
||||
let loc = loc + 1; // value is on next line
|
||||
let mut val: Vec<String> = vec![];
|
||||
|
||||
for line in &desc_data[loc..] {
|
||||
let line = *line;
|
||||
if line.is_empty() {
|
||||
break;
|
||||
}
|
||||
val.push(line.to_string());
|
||||
}
|
||||
|
||||
out.checkdepends = val;
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%MAKEDEPENDS%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let mut val: Vec<String> = vec![];
|
||||
|
||||
for line in &desc_data[loc..] {
|
||||
let line = *line;
|
||||
if line.is_empty() {
|
||||
break;
|
||||
}
|
||||
val.push(line.to_string());
|
||||
}
|
||||
|
||||
out.makedepends = val;
|
||||
}
|
||||
|
||||
if let Some(loc) = desc_data.iter().position(|item| (*item) == "%OPTDEPENDS%") {
|
||||
let loc = loc + 1; // value is on next line
|
||||
let mut val: Vec<(String, String)> = vec![];
|
||||
|
||||
for line in &desc_data[loc..] {
|
||||
let line = line.to_string();
|
||||
if line.is_empty() {
|
||||
break;
|
||||
}
|
||||
let l = line.split_once(": ").unwrap();
|
||||
let l = (l.0.to_string(), l.1.to_string());
|
||||
val.push(l);
|
||||
}
|
||||
|
||||
out.optdepends = val;
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
} else {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"cannot find version in desc file of package",
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"cannot find name in desc file of package",
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"cannot find filename in desc file of package",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn receive_package(
|
||||
mirror: Mirror,
|
||||
repo: RepoDescriptor,
|
||||
package: String,
|
||||
system: bool,
|
||||
) -> std::io::Result<PathBuf> {
|
||||
if repo.hash().is_none() {
|
||||
return Err(std::io::Error::new(std::io::ErrorKind::InvalidInput, "need hash in repo descriptor"));
|
||||
}
|
||||
|
||||
let path = index_directory()?
|
||||
.join(repo.format())
|
||||
.join(package)
|
||||
.join("desc");
|
||||
|
||||
let mut desc = std::fs::OpenOptions::new().read(true).open(path)?;
|
||||
|
||||
let mut desc_data = String::new();
|
||||
|
||||
desc.read_to_string(&mut desc_data)?;
|
||||
|
||||
drop(desc);
|
||||
|
||||
let desc_hash = sha256::digest(desc_data);
|
||||
|
||||
let desc = read_desc(package, repo)?;
|
||||
|
||||
let url = mirror.substitute(&repo.repo(), &repo.arch(), &desc.filename);
|
||||
|
||||
let dir = repo_store_directory(system, repo)?.join(format!("{}-{}", desc_hash, package));
|
||||
|
||||
}
|
||||
|
||||
/// Creates all of the necessary directories, for the system if root or the pacwoman user, and
|
||||
/// for the current user if not. If [std::fs::create_dir_all] returns an error, it will be
|
||||
/// propagated.
|
||||
pub fn create_directories() -> Result<(), std::io::Error> {
|
||||
if users::get_effective_uid() == 0 || get_current_user() == "pacwoman" {
|
||||
store_directory(false)?;
|
||||
config_directory(false)?;
|
||||
index_directory()?;
|
||||
} else {
|
||||
store_directory(true)?;
|
||||
config_directory(true)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
17
src/main.rs
Normal file
17
src/main.rs
Normal file
|
@ -0,0 +1,17 @@
|
|||
//! Main binary entrypoint.
|
||||
#![warn(
|
||||
missing_docs,
|
||||
clippy::missing_docs_in_private_items,
|
||||
clippy::empty_docs
|
||||
)]
|
||||
|
||||
fn main() {
|
||||
pacwoman::populate_index(
|
||||
pacwoman::Mirror::new(url::Url::parse(
|
||||
"https://geo.mirror.pkgbuild.com/$repo/os/$arch",
|
||||
).unwrap()),
|
||||
pacwoman::RepoDescriptor::new()
|
||||
.set_repo("core".to_string())
|
||||
.set_arch("x86_64".to_string()).clone(),
|
||||
).unwrap();
|
||||
}
|
Loading…
Add table
Reference in a new issue