Compare commits

..

11 commits
0.4.2 ... main

16 changed files with 1664 additions and 214 deletions

View file

@ -7,6 +7,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased](https://git.rustybever.be/Chewing_Bever/alex/src/branch/dev)
### Added
* Debian packages are now available in the [package registry](https://git.rustybever.be/Chewing_Bever/alex/packages)
## [0.5.0](https://git.rustybever.be/Chewing_Bever/alex/src/tag/0.5.0)
### Added
* CLI commands to interact with the PaperMC API
* list and view available Minecraft versions and builds per version
* Download jars to simplify manual server updating
## [0.4.2](https://git.rustybever.be/Chewing_Bever/alex/src/tag/0.4.2)
### Fixed

940
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -2,13 +2,15 @@
resolver = "2"
members = [
'backup',
'alex'
'alex',
'papermc-api'
]
[workspace.package]
version = "0.4.2"
version = "0.5.0"
authors = ["Jef Roosens"]
edition = "2021"
license-file = "LICENSE"
[workspace.dependencies]
chrono = { version = "0.4.26", features = ["serde"] }

View file

@ -1,20 +1,25 @@
# Build the local development build
[group('build')]
build:
cargo build --frozen --workspace
alias b := build
# Build release binaries for the supported architectures
[group('build')]
build-release target:
build-release:
cargo build \
--release \
--frozen \
--workspace \
--target '{{ target }}'
--target x86_64-unknown-linux-musl \
--target aarch64-unknown-linux-musl
# Run all tests in the workspace
test:
cargo test --frozen --workspace
alias t := test
# Run cargofmt and clippy
check:
cargo fmt --check --all
cargo clippy \
@ -23,6 +28,7 @@ check:
--no-deps \
--deny 'clippy::all'
alias c := check
alias lint := check
fetch:
cargo fetch --locked
@ -43,18 +49,33 @@ run:
--java '/usr/lib/jvm/java-21-openjdk/bin/java' \
--layers '2min,2,4,4;3min,3,2,2'
publish-release-binaries tag: (build-release 'x86_64-unknown-linux-musl') (build-release 'aarch64-unknown-linux-musl')
# Package the static release binaries as a Debian package
[group('package')]
package-deb: build-release
    # cargo-deb packages a single --target per invocation; run it once per
    # architecture so both the amd64 and arm64 .deb files (uploaded by
    # publish-release) actually get built.
    cargo deb \
        --package alex \
        --frozen \
        --no-build \
        --target x86_64-unknown-linux-musl
    cargo deb \
        --package alex \
        --frozen \
        --no-build \
        --target aarch64-unknown-linux-musl
# Publish the binaries and packages for a new release
[group('package')]
publish-release tag: build-release package-deb
# Check the binaries are proper static binaries
[ "$(readelf -d target/x86_64-unknown-linux-musl/release/alex | grep NEEDED | wc -l)" = 0 ]
[ "$(readelf -d target/aarch64-unknown-linux-musl/release/alex | grep NEEDED | wc -l)" = 0 ]
curl \
--netrc \
--fail \
--upload-file target/x86_64-unknown-linux-musl/release/alex \
https://git.rustybever.be/api/packages/Chewing_Bever/generic/alex/"{{ tag }}"/alex-linux-amd64
curl \
--netrc \
--fail \
--upload-file target/aarch64-unknown-linux-musl/release/alex \
https://git.rustybever.be/api/packages/Chewing_Bever/generic/alex/"{{ tag }}"/alex-linux-arm64
--parallel --fail-early \
--netrc --upload-file target/x86_64-unknown-linux-musl/release/alex \
https://git.rustybever.be/api/packages/Chewing_Bever/generic/alex/"{{ tag }}"/alex-linux-amd64 \
--next \
--netrc --upload-file target/aarch64-unknown-linux-musl/release/alex \
https://git.rustybever.be/api/packages/Chewing_Bever/generic/alex/"{{ tag }}"/alex-linux-arm64 \
--next \
--netrc --upload-file target/debian/alex_{{ tag }}-1_amd64.deb \
https://git.rustybever.be/api/packages/Chewing_Bever/debian/pool/any/main/upload \
--next \
--netrc --upload-file target/debian/alex_{{ tag }}-1_arm64.deb \
https://git.rustybever.be/api/packages/Chewing_Bever/debian/pool/any/main/upload > /dev/null

View file

@ -3,9 +3,12 @@ name = "alex"
description = "Wrapper around Minecraft server processes, designed to complement Docker image installations."
version.workspace = true
edition.workspace = true
authors.workspace = true
license-file.workspace = true
[dependencies]
backup = { path = "../backup" }
papermc-api = { path = "../papermc-api" }
chrono.workspace = true
serde.workspace = true
@ -13,3 +16,4 @@ serde.workspace = true
clap = { version = "4.5.37", features = ["derive", "env"] }
signal-hook = "0.3.15"
figment = { version = "0.10.10", features = ["env", "toml"] }
ureq = "3.3.0"

View file

@ -1,5 +1,6 @@
mod backup;
mod config;
mod papermc;
mod run;
use std::{path::PathBuf, str::FromStr};
@ -72,6 +73,9 @@ pub enum Commands {
Run(RunCli),
/// Interact with the backup system without starting a server
Backup(BackupArgs),
/// Interact with the PaperMC API and download new JARs
#[command(name = "papermc")]
PaperMC(papermc::Cli),
}
impl Cli {
@ -81,6 +85,7 @@ impl Cli {
match &self.command {
Commands::Run(args) => args.run(self, &config),
Commands::Backup(args) => Ok(args.run(&config)?),
Commands::PaperMC(cli) => cli.run(),
}
}

213
alex/src/cli/papermc.rs Normal file
View file

@ -0,0 +1,213 @@
use std::path::{Path, PathBuf};
use chrono::Local;
use clap::{Args, Subcommand};
// Entry point of the `papermc` subcommand; wraps the nested subcommands below.
#[derive(Args)]
pub struct Cli {
    #[command(subcommand)]
    pub command: Commands,
}
// The available `papermc` subcommands; doc comments double as clap help text.
#[derive(Subcommand)]
pub enum Commands {
    /// Show information for a specific version or build
    Show(ShowArgs),
    /// List the available versions, or builds for a specific version
    List(ListArgs),
    /// Download the jar for a specific build
    Download(DownloadBuildArgs),
}
// Arguments for `papermc show`.
#[derive(Args)]
pub struct ShowArgs {
    /// Version to show information for
    version: String,
    /// Build within version to show information for
    build: Option<String>,
}

// Arguments for `papermc list`.
#[derive(Args)]
pub struct ListArgs {
    /// If provided, list the available builds for this version
    version: Option<String>,
}

// Arguments for `papermc download`.
#[derive(Args)]
pub struct DownloadBuildArgs {
    /// Version of build to download
    version: String,
    /// Build number for build to download
    build: String,
    /// Path to store the new JAR file in; stores JAR in the local directory if not specified
    #[arg(short, long, value_name = "OUT_PATH")]
    out: Option<PathBuf>,
}
impl Cli {
    /// Dispatch to the handler for the selected subcommand.
    ///
    /// The handlers report their own failures on stdout, so this always
    /// returns `Ok(())`.
    pub fn run(&self) -> crate::Result<()> {
        match &self.command {
            Commands::Show(args) => match &args.build {
                // With a build number we show that build, otherwise the version.
                Some(build) => show_build(&args.version, build),
                None => show_version(&args.version),
            },
            Commands::List(args) => match &args.version {
                // With a version we list its builds, otherwise all versions.
                Some(version) => list_builds(version),
                None => list_versions(),
            },
            Commands::Download(args) => {
                download_build(&args.version, &args.build, args.out.as_deref())
            }
        }

        Ok(())
    }
}
/// Print summary information for one version of the "paper" project: id,
/// support status, number of builds, minimum Java version and recommended
/// JVM flags. API failures are reported on stdout instead of propagated.
fn show_version(version_str: &str) {
    let client = papermc_api::Client::new();

    let version = match client.project("paper").version(version_str).info() {
        Ok(version) => version,
        Err(err) => {
            println!("failed to query API: {err}");
            return;
        }
    };

    println!("id : {}", version.id);
    println!("status : {}", version.support_status);
    println!("builds : {}", version.builds.len());
    println!("Min. Java : {}", version.java.minimum_version);
    println!("Java flags: {}", version.java.recommended_flags.join(" "))
}
/// Print detailed information about one build: id, timestamp (converted to
/// local time), release channel, the commits it contains, and every available
/// download with its checksums. API failures are reported on stdout.
fn show_build(version_str: &str, build_str: &str) {
    let client = papermc_api::Client::new();

    let build = match client
        .project("paper")
        .version(version_str)
        .build(build_str)
    {
        Ok(build) => build,
        Err(err) => {
            println!("failed to query API: {err}");
            return;
        }
    };

    println!("id : {}", build.id);
    println!("time : {}", build.time.with_timezone(&Local));
    println!("channel: {}", build.channel);

    println!("commits:");

    for commit in build.commits {
        println!("- SHA : {}", commit.sha);
        println!(" time : {}", commit.time.with_timezone(&Local));
        println!(" message: {}", commit.message);
    }

    println!("downloads:");

    // Downloads are keyed by kind (see `download_build`, e.g. "server:default").
    for (name, download) in build.downloads.iter() {
        println!(" {name}:");
        println!(" name: {}", download.name);
        println!(" size: {}", download.size);
        println!(" URL : {}", download.url);
        println!(" checksums:");

        for (name, value) in download.checksums.iter() {
            println!(" - {}: {}", name, value);
        }
    }
}
/// Print one line per available "paper" version, with its support status in
/// parentheses. API failures are reported on stdout instead of propagated.
fn list_versions() {
    // Guard-clause style: bail out early on an API error.
    let versions = match papermc_api::Client::new().project("paper").versions() {
        Ok(versions) => versions,
        Err(err) => {
            println!("failed to query API: {err}");
            return;
        }
    };

    for version in versions {
        println!("{} ({})", version.id, version.support_status);
    }
}
/// Print a short summary (id, local timestamp, channel) for every build of
/// the given version. API failures are reported on stdout.
fn list_builds(version: &str) {
    let client = papermc_api::Client::new();

    match client.project("paper").version(version).builds() {
        Ok(builds) => {
            for build in builds {
                println!("- id : {}", build.id);
                println!(" time : {}", build.time.with_timezone(&Local));
                println!(" channel: {}", build.channel);
            }
        }
        Err(err) => {
            println!("failed to query API: {err}");
        }
    }
}
/// Download the JAR for the given build of the given version.
///
/// The file is stored under `out_path` when that points to an existing
/// directory, at `out_path` itself when it is any other path, or in the
/// current directory otherwise. Failures are reported on stdout instead of
/// propagated, matching the other subcommand handlers.
fn download_build(version: &str, build: &str, out_path: Option<&Path>) {
    let client = papermc_api::Client::new();

    let build = match client.project("paper").version(version).build(build) {
        Ok(build) => build,
        Err(err) => {
            println!("failed to query API: {err}");
            return;
        }
    };

    let filename = format!("paper-{}-{}.jar", version, build.id);

    let dest_path = match out_path {
        // An existing directory means "store the JAR inside it".
        Some(path) if path.is_dir() => path.join(filename),
        Some(path) => path.to_path_buf(),
        None => PathBuf::from(filename),
    };

    // Prefer the default server JAR; fall back to any download the build offers.
    let download_url = match build
        .downloads
        .get("server:default")
        .or_else(|| build.downloads.values().next())
    {
        Some(download) => &download.url,
        None => {
            println!("no download URLs found for build.");
            return;
        }
    };

    // Start the request before touching the filesystem so a failed request
    // doesn't leave behind an empty (or truncated) destination file.
    let mut res = match ureq::get(download_url).call() {
        Ok(res) => res,
        Err(err) => {
            println!("failed to download file: {err}");
            return;
        }
    };

    let mut f = match std::fs::File::create(dest_path) {
        Ok(f) => f,
        Err(err) => {
            println!("failed to create destination file: {err}");
            return;
        }
    };

    let mut reader = res.body_mut().as_reader();

    // Stream the body straight to disk instead of buffering it in memory.
    if let Err(err) = std::io::copy(&mut reader, &mut f) {
        println!("failed to download file: {err}");
    }
}

View file

@ -6,6 +6,7 @@ pub type Result<T> = std::result::Result<T, Error>;
pub enum Error {
IO(io::Error),
Figment(figment::Error),
Other(Box<dyn std::error::Error>),
}
impl fmt::Display for Error {
@ -13,6 +14,7 @@ impl fmt::Display for Error {
match self {
Error::IO(err) => write!(fmt, "{}", err),
Error::Figment(err) => write!(fmt, "{}", err),
Error::Other(err) => write!(fmt, "{}", err),
}
}
}

View file

@ -1,11 +1,11 @@
use std::{borrow::Borrow, fmt};
use std::{borrow::Borrow, fmt, path::PathBuf};
use serde::{Deserialize, Serialize};
use super::State;
/// Represents the changes relative to the previous backup
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq)]
pub struct Delta {
/// What files were added/modified in each part of the tarball.
pub added: State,
@ -19,7 +19,6 @@ pub struct Delta {
impl Delta {
/// Returns whether the delta is empty by checking whether both its added and removed state
/// return true for their `is_empty`.
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
self.added.is_empty() && self.removed.is_empty()
}
@ -111,20 +110,19 @@ impl Delta {
out
}
/// Given a chain of deltas, ordered from last to first, calculate the "contribution" for each
/// state.
/// Given a chain of deltas, calculate the "contribution" for each state.
///
/// The contribution of a delta in a given chain is defined as the parts of the state produced
/// by this chain that are actually provided by this delta. This comes down to calculating the
/// strict difference of this delta and all of its successive deltas.
/// For each delta, its contribution is the part of its added and removed files that isn't
/// overwritten by any of its following deltas.
pub fn contributions<I>(deltas: I) -> Vec<State>
where
I: IntoIterator,
I::IntoIter: DoubleEndedIterator,
I::Item: Borrow<Delta>,
{
let mut contributions: Vec<State> = Vec::new();
let mut deltas = deltas.into_iter();
let mut deltas = deltas.into_iter().rev();
if let Some(first_delta) = deltas.next() {
// From last to first, we calculate the strict difference of the delta with the union of all its
@ -139,10 +137,47 @@ impl Delta {
}
}
// contributions.reverse();
contributions.reverse();
contributions
}
/// Append the given files to the directory's list of added files
pub fn append_added<I>(&mut self, dir: impl Into<PathBuf>, files: I)
where
I: IntoIterator,
I::Item: Into<PathBuf>,
{
self.added.append_dir(dir, files);
}
/// Wrapper around the `append_added` method for a builder-style construction of delta's
pub fn with_added<I>(mut self, dir: impl Into<PathBuf>, files: I) -> Self
where
I: IntoIterator,
I::Item: Into<PathBuf>,
{
self.append_added(dir, files);
self
}
/// Append the given files to the directory's list of removed files
pub fn append_removed<I>(&mut self, dir: impl Into<PathBuf>, files: I)
where
I: IntoIterator,
I::Item: Into<PathBuf>,
{
self.removed.append_dir(dir, files);
}
/// Wrapper around the `append_removed` method for a builder-style construction of delta's
pub fn with_removed<I>(mut self, dir: impl Into<PathBuf>, files: I) -> Self
where
I: IntoIterator,
I::Item: Into<PathBuf>,
{
self.append_removed(dir, files);
self
}
}
impl fmt::Display for Delta {
@ -153,3 +188,108 @@ impl fmt::Display for Delta {
write!(f, "+{}-{}", added_count, removed_count)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_union_disjunct_dirs() {
let a = Delta::default()
.with_added("dir_added_1", ["file1", "file2"])
.with_removed("dir_removed_1", ["file1", "file2"]);
let b = Delta::default()
.with_added("dir_added_3", ["file1", "file2"])
.with_removed("dir_removed_3", ["file1", "file2"]);
let expected = Delta::default()
.with_added("dir_added_1", ["file1", "file2"])
.with_added("dir_added_3", ["file1", "file2"])
.with_removed("dir_removed_1", ["file1", "file2"])
.with_removed("dir_removed_3", ["file1", "file2"]);
assert_eq!(expected, a.union(&b));
assert_eq!(expected, b.union(&a));
}
#[test]
fn test_union_disjunct_files() {
let a = Delta::default()
.with_added("dir_added_1", ["file1", "file2"])
.with_removed("dir_removed_1", ["file1", "file2"]);
let b = Delta::default()
.with_added("dir_added_1", ["file3", "file4"])
.with_removed("dir_removed_1", ["file3", "file4"]);
let expected = Delta::default()
.with_added("dir_added_1", ["file1", "file2", "file3", "file4"])
.with_removed("dir_removed_1", ["file1", "file2", "file3", "file4"]);
assert_eq!(expected, a.union(&b));
assert_eq!(expected, b.union(&a));
}
#[test]
fn test_union_full_revert() {
let a = Delta::default().with_added("dir_1", ["file1", "file2"]);
let b = Delta::default().with_removed("dir_1", ["file1", "file2"]);
let expected = Delta::default().with_removed("dir_1", ["file1", "file2"]);
assert_eq!(expected, a.union(&b));
let expected = Delta::default().with_added("dir_1", ["file1", "file2"]);
assert_eq!(expected, b.union(&a));
}
#[test]
fn test_difference() {
let a = Delta::default()
.with_added("dir1", ["file1", "file2"])
.with_removed("dir1", ["file3", "file4"]);
let b = Delta::default()
.with_added("dir1", ["file1"])
.with_removed("dir1", ["file3"]);
let expected = Delta::default()
.with_added("dir1", ["file2"])
.with_removed("dir1", ["file4"]);
assert_eq!(a.difference(&b), expected);
assert_eq!(b.difference(&a), Delta::default());
}
#[test]
fn test_strict_difference() {
let a = Delta::default()
.with_added("dir1", ["file1", "file2"])
.with_removed("dir1", ["file3", "file4"]);
let b = Delta::default()
.with_added("dir1", ["file1", "file4"])
.with_removed("dir1", ["file3"]);
let expected = Delta::default().with_added("dir1", ["file2"]);
assert_eq!(a.strict_difference(&b), expected);
assert_eq!(b.strict_difference(&a), Delta::default());
}
#[test]
fn test_contributions() {
let deltas = [
Delta::default().with_added("dir1", ["file4"]),
Delta::default().with_added("dir1", ["file1", "file2"]),
Delta::default()
.with_added("dir1", ["file1"])
.with_added("dir2", ["file3"]),
Delta::default()
.with_added("dir1", ["file2"])
.with_removed("dir2", ["file3"]),
];
let expected = [
State::default().with_dir("dir1", ["file4"]),
State::default(),
State::default().with_dir("dir1", ["file1"]),
State::default().with_dir("dir1", ["file2"]),
];
assert_eq!(Delta::contributions(deltas), expected);
}
}

View file

@ -229,9 +229,8 @@ where
.map(|_| ()),
// Incremental backups are exported one by one according to their contribution
BackupType::Incremental => {
let contributions = Delta::contributions(
chain.iter().take(index + 1).map(|b| &b.delta).rev(),
);
let contributions =
Delta::contributions(chain.iter().take(index + 1).map(|b| &b.delta));
let tar_gz = OpenOptions::new()
.write(true)
@ -245,7 +244,6 @@ where
// overwritten by their successors anyways.
for (contribution, backup) in contributions
.iter()
.rev()
.zip(chain.iter().take(index + 1))
.filter(|(contribution, _)| !contribution.is_empty())
{

View file

@ -11,7 +11,7 @@ use crate::Delta;
/// Struct that represents a current state for a backup. This struct acts as a smart pointer around
/// a HashMap.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct State(HashMap<PathBuf, HashSet<PathBuf>>);
impl State {
@ -49,8 +49,52 @@ impl State {
pub fn is_empty(&self) -> bool {
self.0.values().all(|s| s.is_empty())
}
pub fn append_dir<I>(&mut self, dir: impl Into<PathBuf>, files: I)
where
I: IntoIterator,
I::Item: Into<PathBuf>,
{
let dir = dir.into();
let files = files.into_iter().map(Into::into);
if let Some(dir_files) = self.0.get_mut(&dir) {
dir_files.extend(files);
} else {
self.0.insert(dir, files.collect());
}
}
pub fn with_dir<I>(mut self, dir: impl Into<PathBuf>, files: I) -> Self
where
I: IntoIterator,
I::Item: Into<PathBuf>,
{
self.append_dir(dir, files);
self
}
}
impl PartialEq for State {
fn eq(&self, other: &Self) -> bool {
let self_non_empty = self.0.values().filter(|files| !files.is_empty()).count();
let other_non_empty = other.0.values().filter(|files| !files.is_empty()).count();
if self_non_empty != other_non_empty {
return false;
}
// If both states have the same number of non-empty directories, then comparing each
// directory of one with the other will only be true if their list of non-empty directories
// is identical.
self.0
.iter()
.all(|(dir, files)| files.is_empty() || other.0.get(dir).map_or(false, |v| v == files))
}
}
impl Eq for State {}
impl<T> From<T> for State
where
T: IntoIterator,
@ -86,3 +130,33 @@ impl DerefMut for State {
&mut self.0
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_eq() {
let a = State::default().with_dir("dir1", ["file1", "file2"]);
let b = State::default().with_dir("dir1", ["file1", "file2"]);
assert_eq!(a, b);
let b = b.with_dir("dir2", ["file3"]);
assert_ne!(a, b);
}
#[test]
fn test_eq_empty_dirs() {
let a = State::default().with_dir("dir1", ["file1", "file2"]);
let b = State::default()
.with_dir("dir1", ["file1", "file2"])
.with_dir("dir2", Vec::<PathBuf>::new());
assert_eq!(a, b);
let b = b.with_dir("dir2", ["file3"]);
assert_ne!(a, b);
}
}

10
papermc-api/Cargo.toml Normal file
View file

@ -0,0 +1,10 @@
[package]
name = "papermc-api"
version.workspace = true
edition.workspace = true
[dependencies]
serde.workspace = true
chrono.workspace = true
serde_json = "1.0.149"
ureq = { version = "3.3.0", features = ["json"] }

View file

@ -0,0 +1,19 @@
/// Manual smoke test for the `papermc-api` crate: queries the live PaperMC
/// API and prints results. `unwrap` is used deliberately — this appears to be
/// example code, not library code (TODO confirm it lives under `examples/`).
fn main() {
    let client = papermc_api::Client::new();

    let projects = client.projects().unwrap();

    for project in projects {
        println!("project: {:?}", project);
    }

    let versions = client.project("paper").versions().unwrap();

    for version in versions {
        println!("version: {:?}", version);
    }

    // `latest()` resolves the API's special "latest" build id.
    let latest = client.project("paper").version("1.21.1").latest().unwrap();
    println!("latest: {:?}", latest);

    let builds = client.project("paper").version("1.21.10").builds().unwrap();
    println!("number of builds: {}", builds.len());
}

22
papermc-api/src/error.rs Normal file
View file

@ -0,0 +1,22 @@
/// Errors produced by this crate.
#[derive(Debug)]
pub enum Error {
    /// Transport/HTTP failure from the underlying `ureq` client.
    Ureq(ureq::Error),
    /// The API responded, but the body did not have the expected JSON shape.
    BadBody,
}
impl std::error::Error for Error {}
// Lets `?` convert ureq failures into this crate's error type.
impl From<ureq::Error> for Error {
    fn from(value: ureq::Error) -> Self {
        Self::Ureq(value)
    }
}
impl std::fmt::Display for Error {
    /// Human-readable description; delegates to the wrapped error for `Ureq`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::BadBody => f.write_str("bad response body"),
            Self::Ureq(err) => write!(f, "{err}"),
        }
    }
}

247
papermc-api/src/lib.rs Normal file
View file

@ -0,0 +1,247 @@
use serde_json::Value;
pub use error::Error;
use crate::models::{Build, BuildCommit, BuildDownload, Java, Project, Version};
mod error;
mod models;
/// Thin client for the PaperMC "fill" v3 API.
pub struct Client {
    // Reused agent so requests share connection state.
    agent: ureq::Agent,
}

/// Base URL of the PaperMC fill API (v3).
pub const BASE_URL: &str = "https://fill.papermc.io/v3";

impl Default for Client {
    fn default() -> Self {
        Self::new()
    }
}
impl Client {
    /// Create a client backed by a fresh `ureq` agent.
    pub fn new() -> Self {
        Self {
            agent: ureq::agent(),
        }
    }

    /// Fetch the list of all projects hosted by the API.
    pub fn projects(&self) -> Result<Vec<Project>, Error> {
        let mut res = self.agent.get(format!("{}/projects", BASE_URL)).call()?;
        let body_json: Value = res.body_mut().read_json()?;

        // The response nests the list under a "projects" key.
        let projects = body_json["projects"].as_array().ok_or(Error::BadBody)?;

        projects.iter().map(parse_project_json).collect()
    }

    /// Start a query scoped to a single project (e.g. "paper").
    pub fn project<'a>(&'a self, project: &'a str) -> ProjectQuery<'a> {
        ProjectQuery {
            agent: &self.agent,
            project,
        }
    }
}
/// Borrowing query builder scoped to one project; created via `Client::project`.
pub struct ProjectQuery<'a> {
    agent: &'a ureq::Agent,
    project: &'a str,
}
impl<'a> ProjectQuery<'a> {
    /// Fetch metadata for this project.
    pub fn info(&self) -> Result<Project, Error> {
        let mut res = self
            .agent
            .get(format!("{}/projects/{}", BASE_URL, self.project))
            .call()?;
        let body_json: Value = res.body_mut().read_json()?;

        parse_project_json(&body_json)
    }

    /// Fetch every version of this project.
    pub fn versions(&self) -> Result<Vec<Version>, Error> {
        let mut res = self
            .agent
            .get(format!("{}/projects/{}/versions", BASE_URL, self.project))
            .call()?;
        let body_json: Value = res.body_mut().read_json()?;

        // The response nests the list under a "versions" key.
        let versions = body_json["versions"].as_array().ok_or(Error::BadBody)?;

        versions.iter().map(parse_version_json).collect()
    }

    /// Narrow the query down to a single version of this project.
    pub fn version(&self, version: &'a str) -> VersionQuery<'a> {
        VersionQuery {
            agent: self.agent,
            project: self.project,
            version,
        }
    }
}
/// Borrowing query builder scoped to one version of one project; created via
/// `ProjectQuery::version`.
pub struct VersionQuery<'a> {
    agent: &'a ureq::Agent,
    project: &'a str,
    version: &'a str,
}
impl<'a> VersionQuery<'a> {
    /// Fetch metadata for this version.
    pub fn info(&self) -> Result<Version, Error> {
        let mut res = self
            .agent
            .get(format!(
                "{}/projects/{}/versions/{}",
                BASE_URL, self.project, self.version
            ))
            .call()?;
        let body_json: Value = res.body_mut().read_json()?;

        parse_version_json(&body_json)
    }

    /// Fetch every build of this version.
    pub fn builds(&self) -> Result<Vec<Build>, Error> {
        let mut res = self
            .agent
            .get(format!(
                "{}/projects/{}/versions/{}/builds",
                BASE_URL, self.project, self.version
            ))
            .call()?;
        let body_json: Value = res.body_mut().read_json()?;

        // Unlike the other endpoints, this one returns a bare JSON array.
        body_json
            .as_array()
            .ok_or(Error::BadBody)?
            .iter()
            .map(parse_build_json)
            .collect()
    }

    /// Fetch a single build of this version by build id.
    pub fn build(&self, build: &str) -> Result<Build, Error> {
        let mut res = self
            .agent
            .get(format!(
                "{}/projects/{}/versions/{}/builds/{}",
                BASE_URL, self.project, self.version, build
            ))
            .call()?;
        let body_json: Value = res.body_mut().read_json()?;

        parse_build_json(&body_json)
    }

    /// Fetch the latest build of this version via the API's special
    /// "latest" build id.
    pub fn latest(&self) -> Result<Build, Error> {
        self.build("latest")
    }
}
fn parse_project_json(value: &Value) -> Result<Project, Error> {
Ok(Project {
id: value["project"]["id"]
.as_str()
.map(|s| s.to_string())
.ok_or(Error::BadBody)?,
name: value["project"]["name"]
.as_str()
.map(|s| s.to_string())
.ok_or(Error::BadBody)?,
// Flatten map of versions into one array
versions: value["versions"]
.as_object()
.ok_or(Error::BadBody)?
.iter()
.map(|(_, versions)| versions.as_array().ok_or(Error::BadBody))
// Collect into error to propagate error of any of the versions
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.flatten()
.map(|v| v.as_str().ok_or(Error::BadBody).map(|s| s.to_string()))
.collect::<Result<_, _>>()?,
})
}
/// Parse a `Version` from the JSON shape returned by the version endpoints.
/// Returns `Error::BadBody` if any expected field is missing or mistyped,
/// including an unknown support-status string.
fn parse_version_json(value: &Value) -> Result<Version, Error> {
    Ok(Version {
        id: value["version"]["id"]
            .as_str()
            .map(String::from)
            .ok_or(Error::BadBody)?,
        // Parsed via SupportStatus's FromStr impl.
        support_status: value["version"]["support"]["status"]
            .as_str()
            .ok_or(Error::BadBody)?
            .parse()
            .map_err(|_| Error::BadBody)?,
        java: Java {
            minimum_version: value["version"]["java"]["version"]["minimum"]
                .as_u64()
                .ok_or(Error::BadBody)?,
            recommended_flags: value["version"]["java"]["flags"]["recommended"]
                .as_array()
                .ok_or(Error::BadBody)?
                .iter()
                .map(|v| v.as_str().map(String::from).ok_or(Error::BadBody))
                .collect::<Result<_, _>>()?,
        },
        // Build numbers only; full build info comes from the build endpoints.
        builds: value["builds"]
            .as_array()
            .ok_or(Error::BadBody)?
            .iter()
            .map(|v| v.as_u64().ok_or(Error::BadBody))
            .collect::<Result<_, _>>()?,
    })
}
/// Parse a `Build` from the JSON shape returned by the build endpoints.
/// Returns `Error::BadBody` if any expected field is missing or mistyped,
/// including unparsable RFC 3339 timestamps or unknown channel names.
fn parse_build_json(value: &Value) -> Result<Build, Error> {
    Ok(Build {
        id: value["id"].as_u64().ok_or(Error::BadBody)?,
        // RFC 3339 timestamp; `.into()` converts to the UTC type on `Build`.
        time: chrono::DateTime::parse_from_rfc3339(value["time"].as_str().ok_or(Error::BadBody)?)
            .map_err(|_| Error::BadBody)?
            .into(),
        // Parsed via BuildChannel's FromStr impl.
        channel: value["channel"]
            .as_str()
            .ok_or(Error::BadBody)?
            .parse()
            .map_err(|_| Error::BadBody)?,
        commits: value["commits"]
            .as_array()
            .ok_or(Error::BadBody)?
            .iter()
            .map(|build| {
                Ok(BuildCommit {
                    sha: build["sha"].as_str().ok_or(Error::BadBody)?.to_string(),
                    time: chrono::DateTime::parse_from_rfc3339(
                        build["time"].as_str().ok_or(Error::BadBody)?,
                    )
                    .map_err(|_| Error::BadBody)?
                    .into(),
                    message: build["message"].as_str().ok_or(Error::BadBody)?.to_string(),
                })
            })
            .collect::<Result<_, Error>>()?,
        // Downloads are keyed by download kind (e.g. "server:default").
        downloads: value["downloads"]
            .as_object()
            .ok_or(Error::BadBody)?
            .iter()
            .map(|(key, build)| {
                Ok((
                    key.to_string(),
                    BuildDownload {
                        name: build["name"].as_str().ok_or(Error::BadBody)?.to_string(),
                        size: build["size"].as_u64().ok_or(Error::BadBody)?,
                        url: build["url"].as_str().ok_or(Error::BadBody)?.to_string(),
                        checksums: build["checksums"]
                            .as_object()
                            .ok_or(Error::BadBody)?
                            .into_iter()
                            .map(|(key, value)| {
                                Ok((
                                    key.to_string(),
                                    value.as_str().ok_or(Error::BadBody)?.to_string(),
                                ))
                            })
                            .collect::<Result<_, Error>>()?,
                    },
                ))
            })
            .collect::<Result<_, Error>>()?,
    })
}

109
papermc-api/src/models.rs Normal file
View file

@ -0,0 +1,109 @@
use std::{collections::HashMap, fmt::Display, str::FromStr};
use chrono::{DateTime, Utc};
/// A project hosted by the PaperMC API (e.g. "paper").
#[derive(Debug)]
pub struct Project {
    /// Machine-readable project id.
    pub id: String,
    /// Human-readable project name.
    pub name: String,
    /// All version ids, flattened across the API's version groups.
    pub versions: Vec<String>,
}
/// Support status of a version, as reported by the PaperMC API.
#[derive(Debug)]
pub enum SupportStatus {
    Supported,
    Unsupported,
}

/// Error returned when parsing an unknown support-status string.
// Deriving Debug lets callers `unwrap`/`expect`/log the parse error.
#[derive(Debug)]
pub struct SupportStatusParseError;

impl FromStr for SupportStatus {
    type Err = SupportStatusParseError;

    /// Parses the API's uppercase status strings; anything else is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "SUPPORTED" => Ok(Self::Supported),
            "UNSUPPORTED" => Ok(Self::Unsupported),
            _ => Err(SupportStatusParseError),
        }
    }
}

impl Display for SupportStatus {
    /// Formats using the same uppercase form the API uses, so `Display`
    /// round-trips with `FromStr`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Supported => f.write_str("SUPPORTED"),
            Self::Unsupported => f.write_str("UNSUPPORTED"),
        }
    }
}
/// Java requirements advertised for a version.
#[derive(Debug)]
pub struct Java {
    /// Minimum Java version required (taken from `java.version.minimum`).
    pub minimum_version: u64,
    /// JVM flags recommended by the API for this version.
    pub recommended_flags: Vec<String>,
}

/// A single version of a project.
#[derive(Debug)]
pub struct Version {
    pub id: String,
    /// Whether the version is still supported upstream.
    pub support_status: SupportStatus,
    pub java: Java,
    /// Build numbers available for this version.
    pub builds: Vec<u64>,
}
/// A single build of a version.
#[derive(Debug)]
pub struct Build {
    pub id: u64,
    /// When the build was produced (stored in UTC).
    pub time: DateTime<Utc>,
    pub channel: BuildChannel,
    /// Commits included in this build.
    pub commits: Vec<BuildCommit>,
    /// Available downloads, keyed by download kind (e.g. "server:default").
    pub downloads: HashMap<String, BuildDownload>,
}
/// Release channel of a build, as reported by the PaperMC API.
#[derive(Debug)]
pub enum BuildChannel {
    Alpha,
    Beta,
    Stable,
}

/// Error returned when parsing an unknown build-channel string.
// Deriving Debug lets callers `unwrap`/`expect`/log the parse error.
#[derive(Debug)]
pub struct BuildChannelParseError;

impl FromStr for BuildChannel {
    type Err = BuildChannelParseError;

    /// Parses the API's uppercase channel names; anything else is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "STABLE" => Ok(Self::Stable),
            "BETA" => Ok(Self::Beta),
            "ALPHA" => Ok(Self::Alpha),
            _ => Err(BuildChannelParseError),
        }
    }
}

impl Display for BuildChannel {
    /// Formats using the same uppercase form the API uses, so `Display`
    /// round-trips with `FromStr`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Alpha => f.write_str("ALPHA"),
            Self::Beta => f.write_str("BETA"),
            Self::Stable => f.write_str("STABLE"),
        }
    }
}
/// A commit included in a build.
#[derive(Debug)]
pub struct BuildCommit {
    pub sha: String,
    /// Commit timestamp (stored in UTC).
    pub time: DateTime<Utc>,
    pub message: String,
}

/// A downloadable artifact of a build.
#[derive(Debug)]
pub struct BuildDownload {
    /// File name of the artifact.
    pub name: String,
    /// Checksums keyed by algorithm name.
    pub checksums: HashMap<String, String>,
    /// Size in bytes.
    pub size: u64,
    /// Direct download URL.
    pub url: String,
}