Cleanup and modernize

+ Bump to rust 2024
+ Cleaned up old code
+ Added more fail safes in case certain files are missing
This commit is contained in:
Joey Hines 2025-07-26 13:14:52 -06:00
parent 8378f2ead3
commit a9cf1d5c6f
Signed by: joeyahines
GPG Key ID: 38BA6F25C94C9382
13 changed files with 1999 additions and 864 deletions

2382
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,22 +1,24 @@
[package] [package]
name = "albatross" name = "albatross"
version = "0.5.0" version = "0.6.0"
authors = ["Joey Hines <joey@ahines.net>"] authors = ["Joey Hines <joey@ahines.net>"]
edition = "2018" edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
structopt = "0.3.20" structopt = "0.3.20"
serde = { version="1.0.116", features=["derive"] } serde = { version="1.0.116", features=["derive"] }
config = "0.9" config = "0.15.13"
log = "0.4.8" log = "0.4.8"
chrono = "0.4" chrono = "0.4"
regex = "1.3.9" regex = "1.3.9"
flate2 = "1.0.14" flate2 = "1.0.14"
tar = "0.4.28" tar = "0.4.28"
reqwest = { version = "0.10", features = ["blocking", "json"] } reqwest = { version = "0.12.22", features = ["blocking", "json"] }
discord-hooks-rs = { git = "https://github.com/joeyahines/discord-hooks-rs" } discord-hooks-rs = { git = "https://github.com/joeyahines/discord-hooks-rs" }
anvil-region = "0.4.0" anvil-region = "0.8.1"
ssh2 = "0.9.1" ssh2 = "0.9.1"
ftp = "3.0.1" ftp = "3.0.1"
env_logger = "0.11.8"
thiserror = "2.0.12"

View File

@ -1,19 +1,19 @@
use crate::backup;
use crate::config::{AlbatrossConfig, RemoteBackupConfig, WorldConfig, WorldType}; use crate::config::{AlbatrossConfig, RemoteBackupConfig, WorldConfig, WorldType};
use crate::discord::send_webhook; use crate::discord::send_webhook;
use crate::error::Result; use crate::error::Result;
use crate::region::Region; use crate::region::Region;
use crate::remote::RemoteBackupSite;
use crate::remote::file::FileBackup; use crate::remote::file::FileBackup;
use crate::remote::ftp::FTPBackup; use crate::remote::ftp::FTPBackup;
use crate::remote::sftp::SFTPBackup; use crate::remote::sftp::SFTPBackup;
use crate::remote::RemoteBackupSite;
use chrono::Utc; use chrono::Utc;
use flate2::Compression;
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
use flate2::write::GzEncoder; use flate2::write::GzEncoder;
use flate2::Compression; use log::{error, info, warn};
use std::convert::TryFrom; use std::convert::TryFrom;
use std::fs::{copy, create_dir, create_dir_all, remove_dir_all, rename, File}; use std::fs::{File, copy, create_dir, create_dir_all, remove_dir_all, rename};
use std::path::PathBuf; use std::path::{Path, PathBuf};
use std::time::Instant; use std::time::Instant;
use tar::Archive; use tar::Archive;
@ -23,13 +23,13 @@ use tar::Archive;
/// * `file_name` - file name /// * `file_name` - file name
/// * `world_path` - path to the world folder /// * `world_path` - path to the world folder
/// * `backup_path` - path to the backup folder /// * `backup_path` - path to the backup folder
pub fn backup_file( pub fn backup_file(file_name: &str, world_path: &Path, backup_path: &Path) -> Result<u64> {
file_name: &str, let world_path = world_path.join(file_name);
mut world_path: PathBuf, let backup_path = backup_path.join(file_name);
mut backup_path: PathBuf,
) -> Result<u64> { if !world_path.exists() {
world_path.push(file_name); warn!("File '{world_path:?}' does not exist.");
backup_path.push(file_name); }
Ok(copy(world_path, backup_path)?) Ok(copy(world_path, backup_path)?)
} }
@ -40,11 +40,15 @@ pub fn backup_file(
/// * `dir_name` - directory name /// * `dir_name` - directory name
/// * `world_path` - path to the world folder /// * `world_path` - path to the world folder
/// * `backup_path` - path to the backup folder /// * `backup_path` - path to the backup folder
pub fn backup_dir(dir_name: &str, world_path: &PathBuf, backup_path: &PathBuf) -> Result<u64> { pub fn backup_dir(dir_name: &str, world_path: &Path, backup_path: &Path) -> Result<u64> {
let mut src_dir = world_path.clone(); let src_dir = world_path.join(dir_name);
src_dir.push(dir_name);
let mut backup_dir = backup_path.clone(); if !src_dir.exists() {
backup_dir.push(dir_name); warn!("Directory '{dir_name}' does not exist in '{world_path:?}'");
return Ok(0);
}
let backup_dir = backup_path.join(dir_name);
create_dir(&backup_dir)?; create_dir(&backup_dir)?;
let mut file_count = 0; let mut file_count = 0;
@ -70,14 +74,18 @@ pub fn backup_dir(dir_name: &str, world_path: &PathBuf, backup_path: &PathBuf) -
pub fn backup_region( pub fn backup_region(
dir_name: &str, dir_name: &str,
save_radius: u64, save_radius: u64,
world_path: &PathBuf, world_path: &Path,
backup_path: &PathBuf, backup_path: &Path,
) -> Result<u64> { ) -> Result<u64> {
let mut count: u64 = 0; let mut count: u64 = 0;
let mut src_dir = world_path.clone(); let src_dir = world_path.join(dir_name);
src_dir.push(dir_name);
let mut backup_dir = backup_path.clone(); if !src_dir.exists() {
backup_dir.push(dir_name); warn!("Region directory '{dir_name}' does not exist in '{world_path:?}'");
return Ok(0);
}
let backup_dir = backup_path.join(dir_name);
create_dir(&backup_dir)?; create_dir(&backup_dir)?;
let save_radius = (save_radius as f64 / 512.0).ceil() as i64; let save_radius = (save_radius as f64 / 512.0).ceil() as i64;
@ -107,21 +115,15 @@ pub fn backup_region(
/// * `backup_path` - path to the backup folder /// * `backup_path` - path to the backup folder
/// * `world_config` - world config options /// * `world_config` - world config options
pub fn backup_world( pub fn backup_world(
world_path: PathBuf, world_path: &Path,
mut backup_path: PathBuf, backup_path: &Path,
world_config: &WorldConfig, world_config: &WorldConfig,
) -> Result<u64> { ) -> Result<u64> {
let region_count; let backup_path = backup_path.join(&world_config.world_name);
backup_path.push(&world_config.world_name);
create_dir(backup_path.as_path())?; create_dir(backup_path.as_path())?;
backup_region("poi", world_config.save_radius, &world_path, &backup_path)?; backup_region("poi", world_config.save_radius, world_path, &backup_path)?;
region_count = backup_region( let region_count = backup_region("region", world_config.save_radius, world_path, &backup_path)?;
"region",
world_config.save_radius,
&world_path,
&backup_path,
)?;
Ok(region_count) Ok(region_count)
} }
@ -132,19 +134,19 @@ pub fn backup_world(
/// * `backup_path` - path to the backup folder /// * `backup_path` - path to the backup folder
/// * `world_config` - world config options /// * `world_config` - world config options
pub fn backup_overworld( pub fn backup_overworld(
world_path: PathBuf, world_path: &Path,
backup_path: PathBuf, backup_path: &Path,
world_config: &WorldConfig, world_config: &WorldConfig,
) -> Result<(u64, u64)> { ) -> Result<(u64, u64)> {
backup_dir("data", &world_path, &backup_path)?; backup_dir("data", world_path, backup_path)?;
backup_dir("stats", &world_path, &backup_path).ok(); backup_dir("stats", world_path, backup_path).ok();
backup_file("level.dat", world_path.clone(), backup_path.clone())?; backup_file("level.dat", world_path, backup_path)?;
backup_file("level.dat_old", world_path.clone(), backup_path.clone()).ok(); backup_file("level.dat_old", world_path, backup_path).ok();
backup_file("session.lock", world_path.clone(), backup_path.clone()).ok(); backup_file("session.lock", world_path, backup_path).ok();
backup_file("uid.dat", world_path.clone(), backup_path.clone())?; backup_file("uid.dat", world_path, backup_path)?;
let player_count = backup_dir("playerdata", &world_path, &backup_path)?; let player_count = backup_dir("playerdata", world_path, backup_path)?;
let region_count = backup_world(world_path, backup_path, world_config)?; let region_count = backup_world(world_path, backup_path, world_config)?;
Ok((region_count, player_count)) Ok((region_count, player_count))
@ -157,14 +159,13 @@ pub fn backup_overworld(
/// * `backup_path` - path to the backup folder /// * `backup_path` - path to the backup folder
/// * `world_config` - world config options /// * `world_config` - world config options
pub fn backup_nether( pub fn backup_nether(
world_path: PathBuf, world_path: &Path,
backup_path: PathBuf, backup_path: &Path,
world_config: &WorldConfig, world_config: &WorldConfig,
) -> Result<u64> { ) -> Result<u64> {
let mut nether_path = world_path; let nether_path = world_path.join(WorldType::Nether.dim_name());
nether_path.push("DIM-1");
backup_world(nether_path, backup_path, world_config) backup_world(&nether_path, backup_path, world_config)
} }
/// Backup the end /// Backup the end
@ -174,14 +175,13 @@ pub fn backup_nether(
/// * `backup_path` - path to the backup folder /// * `backup_path` - path to the backup folder
/// * `world_config` - world config options /// * `world_config` - world config options
pub fn backup_end( pub fn backup_end(
world_path: PathBuf, world_path: &Path,
backup_path: PathBuf, backup_path: &Path,
world_config: &WorldConfig, world_config: &WorldConfig,
) -> Result<u64> { ) -> Result<u64> {
let mut end_path = world_path; let end_path = world_path.join(WorldType::End.dim_name());
end_path.push("DIM1");
backup_world(end_path, backup_path, world_config) backup_world(&end_path, backup_path, world_config)
} }
/// Compress the backup after the files have been copied /// Compress the backup after the files have been copied
@ -189,7 +189,7 @@ pub fn backup_end(
/// # Param /// # Param
/// * `tmp_dir`: tmp directory with the backed up files /// * `tmp_dir`: tmp directory with the backed up files
/// * `output_file`: output archive /// * `output_file`: output archive
pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<()> { pub fn compress_backup(tmp_dir: &Path, output_file: &Path) -> Result<()> {
let archive = File::create(output_file)?; let archive = File::create(output_file)?;
let enc = GzEncoder::new(archive, Compression::default()); let enc = GzEncoder::new(archive, Compression::default());
let mut tar_builder = tar::Builder::new(enc); let mut tar_builder = tar::Builder::new(enc);
@ -197,7 +197,7 @@ pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<()> {
Ok(()) Ok(())
} }
pub fn uncompress_backup(backup: &PathBuf) -> Result<PathBuf> { pub fn uncompress_backup(backup: &Path) -> Result<PathBuf> {
let backup_file = File::open(backup)?; let backup_file = File::open(backup)?;
let dec = GzDecoder::new(backup_file); let dec = GzDecoder::new(backup_file);
let mut extract = Archive::new(dec); let mut extract = Archive::new(dec);
@ -213,30 +213,23 @@ pub fn uncompress_backup(backup: &PathBuf) -> Result<PathBuf> {
/// * config - Albatross config /// * config - Albatross config
/// * backup - path of the backup to convert /// * backup - path of the backup to convert
/// * output - output path /// * output - output path
pub fn convert_backup_to_sp( pub fn convert_backup_to_sp(config: &AlbatrossConfig, backup: &Path, output: &Path) -> Result<()> {
config: &AlbatrossConfig,
backup: &PathBuf,
output: &PathBuf,
) -> Result<()> {
let extract_path = uncompress_backup(backup)?; let extract_path = uncompress_backup(backup)?;
if let Some(worlds) = &config.world_config { if let Some(worlds) = &config.world_config {
for world in worlds { for world in worlds {
let world_type = match world.world_type.clone() { let world_type = world.world_type.clone().unwrap_or(WorldType::Overworld);
Some(world_type) => world_type,
None => WorldType::OVERWORLD,
};
let src = PathBuf::from(&extract_path).join(&world.world_name); let src = PathBuf::from(&extract_path).join(&world.world_name);
let dest = PathBuf::from(&extract_path); let dest = PathBuf::from(&extract_path);
match world_type { match world_type {
WorldType::OVERWORLD => { WorldType::Overworld => {
rename(src.clone().join("poi"), dest.clone().join("poi"))?; rename(src.clone().join("poi"), dest.clone().join("poi"))?;
rename(src.clone().join("region"), dest.clone().join("region"))?; rename(src.clone().join("region"), dest.clone().join("region"))?;
} }
WorldType::NETHER => { WorldType::Nether => {
rename(src, dest.clone().join("DIM-1"))?; rename(src, dest.clone().join("DIM-1"))?;
} }
WorldType::END => { WorldType::End => {
rename(src, dest.clone().join("DIM1"))?; rename(src, dest.clone().join("DIM1"))?;
} }
} }
@ -262,8 +255,7 @@ pub fn do_remote_backup(
let mut ftps_backup = FTPBackup::new(config, remote_backup_cfg.backups_to_keep)?; let mut ftps_backup = FTPBackup::new(config, remote_backup_cfg.backups_to_keep)?;
ftps_backup.backup_to_remote(backup_path)?; ftps_backup.backup_to_remote(backup_path)?;
ftps_backup.cleanup()?; ftps_backup.cleanup()?;
} } else if let Some(config) = &remote_backup_cfg.file {
else if let Some(config) = &remote_backup_cfg.file {
let mut file_backup = FileBackup::new(config, remote_backup_cfg.backups_to_keep)?; let mut file_backup = FileBackup::new(config, remote_backup_cfg.backups_to_keep)?;
file_backup.backup_to_remote(backup_path)?; file_backup.backup_to_remote(backup_path)?;
file_backup.cleanup()?; file_backup.cleanup()?;
@ -280,7 +272,7 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<()> {
let server_base_dir = cfg.backup.minecraft_dir.clone(); let server_base_dir = cfg.backup.minecraft_dir.clone();
let worlds = cfg.world_config.clone().expect("No worlds configured"); let worlds = cfg.world_config.clone().expect("No worlds configured");
let time_str = Utc::now().format("%d-%m-%y_%H.%M.%S").to_string(); let time_str = Utc::now().format("%d-%m-%y_%H.%M.%S").to_string();
let backup_name = format!("{}_backup.tar.gz", time_str); let backup_name = format!("{time_str}_backup.tar.gz");
let mut output_archive = match output { let mut output_archive = match output {
Some(out_path) => out_path, Some(out_path) => out_path,
None => cfg.backup.output_config.path.clone(), None => cfg.backup.output_config.path.clone(),
@ -296,35 +288,36 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<()> {
send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg); send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg);
backup_worlds(&cfg, server_base_dir, worlds, &mut tmp_dir).map_err(|e| { backup_worlds(&cfg, server_base_dir, worlds, &tmp_dir).map_err(|e| {
send_webhook("Failed to copy worlds to backup location", &cfg); send_webhook("Failed to copy worlds to backup location", &cfg);
println!("Failed to copy worlds: {}", e); error!("Failed to copy worlds: {e}");
e e
})?; })?;
backup::compress_backup(&tmp_dir, &output_archive).map_err(|e| { compress_backup(&tmp_dir, &output_archive).map_err(|e| {
send_webhook("Failed to compress backup", &cfg); send_webhook("Failed to compress backup", &cfg);
println!("Failed to compress backup: {}", e); error!("Failed to compress backup: {e}");
e e
})?; })?;
remove_dir_all(&tmp_dir)?; remove_dir_all(&tmp_dir)?;
let mut local_backup = FileBackup::new(&cfg.backup.output_config, cfg.backup.backups_to_keep).unwrap(); let mut local_backup =
FileBackup::new(&cfg.backup.output_config, cfg.backup.backups_to_keep).unwrap();
match local_backup.cleanup() { match local_backup.cleanup() {
Ok(backups_removed) => { Ok(backups_removed) => {
if backups_removed > 0 { if backups_removed > 0 {
let msg = format!( let msg = format!(
"Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW", "Albatross mistook **{backups_removed}** of your old backups for some french fries and ate them!! SKRAWWWW"
backups_removed
); );
send_webhook(msg.as_str(), &cfg); send_webhook(msg.as_str(), &cfg);
info!("Removing {backups_removed} backups...")
} }
} }
Err(e) => { Err(e) => {
send_webhook("Failed to remove old backups!", &cfg); send_webhook("Failed to remove old backups!", &cfg);
println!("Failed to remove old backups: {}", e) error!("Failed to remove old backups: {e}")
} }
} }
@ -335,16 +328,17 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<()> {
} }
Err(e) => { Err(e) => {
send_webhook("Remote backup failed!", &cfg); send_webhook("Remote backup failed!", &cfg);
println!("Remote backup failed with error: {}", e); error!("Remote backup failed with error: {e}");
} }
} }
} }
let secs = timer.elapsed().as_secs(); let secs = timer.elapsed().as_secs();
send_webhook( send_webhook(
format!("**Full backup completed in {}s**! *SKREEEEEEEEEE*", secs).as_str(), format!("**Full backup completed in {secs}s**! *SKREEEEEEEEEE*").as_str(),
&cfg, &cfg,
); );
info!("Full backup completed in {secs}s!");
Ok(()) Ok(())
} }
@ -352,45 +346,38 @@ fn backup_worlds(
cfg: &AlbatrossConfig, cfg: &AlbatrossConfig,
server_base_dir: PathBuf, server_base_dir: PathBuf,
worlds: Vec<WorldConfig>, worlds: Vec<WorldConfig>,
tmp_dir: &mut PathBuf, tmp_dir: &Path,
) -> Result<()> { ) -> Result<()> {
for world in worlds { for world in worlds {
let mut world_dir = server_base_dir.clone(); let mut world_dir = server_base_dir.clone();
let world_name = world.world_name.clone(); let world_name = world.world_name.clone();
let world_type = match world.world_type.clone() { let world_type = world.world_type.clone().unwrap_or(WorldType::Overworld);
Some(world_type) => world_type,
None => WorldType::OVERWORLD,
};
world_dir.push(world_name.clone()); world_dir.push(world_name.clone());
if world_dir.exists() && world_dir.is_dir() { if world_dir.exists() && world_dir.is_dir() {
send_webhook( send_webhook(format!("Starting backup of **{world_name}**").as_str(), cfg);
format!("Starting backup of **{}**", world_name).as_str(), info!("Starting backup of {world_name}.");
&cfg,
);
let webhook_msg = match world_type { let webhook_msg = match world_type {
WorldType::OVERWORLD => { WorldType::Overworld => {
let (region_count, player_count) = let (region_count, player_count) =
backup_overworld(world_dir.clone(), tmp_dir.clone(), &world)?; backup_overworld(&world_dir.clone(), tmp_dir, &world)?;
format!( format!("{region_count} regions and {player_count} player files backed up.")
"{} regions and {} player files backed up.",
region_count, player_count
)
} }
WorldType::NETHER => { WorldType::Nether => {
let region_count = backup_nether(world_dir, tmp_dir.clone(), &world)?; let region_count = backup_nether(&world_dir, tmp_dir, &world)?;
format!("{} regions backed up.", region_count) format!("{region_count} regions backed up.")
} }
WorldType::END => { WorldType::End => {
let region_count = backup_end(world_dir, tmp_dir.clone(), &world)?; let region_count = backup_end(&world_dir, tmp_dir, &world)?;
format!("{} regions backed up.", region_count) format!("{region_count} regions backed up.")
} }
}; };
send_webhook(&webhook_msg, &cfg); send_webhook(&webhook_msg, cfg);
info!("{webhook_msg}");
} else { } else {
send_webhook(format!("Error: {} not found.", world_name).as_str(), &cfg); send_webhook(format!("Error: {world_name} not found.").as_str(), cfg);
println!("World \"{}\" not found", world_name.clone()); error!("World \"{world_name}\" not found");
} }
} }

View File

@ -1,41 +1,20 @@
use regex::Regex; use regex::Regex;
use std::error::Error;
use std::fmt;
use std::num::ParseIntError; use std::num::ParseIntError;
use std::str::FromStr; use std::str::FromStr;
use thiserror::Error;
/// Chunk error /// Chunk error
#[derive(Debug)] #[derive(Debug, Error)]
pub enum ChunkCoordinateErr { pub enum ChunkCoordinateErr {
/// Error parsing integer #[error("Failed to parse int: {0}")]
ParseIntError(ParseIntError), ParseIntError(#[from] ParseIntError),
/// Regex error #[error("Regex error: {0}")]
RegexError(regex::Error), RegexError(#[from] regex::Error),
/// Invalid chunk coordinate given #[error("Invalid chunk coordinate")]
InvalidChunk, InvalidChunk,
} }
impl From<ParseIntError> for ChunkCoordinateErr { /// Chunk Coordinate pair
fn from(e: ParseIntError) -> Self {
Self::ParseIntError(e)
}
}
impl From<regex::Error> for ChunkCoordinateErr {
fn from(e: regex::Error) -> Self {
Self::RegexError(e)
}
}
impl fmt::Display for ChunkCoordinateErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Unable to parse chunk range: {:?}", self)
}
}
impl Error for ChunkCoordinateErr {}
/// Chunk Coordinate paiir
#[derive(Debug)] #[derive(Debug)]
pub struct ChunkCoordinate { pub struct ChunkCoordinate {
/// X Coordinate /// X Coordinate
@ -48,7 +27,7 @@ impl FromStr for ChunkCoordinate {
type Err = ChunkCoordinateErr; type Err = ChunkCoordinateErr;
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
let re = Regex::new(r"\((?P<x>-?[0-9]*),(?P<z>-?[0-9]*)\)").unwrap(); let re = Regex::new(r"\((?P<x>-?[0-9]*),(?P<z>-?[0-9]*)\)")?;
if let Some(cap) = re.captures(s) { if let Some(cap) = re.captures(s) {
let x = cap["x"].parse::<i32>()?; let x = cap["x"].parse::<i32>()?;
@ -60,3 +39,26 @@ impl FromStr for ChunkCoordinate {
} }
} }
} }
#[cfg(test)]
mod test {
use crate::chunk_coordinate::ChunkCoordinate;
use std::str::FromStr;
#[test]
fn parse_chunk_coordinate_success() {
let chunk_x = 5;
let chunk_z = -15;
let chunk_string = format!("({chunk_x},{chunk_z})");
let chunk = ChunkCoordinate::from_str(chunk_string.as_str()).unwrap();
assert_eq!(chunk.x, chunk_x);
assert_eq!(chunk.z, chunk_z);
}
#[test]
fn parse_chunk_coordinate_failure() {
assert!(ChunkCoordinate::from_str("(nan,5)").is_err());
}
}

View File

@ -1,6 +1,6 @@
pub(crate) mod remote; pub(crate) mod remote;
use crate::config::remote::{FTPConfig, SFTPConfig, FileConfig}; use crate::config::remote::{FTPConfig, FileConfig, SFTPConfig};
use config::{Config, ConfigError, File}; use config::{Config, ConfigError, File};
use serde::Deserialize; use serde::Deserialize;
use std::path::PathBuf; use std::path::PathBuf;
@ -9,24 +9,38 @@ use std::path::PathBuf;
#[derive(Debug, Deserialize, Clone)] #[derive(Debug, Deserialize, Clone)]
pub enum WorldType { pub enum WorldType {
/// The End (DIM1) /// The End (DIM1)
END, End,
/// Nether (DIM-1) /// Nether (DIM-1)
NETHER, Nether,
/// Overworld /// Overworld
OVERWORLD, Overworld,
} }
impl From<String> for WorldType { impl From<String> for WorldType {
/// Convert config strings to WorldType /// Convert config strings to WorldType
fn from(string: String) -> Self { fn from(string: String) -> Self {
match string.as_str() { match string.as_str() {
"END" => WorldType::END, "END" => WorldType::End,
"NETHER" => WorldType::NETHER, "NETHER" => WorldType::Nether,
_ => WorldType::OVERWORLD, _ => WorldType::Overworld,
} }
} }
} }
impl WorldType {
pub fn dim_number(&self) -> i8 {
match self {
WorldType::End => -1,
WorldType::Nether => 1,
WorldType::Overworld => 0,
}
}
pub fn dim_name(&self) -> String {
format!("DIM{}", self.dim_number())
}
}
/// Config for individual world configuration /// Config for individual world configuration
#[derive(Debug, Deserialize, Clone)] #[derive(Debug, Deserialize, Clone)]
pub struct WorldConfig { pub struct WorldConfig {
@ -50,7 +64,7 @@ pub struct RemoteBackupConfig {
pub backups_to_keep: usize, pub backups_to_keep: usize,
pub sftp: Option<SFTPConfig>, pub sftp: Option<SFTPConfig>,
pub ftp: Option<FTPConfig>, pub ftp: Option<FTPConfig>,
pub file: Option<FileConfig> pub file: Option<FileConfig>,
} }
/// Configs /// Configs
@ -64,9 +78,10 @@ pub struct AlbatrossConfig {
impl AlbatrossConfig { impl AlbatrossConfig {
/// Create new backup from file /// Create new backup from file
pub fn new(config_path: &str) -> Result<Self, ConfigError> { pub fn new(config_path: &str) -> Result<Self, ConfigError> {
let mut cfg = Config::new(); let cfg = Config::builder()
cfg.merge(File::with_name(config_path))?; .add_source(File::with_name(config_path))
.build()?;
cfg.try_into() cfg.try_deserialize()
} }
} }

View File

@ -1,5 +1,6 @@
use crate::config::AlbatrossConfig; use crate::config::AlbatrossConfig;
use discord_hooks_rs::DiscordWebhook; use discord_hooks_rs::DiscordWebhook;
use log::{debug, error};
/// Sends a webhook to Discord if its configured /// Sends a webhook to Discord if its configured
/// ///
@ -11,6 +12,13 @@ pub fn send_webhook(msg: &str, cfg: &AlbatrossConfig) {
let json = DiscordWebhook::new().content(msg); let json = DiscordWebhook::new().content(msg);
let client = reqwest::blocking::Client::new(); let client = reqwest::blocking::Client::new();
client.post(webhook).json(&json).send().ok(); match client.post(webhook).json(&json).send() {
Ok(_) => {
debug!("Sent webhook with message '{msg}'")
}
Err(err) => {
error!("Failed to send webhook: '{err:?}'")
}
}
} }
} }

View File

@ -7,7 +7,7 @@ pub enum AlbatrossError {
FileError(std::io::Error), FileError(std::io::Error),
SSHError(ssh2::Error), SSHError(ssh2::Error),
ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr), ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr),
RegionParseError(crate::region::RegionParseError), RegionParseError(RegionParseError),
ChronoParseError(chrono::ParseError), ChronoParseError(chrono::ParseError),
NoSSHAuth, NoSSHAuth,
FTPError(ftp::FtpError), FTPError(ftp::FtpError),
@ -18,15 +18,15 @@ impl std::error::Error for AlbatrossError {}
impl std::fmt::Display for AlbatrossError { impl std::fmt::Display for AlbatrossError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {
AlbatrossError::FileError(e) => write!(f, "File I/O error: {}", e), AlbatrossError::FileError(e) => write!(f, "File I/O error: {e}"),
AlbatrossError::SSHError(e) => write!(f, "SSH error: {}", e), AlbatrossError::SSHError(e) => write!(f, "SSH error: {e}"),
AlbatrossError::ChunkParseError(e) => { AlbatrossError::ChunkParseError(e) => {
write!(f, "Unable to parse chunk coordinate: {}", e) write!(f, "Unable to parse chunk coordinate: {e}")
} }
AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {}", e), AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {e}"),
AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {}", e), AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {e}"),
AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"), AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"),
AlbatrossError::FTPError(e) => write!(f, "FTP error: {}", e), AlbatrossError::FTPError(e) => write!(f, "FTP error: {e}"),
} }
} }
} }

View File

@ -1,3 +1,4 @@
use log::{LevelFilter, info};
use std::path::PathBuf; use std::path::PathBuf;
use structopt::StructOpt; use structopt::StructOpt;
@ -63,6 +64,11 @@ enum SubCommand {
} }
fn main() { fn main() {
env_logger::builder()
.filter_level(LevelFilter::Info)
.parse_default_env()
.build();
let opt = Albatross::from_args(); let opt = Albatross::from_args();
let cfg = AlbatrossConfig::new(opt.config_path.into_os_string().to_str().unwrap()) let cfg = AlbatrossConfig::new(opt.config_path.into_os_string().to_str().unwrap())
@ -71,20 +77,20 @@ fn main() {
if cfg.world_config.is_some() { if cfg.world_config.is_some() {
match opt.sub_command { match opt.sub_command {
SubCommand::Backup { output } => { SubCommand::Backup { output } => {
println!("Starting backup"); info!("Starting backup");
match do_backup(cfg, output) { match do_backup(cfg, output) {
Ok(_) => println!("Backup complete!"), Ok(_) => info!("Backup complete!"),
Err(e) => println!("Error doing backup: {:?}", e), Err(e) => info!("Error doing backup: {e:?}"),
}; };
} }
SubCommand::Export { SubCommand::Export {
input_backup, input_backup,
output, output,
} => { } => {
println!("Starting export"); info!("Starting export");
match convert_backup_to_sp(&cfg, &input_backup, &output) { match convert_backup_to_sp(&cfg, &input_backup, &output) {
Ok(_) => println!("Export complete!"), Ok(_) => info!("Export complete!"),
Err(e) => println!("Error exporting backup: {:?}", e), Err(e) => info!("Error exporting backup: {e:?}"),
}; };
} }
SubCommand::Restore { SubCommand::Restore {
@ -94,7 +100,7 @@ fn main() {
chunk, chunk,
upper_bound, upper_bound,
} => { } => {
println!("Starting restore"); info!("Starting restore");
let server_directory = match server_directory { let server_directory = match server_directory {
Some(dir) => dir, Some(dir) => dir,
@ -109,8 +115,8 @@ fn main() {
&backup_path, &backup_path,
&server_directory, &server_directory,
) { ) {
Ok(count) => println!("Restored {} chunks!", count), Ok(count) => info!("Restored {count} chunks!"),
Err(e) => println!("Error restoring backup: {:?}", e), Err(e) => info!("Error restoring backup: {e:?}"),
}; };
} else { } else {
match restore_chunk_from_backup( match restore_chunk_from_backup(
@ -119,13 +125,13 @@ fn main() {
&backup_path, &backup_path,
&server_directory, &server_directory,
) { ) {
Ok(_) => println!("Restored chunk!"), Ok(_) => info!("Restored chunk!"),
Err(e) => println!("Error restoring backup: {:?}", e), Err(e) => info!("Error restoring backup: {e:?}"),
}; };
} }
} }
} }
} else { } else {
println!("No worlds specified in config file!") info!("No worlds specified in config file!")
} }
} }

View File

@ -1,18 +1,17 @@
use regex::Regex; use regex::Regex;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::error::Error; use std::num::ParseIntError;
use std::fmt; use thiserror::Error;
#[derive(Debug, Clone)] #[derive(Debug, Clone, Error)]
pub struct RegionParseError; pub enum RegionParseError {
#[error("Regex Error '{0}'")]
impl fmt::Display for RegionParseError { RegexError(#[from] regex::Error),
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { #[error("Int parse error '{0}'")]
write!(f, "Unable to parse region file name") IntParseError(#[from] ParseIntError),
#[error("Cannot parse region file name '{0}'")]
RegionNameParseFailure(String),
} }
}
impl Error for RegionParseError {}
/// Struct to store information about the region /// Struct to store information about the region
pub struct Region { pub struct Region {
@ -27,16 +26,41 @@ impl TryFrom<String> for Region {
/// Try from string /// Try from string
fn try_from(value: String) -> Result<Self, Self::Error> { fn try_from(value: String) -> Result<Self, Self::Error> {
let re = Regex::new(r"r\.(?P<x>-?[0-9]*)+\.(?P<y>-?[0-9]*)").unwrap(); let re = Regex::new(r"r\.(?P<x>-?[0-9]*)+\.(?P<y>-?[0-9]*)")?;
if re.is_match(&value) { if re.is_match(&value) {
let captures = re.captures(value.as_str()).unwrap(); let captures = re.captures(value.as_str()).unwrap();
return Ok(Region { return Ok(Region {
x: captures["x"].parse::<i64>().unwrap(), x: captures["x"].parse::<i64>()?,
y: captures["y"].parse::<i64>().unwrap(), y: captures["y"].parse::<i64>()?,
}); });
} }
Err(RegionParseError) Err(RegionParseError::RegionNameParseFailure(value))
}
}
#[cfg(test)]
mod test {
use crate::region::Region;
#[test]
fn test_parse_success() {
let region_x = 5;
let region_y = -15;
let region_string = format!("r.{region_x}.{region_y}");
let region = Region::try_from(region_string).unwrap();
assert_eq!(region.x, region_x);
assert_eq!(region.y, region_y);
}
#[test]
fn test_parse_failure() {
let region_y = -15;
let region_string = format!("r.pb.{region_y}");
assert!(Region::try_from(region_string).is_err());
} }
} }

View File

@ -1,9 +1,9 @@
use ftp::FtpStream; use ftp::FtpStream;
use std::path::PathBuf; use std::path::PathBuf;
use crate::config::remote::FTPConfig;
use crate::error; use crate::error;
use crate::remote::{PathLocation, RemoteBackupSite}; use crate::remote::{PathLocation, RemoteBackupSite};
use crate::config::remote::FTPConfig;
/// FTP Remote Site /// FTP Remote Site
pub struct FTPBackup { pub struct FTPBackup {

View File

@ -8,6 +8,7 @@ pub mod file;
pub mod ftp; pub mod ftp;
pub mod sftp; pub mod sftp;
#[allow(dead_code)]
pub trait RemoteBackupFile { pub trait RemoteBackupFile {
/// Type containing the location of the remote_backup backup /// Type containing the location of the remote_backup backup
type LocationType; type LocationType;
@ -16,14 +17,14 @@ pub trait RemoteBackupFile {
fn location(&self) -> Self::LocationType; fn location(&self) -> Self::LocationType;
/// Get the time the remote_backup file was created /// Get the time the remote_backup file was created
fn time_created(&self) -> chrono::NaiveDateTime; fn time_created(&self) -> NaiveDateTime;
/// Parse the time created from the file name /// Parse the time created from the file name
fn parse_file_name(file_name: &str) -> Option<chrono::NaiveDateTime> { fn parse_file_name(file_name: &str) -> Option<NaiveDateTime> {
let time: Vec<&str> = file_name.split("_backup.tar.gz").collect(); let time: Vec<&str> = file_name.split("_backup.tar.gz").collect();
if let Some(time_str) = time.get(0) { if let Some(time_str) = time.first() {
chrono::NaiveDateTime::parse_from_str(time_str, "%d-%m-%y_%H.%M.%S").ok() NaiveDateTime::parse_from_str(time_str, "%d-%m-%y_%H.%M.%S").ok()
} else { } else {
None None
} }
@ -34,7 +35,7 @@ pub trait RemoteBackupSite {
/// Struct representing the location of a backup on the site /// Struct representing the location of a backup on the site
type FileType: RemoteBackupFile; type FileType: RemoteBackupFile;
/// Backup a file to the the remote_backup site /// Backup a file to the remote_backup site
fn backup_to_remote(&mut self, file: PathBuf) -> Result<()>; fn backup_to_remote(&mut self, file: PathBuf) -> Result<()>;
/// Get the locations backups contained on the remote_backup site /// Get the locations backups contained on the remote_backup site
@ -80,17 +81,13 @@ impl PathLocation {
if let Some(file_name) = path.file_name() { if let Some(file_name) = path.file_name() {
let file_name = file_name.to_str().unwrap(); let file_name = file_name.to_str().unwrap();
if let Some(time) = Self::parse_file_name(file_name) { Self::parse_file_name(file_name).map(|time| Self {
Some(Self {
location: path, location: path,
time_created: time, time_created: time,
}) })
} else { } else {
None None
} }
} else {
None
}
} }
} }

View File

@ -3,10 +3,10 @@ use std::path::PathBuf;
use ssh2::Session; use ssh2::Session;
use crate::config::remote::SFTPConfig;
use crate::error; use crate::error;
use crate::error::AlbatrossError; use crate::error::AlbatrossError;
use crate::remote::{PathLocation, RemoteBackupSite}; use crate::remote::{PathLocation, RemoteBackupSite};
use crate::config::remote::SFTPConfig;
/// SFTP Remote Site /// SFTP Remote Site
pub struct SFTPBackup { pub struct SFTPBackup {
@ -69,7 +69,7 @@ impl RemoteBackupSite for SFTPBackup {
} }
fn remove_backup(&mut self, backup: Self::FileType) -> error::Result<()> { fn remove_backup(&mut self, backup: Self::FileType) -> error::Result<()> {
Ok(self.session.sftp()?.unlink(&*backup.location)?) Ok(self.session.sftp()?.unlink(&backup.location)?)
} }
fn backups_to_keep(&self) -> usize { fn backups_to_keep(&self) -> usize {

View File

@ -1,9 +1,10 @@
use crate::backup::uncompress_backup; use crate::backup::uncompress_backup;
use crate::chunk_coordinate::ChunkCoordinate; use crate::chunk_coordinate::ChunkCoordinate;
use crate::error::Result; use crate::error::Result;
use anvil_region::AnvilChunkProvider; use anvil_region::position::{RegionChunkPosition, RegionPosition};
use anvil_region::provider::{FolderRegionProvider, RegionProvider};
use std::fs::remove_dir_all; use std::fs::remove_dir_all;
use std::path::PathBuf; use std::path::{Path, PathBuf};
/// Struct for manipulating a world from a backup /// Struct for manipulating a world from a backup
struct RestoreAccess { struct RestoreAccess {
@ -15,7 +16,7 @@ struct RestoreAccess {
impl RestoreAccess { impl RestoreAccess {
/// Create new RestoreAccess /// Create new RestoreAccess
pub fn new(world_name: &str, src_path: &PathBuf, dest_path: &PathBuf) -> Result<Self> { pub fn new(world_name: &str, src_path: &Path, dest_path: &Path) -> Result<Self> {
let src_path = uncompress_backup(src_path)?.join(world_name).join("region"); let src_path = uncompress_backup(src_path)?.join(world_name).join("region");
let dest_path = dest_path.join(world_name).join("region"); let dest_path = dest_path.join(world_name).join("region");
@ -25,15 +26,24 @@ impl RestoreAccess {
}) })
} }
/// Copy the chunk at chunk coordinates (`x`, `z`) from the backup's
/// region directory into the live world's region directory.
///
/// # Panics
/// Panics with a descriptive message if either region path is not
/// valid UTF-8, a region file cannot be opened, or the chunk cannot
/// be read or written. (Previously the region lookups were bare
/// `unwrap()`s that panicked with no context.)
pub fn copy_chunk(&self, x: i32, z: i32) {
    // Translate absolute chunk coords into the owning region file and
    // the chunk's slot within that region.
    let region_position = RegionPosition::from_chunk_position(x, z);
    let region_chunk_position = RegionChunkPosition::from_chunk_position(x, z);

    let src_provider = FolderRegionProvider::new(
        self.src_path
            .to_str()
            .expect("backup region path is not valid UTF-8"),
    );
    let dest_provider = FolderRegionProvider::new(
        self.dest_path
            .to_str()
            .expect("world region path is not valid UTF-8"),
    );

    let mut src_region = src_provider
        .get_region(region_position)
        .expect("Unable to open source region file from backup");
    let src_chunk_compound_tag = src_region
        .read_chunk(region_chunk_position)
        .expect("Unable to load chunk");

    let mut dst_region = dest_provider
        .get_region(region_position)
        .expect("Unable to open destination region file in world");
    dst_region
        .write_chunk(region_chunk_position, src_chunk_compound_tag)
        .expect("Unable to write chunk");
}
/// Cleanup process /// Cleanup process
@ -47,8 +57,8 @@ pub fn restore_range_from_backup(
world_name: &str, world_name: &str,
lower: ChunkCoordinate, lower: ChunkCoordinate,
upper: ChunkCoordinate, upper: ChunkCoordinate,
backup_path: &PathBuf, backup_path: &Path,
minecraft_dir: &PathBuf, minecraft_dir: &Path,
) -> Result<u64> { ) -> Result<u64> {
let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?; let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?;
let mut count = 0; let mut count = 0;
@ -68,8 +78,8 @@ pub fn restore_range_from_backup(
pub fn restore_chunk_from_backup( pub fn restore_chunk_from_backup(
world_name: &str, world_name: &str,
chunk: ChunkCoordinate, chunk: ChunkCoordinate,
backup_path: &PathBuf, backup_path: &Path,
minecraft_dir: &PathBuf, minecraft_dir: &Path,
) -> Result<()> { ) -> Result<()> {
let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?; let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?;
chunk_access.copy_chunk(chunk.x, chunk.z); chunk_access.copy_chunk(chunk.x, chunk.z);