Compare commits
No commits in common. "38630a4d5718011032085cab104d5c00f711724e" and "320c600c5a5f9dfa5b16556f837956c1e11ba0bc" have entirely different histories.
38630a4d57 ... 320c600c5a
@@ -10,7 +10,7 @@ trigger:
 steps:
   - name: build
     pull: always
-    image: rust:1.55.0
+    image: rust:1.46.0
     commands:
       - cargo build --verbose
 
@@ -28,7 +28,7 @@ trigger:
 steps:
   - name: build
     pull: always
-    image: rust:1.55.0
+    image: rust:1.46.0
     commands:
       - cargo build --verbose --release
   - name: gitea-release
@@ -37,6 +37,6 @@ steps:
     settings:
       token:
         from_secret: gitea_token
-      base: https://git.canopymc.net
+      base: https://git.etztech.xyz
       files:
        - "target/release/albatross"
Cargo.lock (generated; 2433 lines changed)
File diff suppressed because it is too large.
Cargo.toml (14 lines changed)
@@ -1,24 +1,20 @@
 [package]
 name = "albatross"
-version = "0.6.1"
+version = "0.3.0"
 authors = ["Joey Hines <joey@ahines.net>"]
-edition = "2024"
+edition = "2018"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
 structopt = "0.3.20"
 serde = { version="1.0.116", features=["derive"] }
-config = "0.15.13"
+config = "0.9"
 log = "0.4.8"
 chrono = "0.4"
 regex = "1.3.9"
 flate2 = "1.0.14"
 tar = "0.4.28"
-reqwest = { version = "0.12.22", features = ["blocking", "json"] }
+reqwest = { version = "0.10", features = ["blocking", "json"] }
 discord-hooks-rs = { git = "https://github.com/joeyahines/discord-hooks-rs" }
-anvil-region = "0.8.1"
-ssh2 = "0.9.1"
-ftp = "3.0.1"
-env_logger = "0.11.8"
-thiserror = "2.0.12"
+anvil-region = "0.4.0"
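Both manifests keep `structopt = "0.3.20"`, which is what drives the `albatross <subcommand>` interface shown in the README. As a rough illustration of how a structopt-based entry point is typically wired up — a minimal sketch, not the crate's actual `main.rs` (that file appears further down in this diff) — consider:

```rust
use std::path::PathBuf;
use structopt::StructOpt;

/// Minimal CLI skeleton in the style of the albatross binary (illustrative only).
#[derive(StructOpt)]
#[structopt(name = "albatross", about = "Backup your Minecraft Server!")]
struct Opt {
    /// Path to the TOML config file
    #[structopt(short = "c", long = "config")]
    config_path: PathBuf,

    #[structopt(subcommand)]
    sub_command: SubCommand,
}

#[derive(StructOpt)]
enum SubCommand {
    /// Run a backup, optionally overriding the output location
    Backup { output: Option<PathBuf> },
}

fn main() {
    // Parse arguments from the process command line.
    let opt = Opt::from_args();
    println!("config: {:?}", opt.config_path);
    match opt.sub_command {
        SubCommand::Backup { output } => println!("backup to {:?}", output),
    }
}
```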
README.md (43 lines changed)
@@ -1,14 +1,12 @@
 # Albatross
 Back up what you care about in your Minecraft worlds.
 
 Albatross backs up player files and region files within a certain configurable radius. It can also send Discord
 webhooks. Backups are compressed and stored as `tar.gz` archives.
 
-Backups can also be transferred to a remote server using SFTP.
-
 ## Help
 ```
-albatross 0.4.0
+albatross 0.3.0
 Backup your Minecraft Server!
 
 USAGE:
@@ -42,47 +40,28 @@ Exporting a backup to a single player world:
 
 Restoring a single chunk (from -2,-2 to 2,2):
 
-`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz` (0,0)
+`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz (0,0)`
 
 Restoring a range of chunks (from -2,-2 to 2,2):
 
-`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz` (-2,-2) -u (2,2)
+`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz (-2,-2) -u (2,2)`
 
 ## Config
 ```toml
-# Local Backup Config
 [backup]
 # Minecraft sever directory
 minecraft_dir = "/home/mc/server"
-# Optional Discord webhook
-discord_webhook = "https://discordapp.com/api/webhooks/"
+# Directory to place backups
+output_dir = "/home/mc/backups"
 # Number of backups to keep
 backups_to_keep = 10
-
-[backup.output_config]
-# Directory to place backups
-path = "/home/mc/backups"
-
-# Optional remote_backup backup config
-[remote]
-# SFTP server host:port
-sftp_server_addr = "localhost:22"
-# Remote directory
-remote_dir = "/home/backup/"
-# Remote user
-username = "user"
-# Password Auth
-password = "cooluser123"
-# Key Auth
-#public_key = /home/user/.ssh/id_rsa.pub"
-#private_key = /home/user/.ssh/id_rsa"
-# Backups to keep on the remote_backup host
-backups_to_keep = 3
+# Discord Webhook
+discord_webhook = "https://discordapp.com/api/webhooks/"
 
 # World config options
 [[world_config]]
-# World name
+# world name
 world_name = "world"
-# World save radius (in blocks)
+# world save radius (in blocks)
 save_radius = 8000
 ```
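The `[backup]` and `[[world_config]]` tables above are parsed with the `config` crate plus serde. On the 38630a4d57 side (config 0.15) that is done through the builder API, as the config module later in this diff shows; a minimal standalone sketch of that loading pattern, with a hypothetical file name and trimmed-down structs:

```rust
use config::{Config, ConfigError, File};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct BackupSection {
    minecraft_dir: std::path::PathBuf,
    backups_to_keep: usize,
}

#[derive(Debug, Deserialize)]
struct Settings {
    backup: BackupSection,
}

fn load(config_path: &str) -> Result<Settings, ConfigError> {
    // config 0.15 style: register sources, build, then deserialize into serde structs.
    let cfg = Config::builder()
        .add_source(File::with_name(config_path))
        .build()?;
    cfg.try_deserialize()
}

fn main() {
    // Hypothetical path; any TOML file with a [backup] table would do.
    match load("albatross.toml") {
        Ok(settings) => println!("{:?}", settings),
        Err(e) => eprintln!("config error: {e}"),
    }
}
```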
src/backup.rs (361 lines changed)
@@ -1,19 +1,16 @@
-use crate::config::{AlbatrossConfig, RemoteBackupConfig, WorldConfig, WorldType};
+use crate::backup;
+use crate::config::{AlbatrossConfig, WorldConfig, WorldType};
 use crate::discord::send_webhook;
-use crate::error::Result;
 use crate::region::Region;
-use crate::remote::RemoteBackupSite;
-use crate::remote::file::FileBackup;
-use crate::remote::ftp::FTPBackup;
-use crate::remote::sftp::SFTPBackup;
-use chrono::Utc;
-use flate2::Compression;
+use chrono::{NaiveDateTime, Utc};
 use flate2::read::GzDecoder;
 use flate2::write::GzEncoder;
-use log::{error, info, warn};
+use flate2::Compression;
 use std::convert::TryFrom;
-use std::fs::{File, copy, create_dir, create_dir_all, remove_dir_all, rename};
-use std::path::{Path, PathBuf};
+use std::fs::{
+    copy, create_dir, create_dir_all, remove_dir_all, remove_file, rename, DirEntry, File,
+};
+use std::path::PathBuf;
 use std::time::Instant;
 use tar::Archive;
 
@@ -23,15 +20,15 @@ use tar::Archive;
 /// * `file_name` - file name
 /// * `world_path` - path to the world folder
 /// * `backup_path` - path to the backup folder
-pub fn backup_file(file_name: &str, world_path: &Path, backup_path: &Path) -> Result<u64> {
-    let world_path = world_path.join(file_name);
-    let backup_path = backup_path.join(file_name);
+pub fn backup_file(
+    file_name: &str,
+    mut world_path: PathBuf,
+    mut backup_path: PathBuf,
+) -> Result<u64, std::io::Error> {
+    world_path.push(file_name);
+    backup_path.push(file_name);
 
-    if !world_path.exists() {
-        warn!("File '{world_path:?}' does not exist.");
-    }
-
-    Ok(copy(world_path, backup_path)?)
+    copy(world_path, backup_path)
 }
 
 /// Backup a directory
@@ -40,15 +37,15 @@ pub fn backup_file(file_name: &str, world_path: &Path, backup_path: &Path) -> Re
 /// * `dir_name` - directory name
 /// * `world_path` - path to the world folder
 /// * `backup_path` - path to the backup folder
-pub fn backup_dir(dir_name: &str, world_path: &Path, backup_path: &Path) -> Result<u64> {
-    let src_dir = world_path.join(dir_name);
-
-    if !src_dir.exists() {
-        warn!("Directory '{dir_name}' does not exist in '{world_path:?}'");
-        return Ok(0);
-    }
-
-    let backup_dir = backup_path.join(dir_name);
+pub fn backup_dir(
+    dir_name: &str,
+    world_path: &PathBuf,
+    backup_path: &PathBuf,
+) -> Result<u64, std::io::Error> {
+    let mut src_dir = world_path.clone();
+    src_dir.push(dir_name);
+    let mut backup_dir = backup_path.clone();
+    backup_dir.push(dir_name);
     create_dir(&backup_dir)?;
 
     let mut file_count = 0;
@@ -74,18 +71,14 @@ pub fn backup_dir(dir_name: &str, world_path: &Path, backup_path: &Path) -> Resu
 pub fn backup_region(
     dir_name: &str,
     save_radius: u64,
-    world_path: &Path,
-    backup_path: &Path,
-) -> Result<u64> {
+    world_path: &PathBuf,
+    backup_path: &PathBuf,
+) -> Result<u64, std::io::Error> {
     let mut count: u64 = 0;
-    let src_dir = world_path.join(dir_name);
-
-    if !src_dir.exists() {
-        warn!("Region directory '{dir_name}' does not exist in '{world_path:?}'");
-        return Ok(0);
-    }
-
-    let backup_dir = backup_path.join(dir_name);
+    let mut src_dir = world_path.clone();
+    src_dir.push(dir_name);
+    let mut backup_dir = backup_path.clone();
+    backup_dir.push(dir_name);
 
     create_dir(&backup_dir)?;
 
     let save_radius = (save_radius as f64 / 512.0).ceil() as i64;
@@ -115,15 +108,21 @@ pub fn backup_region(
 /// * `backup_path` - path to the backup folder
 /// * `world_config` - world config options
 pub fn backup_world(
-    world_path: &Path,
-    backup_path: &Path,
+    world_path: PathBuf,
+    mut backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<u64> {
-    let backup_path = backup_path.join(&world_config.world_name);
+) -> Result<u64, std::io::Error> {
+    let region_count;
+    backup_path.push(&world_config.world_name);
     create_dir(backup_path.as_path())?;
 
-    backup_region("poi", world_config.save_radius, world_path, &backup_path)?;
-    let region_count = backup_region("region", world_config.save_radius, world_path, &backup_path)?;
+    backup_region("poi", world_config.save_radius, &world_path, &backup_path)?;
+    region_count = backup_region(
+        "region",
+        world_config.save_radius,
+        &world_path,
+        &backup_path,
+    )?;
     Ok(region_count)
 }
 
@@ -134,19 +133,19 @@ pub fn backup_world(
 /// * `backup_path` - path to the backup folder
 /// * `world_config` - world config options
 pub fn backup_overworld(
-    world_path: &Path,
-    backup_path: &Path,
+    world_path: PathBuf,
+    backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<(u64, u64)> {
-    backup_dir("data", world_path, backup_path)?;
-    backup_dir("stats", world_path, backup_path).ok();
+) -> Result<(u64, u64), std::io::Error> {
+    backup_dir("data", &world_path, &backup_path)?;
+    backup_dir("stats", &world_path, &backup_path)?;
 
-    backup_file("level.dat", world_path, backup_path)?;
-    backup_file("level.dat_old", world_path, backup_path).ok();
-    backup_file("session.lock", world_path, backup_path).ok();
-    backup_file("uid.dat", world_path, backup_path)?;
+    backup_file("level.dat", world_path.clone(), backup_path.clone())?;
+    backup_file("level.dat_old", world_path.clone(), backup_path.clone())?;
+    backup_file("session.lock", world_path.clone(), backup_path.clone())?;
+    backup_file("uid.dat", world_path.clone(), backup_path.clone())?;
 
-    let player_count = backup_dir("playerdata", world_path, backup_path)?;
+    let player_count = backup_dir("playerdata", &world_path, &backup_path)?;
     let region_count = backup_world(world_path, backup_path, world_config)?;
 
     Ok((region_count, player_count))
@@ -159,13 +158,14 @@ pub fn backup_overworld(
 /// * `backup_path` - path to the backup folder
 /// * `world_config` - world config options
 pub fn backup_nether(
-    world_path: &Path,
-    backup_path: &Path,
+    world_path: PathBuf,
+    backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<u64> {
-    let nether_path = world_path.join(WorldType::Nether.dim_name());
+) -> Result<u64, std::io::Error> {
+    let mut nether_path = world_path;
+    nether_path.push("DIM-1");
 
-    backup_world(&nether_path, backup_path, world_config)
+    backup_world(nether_path, backup_path, world_config)
 }
 
 /// Backup the end
@@ -175,13 +175,14 @@ pub fn backup_nether(
 /// * `backup_path` - path to the backup folder
 /// * `world_config` - world config options
 pub fn backup_end(
-    world_path: &Path,
-    backup_path: &Path,
+    world_path: PathBuf,
+    backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<u64> {
-    let end_path = world_path.join(WorldType::End.dim_name());
+) -> Result<u64, std::io::Error> {
+    let mut end_path = world_path;
+    end_path.push("DIM1");
 
-    backup_world(&end_path, backup_path, world_config)
+    backup_world(end_path, backup_path, world_config)
 }
 
 /// Compress the backup after the files have been copied
@@ -189,7 +190,7 @@ pub fn backup_end(
 /// # Param
 /// * `tmp_dir`: tmp directory with the backed up files
 /// * `output_file`: output archive
-pub fn compress_backup(tmp_dir: &Path, output_file: &Path) -> Result<()> {
+pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<(), std::io::Error> {
     let archive = File::create(output_file)?;
     let enc = GzEncoder::new(archive, Compression::default());
     let mut tar_builder = tar::Builder::new(enc);
@@ -197,7 +198,7 @@ pub fn compress_backup(tmp_dir: &Path, output_file: &Path) -> Result<()> {
     Ok(())
 }
 
-pub fn uncompress_backup(backup: &Path) -> Result<PathBuf> {
+pub fn uncompress_backup(backup: &PathBuf) -> Result<PathBuf, std::io::Error> {
     let backup_file = File::open(backup)?;
     let dec = GzDecoder::new(backup_file);
     let mut extract = Archive::new(dec);
@@ -213,23 +214,30 @@ pub fn uncompress_backup(backup: &Path) -> Result<PathBuf> {
 /// * config - Albatross config
 /// * backup - path of the backup to convert
 /// * output - output path
-pub fn convert_backup_to_sp(config: &AlbatrossConfig, backup: &Path, output: &Path) -> Result<()> {
+pub fn convert_backup_to_sp(
+    config: &AlbatrossConfig,
+    backup: &PathBuf,
+    output: &PathBuf,
+) -> Result<(), std::io::Error> {
     let extract_path = uncompress_backup(backup)?;
 
     if let Some(worlds) = &config.world_config {
         for world in worlds {
-            let world_type = world.world_type.clone().unwrap_or(WorldType::Overworld);
+            let world_type = match world.world_type.clone() {
+                Some(world_type) => world_type,
+                None => WorldType::OVERWORLD,
+            };
             let src = PathBuf::from(&extract_path).join(&world.world_name);
             let dest = PathBuf::from(&extract_path);
             match world_type {
-                WorldType::Overworld => {
+                WorldType::OVERWORLD => {
                     rename(src.clone().join("poi"), dest.clone().join("poi"))?;
                     rename(src.clone().join("region"), dest.clone().join("region"))?;
                 }
-                WorldType::Nether => {
+                WorldType::NETHER => {
                     rename(src, dest.clone().join("DIM-1"))?;
                 }
-                WorldType::End => {
+                WorldType::END => {
                     rename(src, dest.clone().join("DIM1"))?;
                 }
             }
@@ -242,144 +250,137 @@ pub fn convert_backup_to_sp(config: &AlbatrossConfig, backup: &Path, output: &Pa
     Ok(())
 }
 
-/// Preform a remote_backup backup, if configured
-pub fn do_remote_backup(
-    remote_backup_cfg: &RemoteBackupConfig,
-    backup_path: PathBuf,
-) -> Result<()> {
-    if let Some(config) = &remote_backup_cfg.sftp {
-        let mut sftp_backup = SFTPBackup::new(config, remote_backup_cfg.backups_to_keep)?;
-        sftp_backup.backup_to_remote(backup_path)?;
-        sftp_backup.cleanup()?;
-    } else if let Some(config) = &remote_backup_cfg.ftp {
-        let mut ftps_backup = FTPBackup::new(config, remote_backup_cfg.backups_to_keep)?;
-        ftps_backup.backup_to_remote(backup_path)?;
-        ftps_backup.cleanup()?;
-    } else if let Some(config) = &remote_backup_cfg.file {
-        let mut file_backup = FileBackup::new(config, remote_backup_cfg.backups_to_keep)?;
-        file_backup.backup_to_remote(backup_path)?;
-        file_backup.cleanup()?;
+/// Get the time of the backup from a file name
+///
+/// # Param
+/// * `archive_entry`: archive entry
+fn get_time_from_file_name(
+    archive_entry: &DirEntry,
+) -> Result<Option<NaiveDateTime>, std::io::Error> {
+    let file_name = archive_entry.file_name().to_str().unwrap().to_string();
+    let name: Vec<&str> = file_name.split("_backup.tar.gz").collect();
+
+    Ok(chrono::NaiveDateTime::parse_from_str(name[0], "%d-%m-%y_%H.%M.%S").ok())
+}
+
+/// Removes the old backups from the ouput directory
+///
+/// # Params
+/// * `output_dir` - output directory containing
+/// * `keep` - number of backups to keep
+fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result<usize, std::io::Error> {
+    let mut backups = vec![];
+    let mut num_of_removed_backups: usize = 0;
+
+    for entry in output_dir.read_dir()? {
+        let entry = entry?;
+
+        if let Some(ext) = entry.path().extension() {
+            if ext == "gz" {
+                backups.push(entry);
+            }
+        }
     }
 
-    Ok(())
+    if backups.len() > keep as usize {
+        backups.sort_by(|a, b| {
+            let a_time = get_time_from_file_name(a).unwrap().unwrap();
+            let b_time = get_time_from_file_name(b).unwrap().unwrap();
+
+            b_time.cmp(&a_time)
+        });
+
+        num_of_removed_backups = backups.len() - keep as usize;
+
+        for _i in 0..num_of_removed_backups {
+            let oldest = backups.pop().unwrap();
+            remove_file(oldest.path())?;
+        }
+    }
+
+    Ok(num_of_removed_backups)
 }
 
 /// Backup the configured worlds from a minecraft server
 ///
 /// # Params
 /// * `cfg` - config file
-pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<()> {
+pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<(), std::io::Error> {
     let server_base_dir = cfg.backup.minecraft_dir.clone();
     let worlds = cfg.world_config.clone().expect("No worlds configured");
     let time_str = Utc::now().format("%d-%m-%y_%H.%M.%S").to_string();
-    let backup_name = format!("{time_str}_backup.tar.gz");
+    let backup_name = format!("{}_backup.tar.gz", time_str);
     let mut output_archive = match output {
         Some(out_path) => out_path,
-        None => cfg.backup.output_config.path.clone(),
+        None => cfg.backup.output_dir.clone(),
     };
     output_archive.push(backup_name);
-    let mut tmp_dir = cfg.backup.output_config.path.clone();
+    let mut tmp_dir = cfg.backup.output_dir.clone();
     tmp_dir.push("tmp");
     remove_dir_all(&tmp_dir).ok();
 
-    create_dir_all(tmp_dir.clone())?;
-
-    let timer = Instant::now();
+    create_dir_all(tmp_dir.clone()).unwrap();
 
     send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg);
+    let timer = Instant::now();
+    for world in worlds {
+        let mut world_dir = server_base_dir.clone();
+        let world_name = world.world_name.clone();
+        let world_type = match world.world_type.clone() {
+            Some(world_type) => world_type,
+            None => WorldType::OVERWORLD,
+        };
+        world_dir.push(world_name.clone());
 
-    backup_worlds(&cfg, server_base_dir, worlds, &tmp_dir).map_err(|e| {
-        send_webhook("Failed to copy worlds to backup location", &cfg);
-        error!("Failed to copy worlds: {e}");
-        e
-    })?;
+        if world_dir.exists() && world_dir.is_dir() {
+            send_webhook(
+                format!("Starting backup of **{}**", world_name).as_str(),
+                &cfg,
+            );
+            let webhook_msg = match world_type {
+                WorldType::OVERWORLD => {
+                    let (region_count, player_count) =
+                        backup_overworld(world_dir.clone(), tmp_dir.clone(), &world)?;
+                    format!(
+                        "{} regions and {} player files backed up.",
+                        region_count, player_count
+                    )
+                }
+                WorldType::NETHER => {
+                    let region_count = backup_nether(world_dir, tmp_dir.clone(), &world)?;
+                    format!("{} regions backed up.", region_count)
+                }
+                WorldType::END => {
+                    let region_count = backup_end(world_dir, tmp_dir.clone(), &world)?;
+                    format!("{} regions backed up.", region_count)
+                }
+            };
 
-    compress_backup(&tmp_dir, &output_archive).map_err(|e| {
-        send_webhook("Failed to compress backup", &cfg);
-        error!("Failed to compress backup: {e}");
-        e
-    })?;
-
-    remove_dir_all(&tmp_dir)?;
-
-    let mut local_backup =
-        FileBackup::new(&cfg.backup.output_config, cfg.backup.backups_to_keep).unwrap();
-
-    match local_backup.cleanup() {
-        Ok(backups_removed) => {
-            if backups_removed > 0 {
-                let msg = format!(
-                    "Albatross mistook **{backups_removed}** of your old backups for some french fries and ate them!! SKRAWWWW"
-                );
-                send_webhook(msg.as_str(), &cfg);
-                info!("Removing {backups_removed} backups...")
-            }
-        }
-        Err(e) => {
-            send_webhook("Failed to remove old backups!", &cfg);
-            error!("Failed to remove old backups: {e}")
+            send_webhook(&webhook_msg, &cfg);
+        } else {
+            send_webhook(format!("Error: {} not found.", world_name).as_str(), &cfg);
+            println!("World \"{}\" not found", world_name.clone());
         }
     }
 
-    if let Some(remote_backup_config) = &cfg.remote {
-        match do_remote_backup(remote_backup_config, output_archive) {
-            Ok(_) => {
-                send_webhook("Remote backup completed!", &cfg);
-            }
-            Err(e) => {
-                send_webhook("Remote backup failed!", &cfg);
-                error!("Remote backup failed with error: {e}");
-            }
-        }
+    backup::compress_backup(&tmp_dir, &output_archive)?;
+
+    remove_dir_all(&tmp_dir)?;
+
+    let backups_removed = remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep)?;
+
+    if backups_removed > 0 {
+        let msg = format!(
+            "Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW",
+            backups_removed
+        );
+        send_webhook(msg.as_str(), &cfg);
     }
 
     let secs = timer.elapsed().as_secs();
     send_webhook(
-        format!("**Full backup completed in {secs}s**! *SKREEEEEEEEEE*").as_str(),
+        format!("**Full backup completed in {}s**! *SKREEEEEEEEEE*", secs).as_str(),
         &cfg,
     );
-    info!("Full backup completed in {secs}s!");
-    Ok(())
-}
-
-fn backup_worlds(
-    cfg: &AlbatrossConfig,
-    server_base_dir: PathBuf,
-    worlds: Vec<WorldConfig>,
-    tmp_dir: &Path,
-) -> Result<()> {
-    for world in worlds {
-        let mut world_dir = server_base_dir.clone();
-        let world_name = world.world_name.clone();
-        let world_type = world.world_type.clone().unwrap_or(WorldType::Overworld);
-        world_dir.push(world_name.clone());
-
-        if world_dir.exists() && world_dir.is_dir() {
-            send_webhook(format!("Starting backup of **{world_name}**").as_str(), cfg);
-            info!("Starting backup of {world_name}.");
-            let webhook_msg = match world_type {
-                WorldType::Overworld => {
-                    let (region_count, player_count) =
-                        backup_overworld(&world_dir.clone(), tmp_dir, &world)?;
-                    format!("{region_count} regions and {player_count} player files backed up.")
-                }
-                WorldType::Nether => {
-                    let region_count = backup_nether(&world_dir, tmp_dir, &world)?;
-                    format!("{region_count} regions backed up.")
-                }
-                WorldType::End => {
-                    let region_count = backup_end(&world_dir, tmp_dir, &world)?;
-                    format!("{region_count} regions backed up.")
-                }
-            };
-
-            send_webhook(&webhook_msg, cfg);
-            info!("{webhook_msg}");
-        } else {
-            send_webhook(format!("Error: {world_name} not found.").as_str(), cfg);
-            error!("World \"{world_name}\" not found");
-        }
-    }
 
     Ok(())
 }
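Both versions of `compress_backup`/`uncompress_backup` above use the same flate2 + tar pipeline; only the signatures differ. A self-contained sketch of that archive round trip, with illustrative paths rather than the crate's own temp-directory layout:

```rust
use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use flate2::Compression;
use std::fs::File;
use std::path::Path;
use tar::Archive;

/// Pack `src_dir` into a `.tar.gz` archive, mirroring compress_backup above.
fn pack(src_dir: &Path, archive_path: &Path) -> std::io::Result<()> {
    let archive = File::create(archive_path)?;
    let enc = GzEncoder::new(archive, Compression::default());
    let mut tar_builder = tar::Builder::new(enc);
    // Store the directory contents under "." inside the archive.
    tar_builder.append_dir_all(".", src_dir)?;
    tar_builder.finish()?;
    Ok(())
}

/// Unpack a `.tar.gz` archive into `dest_dir`, mirroring uncompress_backup above.
fn unpack(archive_path: &Path, dest_dir: &Path) -> std::io::Result<()> {
    let backup_file = File::open(archive_path)?;
    let dec = GzDecoder::new(backup_file);
    let mut extract = Archive::new(dec);
    extract.unpack(dest_dir)?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    // Hypothetical locations used purely for illustration.
    pack(Path::new("tmp"), Path::new("backup.tar.gz"))?;
    unpack(Path::new("backup.tar.gz"), Path::new("restored"))?;
    Ok(())
}
```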
@@ -1,20 +1,41 @@
 use regex::Regex;
+use std::error::Error;
+use std::fmt;
 use std::num::ParseIntError;
 use std::str::FromStr;
-use thiserror::Error;
 
 /// Chunk error
-#[derive(Debug, Error)]
+#[derive(Debug)]
 pub enum ChunkCoordinateErr {
-    #[error("Failed to parse int: {0}")]
-    ParseIntError(#[from] ParseIntError),
-    #[error("Regex error: {0}")]
-    RegexError(#[from] regex::Error),
-    #[error("Invalid chunk coordinate")]
+    /// Error parsing integer
+    ParseIntError(ParseIntError),
+    /// Regex error
+    RegexError(regex::Error),
+    /// Invalid chunk coordinate given
     InvalidChunk,
 }
 
-/// Chunk Coordinate pair
+impl From<ParseIntError> for ChunkCoordinateErr {
+    fn from(e: ParseIntError) -> Self {
+        Self::ParseIntError(e)
+    }
+}
+
+impl From<regex::Error> for ChunkCoordinateErr {
+    fn from(e: regex::Error) -> Self {
+        Self::RegexError(e)
+    }
+}
+
+impl fmt::Display for ChunkCoordinateErr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Unable to parse chunk range: {:?}", self)
+    }
+}
+
+impl Error for ChunkCoordinateErr {}
+
+/// Chunk Coordinate paiir
 #[derive(Debug)]
 pub struct ChunkCoordinate {
     /// X Coordinate
@@ -27,7 +48,7 @@ impl FromStr for ChunkCoordinate {
     type Err = ChunkCoordinateErr;
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
-        let re = Regex::new(r"\((?P<x>-?[0-9]*),(?P<z>-?[0-9]*)\)")?;
+        let re = Regex::new(r"\((?P<x>-?[0-9]*),(?P<z>-?[0-9]*)\)").unwrap();
 
         if let Some(cap) = re.captures(s) {
             let x = cap["x"].parse::<i32>()?;
@@ -39,26 +60,3 @@ impl FromStr for ChunkCoordinate {
         }
     }
 }
-
-#[cfg(test)]
-mod test {
-    use crate::chunk_coordinate::ChunkCoordinate;
-    use std::str::FromStr;
-
-    #[test]
-    fn parse_chunk_coordinate_success() {
-        let chunk_x = 5;
-        let chunk_z = -15;
-        let chunk_string = format!("({chunk_x},{chunk_z})");
-
-        let chunk = ChunkCoordinate::from_str(chunk_string.as_str()).unwrap();
-
-        assert_eq!(chunk.x, chunk_x);
-        assert_eq!(chunk.z, chunk_z);
-    }
-
-    #[test]
-    fn parse_chunk_coordinate_failure() {
-        assert!(ChunkCoordinate::from_str("(nan,5)").is_err());
-    }
-}
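The 38630a4d57 side replaces the hand-written `Display`, `Error`, and `From` impls with `thiserror` derives, as removed above. A compact sketch of how `#[error(...)]` and `#[from]` stand in for that boilerplate — a standalone example with a hypothetical `ParseCoordError`, not the crate's exact `ChunkCoordinateErr`:

```rust
use std::num::ParseIntError;
use thiserror::Error;

// One derive replaces the manual Display, std::error::Error, and From impls.
#[derive(Debug, Error)]
enum ParseCoordError {
    #[error("Failed to parse int: {0}")]
    ParseIntError(#[from] ParseIntError),
    #[error("Invalid chunk coordinate")]
    InvalidChunk,
}

fn parse_pair(s: &str) -> Result<(i32, i32), ParseCoordError> {
    let (x, z) = s
        .trim_matches(|c| c == '(' || c == ')')
        .split_once(',')
        .ok_or(ParseCoordError::InvalidChunk)?;
    // `?` uses the #[from] conversion from ParseIntError automatically.
    Ok((x.parse()?, z.parse()?))
}

fn main() {
    assert_eq!(parse_pair("(5,-15)").unwrap(), (5, -15));
    assert!(parse_pair("(nan,5)").is_err());
}
```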
src/config.rs (new file; 59 lines)
@@ -0,0 +1,59 @@
+use config::{Config, ConfigError, File};
+use serde::Deserialize;
+use std::path::PathBuf;
+
+/// World types supported
+#[derive(Debug, Deserialize, Clone)]
+pub enum WorldType {
+    /// The End (DIM1)
+    END,
+    /// Nether (DIM-1)
+    NETHER,
+    /// Overworld
+    OVERWORLD,
+}
+
+impl From<String> for WorldType {
+    /// Convert config strings to WorldType
+    fn from(string: String) -> Self {
+        match string.as_str() {
+            "END" => WorldType::END,
+            "NETHER" => WorldType::NETHER,
+            _ => WorldType::OVERWORLD,
+        }
+    }
+}
+
+/// Config for individual WorldConfig
+#[derive(Debug, Deserialize, Clone)]
+pub struct WorldConfig {
+    pub world_name: String,
+    pub save_radius: u64,
+    pub world_type: Option<WorldType>,
+}
+
+/// Config for doing backups
+#[derive(Debug, Deserialize, Clone)]
+pub struct BackupConfig {
+    pub minecraft_dir: PathBuf,
+    pub output_dir: PathBuf,
+    pub backups_to_keep: u64,
+    pub discord_webhook: Option<String>,
+}
+
+/// Configs
+#[derive(Debug, Deserialize, Clone)]
+pub struct AlbatrossConfig {
+    pub backup: BackupConfig,
+    pub world_config: Option<Vec<WorldConfig>>,
+}
+
+impl AlbatrossConfig {
+    /// Create new backup from file
+    pub fn new(config_path: &str) -> Result<Self, ConfigError> {
+        let mut cfg = Config::new();
+        cfg.merge(File::with_name(config_path))?;
+
+        cfg.try_into()
+    }
+}
@@ -1,84 +0,0 @@
-pub(crate) mod remote;
-
-use crate::config::remote::{FTPConfig, FileConfig, SFTPConfig};
-use config::{Config, ConfigError, File};
-use serde::Deserialize;
-use std::path::PathBuf;
-
-/// World types supported
-#[derive(Debug, Deserialize, Clone)]
-pub enum WorldType {
-    End,
-    Nether,
-    Overworld,
-}
-
-impl From<String> for WorldType {
-    /// Convert config strings to WorldType
-    fn from(string: String) -> Self {
-        match string.as_str() {
-            "END" => WorldType::End,
-            "NETHER" => WorldType::Nether,
-            _ => WorldType::Overworld,
-        }
-    }
-}
-
-impl WorldType {
-    pub fn dim_number(&self) -> i8 {
-        match self {
-            WorldType::End => 1,
-            WorldType::Nether => -1,
-            WorldType::Overworld => 0,
-        }
-    }
-
-    pub fn dim_name(&self) -> String {
-        format!("DIM{}", self.dim_number())
-    }
-}
-
-/// Config for individual world configuration
-#[derive(Debug, Deserialize, Clone)]
-pub struct WorldConfig {
-    pub world_name: String,
-    pub save_radius: u64,
-    pub world_type: Option<WorldType>,
-}
-
-/// Config for doing backups
-#[derive(Debug, Deserialize, Clone)]
-pub struct BackupConfig {
-    pub minecraft_dir: PathBuf,
-    pub backups_to_keep: usize,
-    pub discord_webhook: Option<String>,
-    pub output_config: FileConfig,
-}
-
-/// Config for remote_backup backups
-#[derive(Debug, Deserialize, Clone)]
-pub struct RemoteBackupConfig {
-    pub backups_to_keep: usize,
-    pub sftp: Option<SFTPConfig>,
-    pub ftp: Option<FTPConfig>,
-    pub file: Option<FileConfig>,
-}
-
-/// Configs
-#[derive(Debug, Deserialize, Clone)]
-pub struct AlbatrossConfig {
-    pub backup: BackupConfig,
-    pub world_config: Option<Vec<WorldConfig>>,
-    pub remote: Option<RemoteBackupConfig>,
-}
-
-impl AlbatrossConfig {
-    /// Create new backup from file
-    pub fn new(config_path: &str) -> Result<Self, ConfigError> {
-        let cfg = Config::builder()
-            .add_source(File::with_name(config_path))
-            .build()?;
-
-        cfg.try_deserialize()
-    }
-}
@@ -1,39 +0,0 @@
-use serde::Deserialize;
-use std::path::PathBuf;
-
-/// SFTP Config
-#[derive(Debug, Deserialize, Clone)]
-pub struct SFTPConfig {
-    /// Remote server address
-    pub server_addr: String,
-    /// Remote output directory
-    pub remote_dir: PathBuf,
-    /// Remote server username
-    pub username: String,
-    /// Public key for key auth
-    pub public_key: Option<PathBuf>,
-    /// Private key for key auth
-    pub private_key: Option<PathBuf>,
-    /// Password if using password auth
-    pub password: Option<String>,
-}
-
-/// FTP Config
-#[derive(Debug, Deserialize, Clone)]
-pub struct FTPConfig {
-    /// Remote server address
-    pub server_addr: String,
-    /// Remote output directory
-    pub remote_dir: PathBuf,
-    /// Remote server username
-    pub username: String,
-    /// Password
-    pub password: String,
-}
-
-/// File Config
-#[derive(Debug, Deserialize, Clone)]
-pub struct FileConfig {
-    /// Path to backup to
-    pub path: PathBuf,
-}
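Taken together, the two deleted config modules above describe a layered layout on the 38630a4d57 side: `[backup]` with a nested `[backup.output_config]`, plus an optional `[remote]` table holding at most one of the SFTP, FTP, or plain-file backends. A small sketch of the backend-selection order used by `do_remote_backup` earlier in this diff, written against stand-in structs (same field names, but defined locally for illustration):

```rust
use std::path::PathBuf;

// Stand-ins mirroring the field names of the deleted config structs.
struct SftpConfig { server_addr: String }
struct FtpConfig { server_addr: String }
struct FileConfig { path: PathBuf }

struct RemoteBackupConfig {
    backups_to_keep: usize,
    sftp: Option<SftpConfig>,
    ftp: Option<FtpConfig>,
    file: Option<FileConfig>,
}

/// Pick the first configured backend, in the same priority order as do_remote_backup.
fn describe(remote: &RemoteBackupConfig) -> String {
    if let Some(sftp) = &remote.sftp {
        format!("SFTP to {} (keep {})", sftp.server_addr, remote.backups_to_keep)
    } else if let Some(ftp) = &remote.ftp {
        format!("FTP to {} (keep {})", ftp.server_addr, remote.backups_to_keep)
    } else if let Some(file) = &remote.file {
        format!("copy to {:?} (keep {})", file.path, remote.backups_to_keep)
    } else {
        "no remote backend configured".to_string()
    }
}

fn main() {
    let remote = RemoteBackupConfig {
        backups_to_keep: 3,
        sftp: Some(SftpConfig { server_addr: "localhost:22".into() }),
        ftp: None,
        file: None,
    };
    println!("{}", describe(&remote));
}
```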
@@ -1,6 +1,5 @@
 use crate::config::AlbatrossConfig;
 use discord_hooks_rs::DiscordWebhook;
-use log::{debug, error};
 
 /// Sends a webhook to Discord if its configured
 ///
@@ -12,13 +11,6 @@ pub fn send_webhook(msg: &str, cfg: &AlbatrossConfig) {
         let json = DiscordWebhook::new().content(msg);
 
         let client = reqwest::blocking::Client::new();
-        match client.post(webhook).json(&json).send() {
-            Ok(_) => {
-                debug!("Sent webhook with message '{msg}'")
-            }
-            Err(err) => {
-                error!("Failed to send webhook: '{err:?}'")
-            }
-        }
+        client.post(webhook).json(&json).send().ok();
     }
 }

src/error.rs (68 lines)
@@ -1,68 +0,0 @@
-use crate::region::RegionParseError;
-
-pub type Result<T> = std::result::Result<T, AlbatrossError>;
-
-#[derive(Debug)]
-pub enum AlbatrossError {
-    FileError(std::io::Error),
-    SSHError(ssh2::Error),
-    ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr),
-    RegionParseError(RegionParseError),
-    ChronoParseError(chrono::ParseError),
-    NoSSHAuth,
-    FTPError(ftp::FtpError),
-}
-
-impl std::error::Error for AlbatrossError {}
-
-impl std::fmt::Display for AlbatrossError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            AlbatrossError::FileError(e) => write!(f, "File I/O error: {e}"),
-            AlbatrossError::SSHError(e) => write!(f, "SSH error: {e}"),
-            AlbatrossError::ChunkParseError(e) => {
-                write!(f, "Unable to parse chunk coordinate: {e}")
-            }
-            AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {e}"),
-            AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {e}"),
-            AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"),
-            AlbatrossError::FTPError(e) => write!(f, "FTP error: {e}"),
-        }
-    }
-}
-
-impl From<std::io::Error> for AlbatrossError {
-    fn from(e: std::io::Error) -> Self {
-        AlbatrossError::FileError(e)
-    }
-}
-
-impl From<ssh2::Error> for AlbatrossError {
-    fn from(e: ssh2::Error) -> Self {
-        AlbatrossError::SSHError(e)
-    }
-}
-
-impl From<crate::chunk_coordinate::ChunkCoordinateErr> for AlbatrossError {
-    fn from(e: crate::chunk_coordinate::ChunkCoordinateErr) -> Self {
-        AlbatrossError::ChunkParseError(e)
-    }
-}
-
-impl From<crate::region::RegionParseError> for AlbatrossError {
-    fn from(e: RegionParseError) -> Self {
-        AlbatrossError::RegionParseError(e)
-    }
-}
-
-impl From<chrono::ParseError> for AlbatrossError {
-    fn from(e: chrono::ParseError) -> Self {
-        AlbatrossError::ChronoParseError(e)
-    }
-}
-
-impl From<ftp::FtpError> for AlbatrossError {
-    fn from(e: ftp::FtpError) -> Self {
-        AlbatrossError::FTPError(e)
-    }
-}
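On both sides the Discord notification is just a blocking `reqwest` POST of the `discord-hooks-rs` JSON body; the newer code only adds logging around it. A minimal sketch of that call with a placeholder webhook URL (not a real endpoint):

```rust
use discord_hooks_rs::DiscordWebhook;

fn send(msg: &str, webhook_url: &str) {
    let json = DiscordWebhook::new().content(msg);

    let client = reqwest::blocking::Client::new();
    // Report failures here; the older send_webhook discards them with .ok(),
    // while the newer one logs them via the log crate.
    match client.post(webhook_url).json(&json).send() {
        Ok(_) => println!("sent webhook with message '{msg}'"),
        Err(err) => eprintln!("failed to send webhook: {err:?}"),
    }
}

fn main() {
    // Placeholder URL for illustration only.
    send("**Albatross test**", "https://discordapp.com/api/webhooks/");
}
```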
src/main.rs (34 lines changed)
@@ -1,4 +1,3 @@
-use log::{LevelFilter, info};
 use std::path::PathBuf;
 use structopt::StructOpt;
 
@@ -6,9 +5,7 @@ mod backup;
 mod chunk_coordinate;
 mod config;
 mod discord;
-mod error;
 mod region;
-mod remote;
 mod restore;
 
 use crate::backup::{convert_backup_to_sp, do_backup};
@@ -64,33 +61,28 @@ enum SubCommand {
 }
 
 fn main() {
-    env_logger::builder()
-        .filter_level(LevelFilter::Info)
-        .parse_default_env()
-        .init();
-
     let opt = Albatross::from_args();
 
     let cfg = AlbatrossConfig::new(opt.config_path.into_os_string().to_str().unwrap())
-        .expect("Config error");
+        .expect("Config not found");
 
     if cfg.world_config.is_some() {
         match opt.sub_command {
             SubCommand::Backup { output } => {
-                info!("Starting backup");
+                println!("Starting backup");
                 match do_backup(cfg, output) {
-                    Ok(_) => info!("Backup complete!"),
-                    Err(e) => info!("Error doing backup: {e:?}"),
+                    Ok(_) => println!("Backup complete!"),
+                    Err(e) => println!("Error doing backup: {:?}", e),
                 };
             }
             SubCommand::Export {
                 input_backup,
                 output,
             } => {
-                info!("Starting export");
+                println!("Starting export");
                 match convert_backup_to_sp(&cfg, &input_backup, &output) {
-                    Ok(_) => info!("Export complete!"),
-                    Err(e) => info!("Error exporting backup: {e:?}"),
+                    Ok(_) => println!("Export complete!"),
+                    Err(e) => println!("Error exporting backup: {:?}", e),
                 };
             }
             SubCommand::Restore {
@@ -100,7 +92,7 @@ fn main() {
                 chunk,
                 upper_bound,
             } => {
-                info!("Starting restore");
+                println!("Starting restore");
 
                 let server_directory = match server_directory {
                     Some(dir) => dir,
@@ -115,8 +107,8 @@ fn main() {
                     &backup_path,
                     &server_directory,
                 ) {
-                    Ok(count) => info!("Restored {count} chunks!"),
-                    Err(e) => info!("Error restoring backup: {e:?}"),
+                    Ok(count) => println!("Restored {} chunks!", count),
+                    Err(e) => println!("Error restoring backup: {:?}", e),
                 };
             } else {
                 match restore_chunk_from_backup(
@@ -125,13 +117,13 @@ fn main() {
                     &backup_path,
                     &server_directory,
                 ) {
-                    Ok(_) => info!("Restored chunk!"),
-                    Err(e) => info!("Error restoring backup: {e:?}"),
+                    Ok(_) => println!("Restored chunk!"),
+                    Err(e) => println!("Error restoring backup: {:?}", e),
                 };
             }
         }
     } else {
-        info!("No worlds specified in config file!")
+        println!("No worlds specified in config file!")
     }
 }
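The 38630a4d57 side routes all of this output through `log`/`env_logger` instead of `println!`. A small sketch of that initialization, using the same calls as the removed lines above but wrapped in a standalone program:

```rust
use log::{info, LevelFilter};

fn main() {
    // Default to Info, but let RUST_LOG override it, as in the newer main().
    env_logger::builder()
        .filter_level(LevelFilter::Info)
        .parse_default_env()
        .init();

    info!("Starting backup");
    info!("Backup complete!");
}
```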
@@ -1,18 +1,19 @@
 use regex::Regex;
 use std::convert::TryFrom;
-use std::num::ParseIntError;
-use thiserror::Error;
+use std::error::Error;
+use std::fmt;
 
-#[derive(Debug, Clone, Error)]
-pub enum RegionParseError {
-    #[error("Regex Error '{0}'")]
-    RegexError(#[from] regex::Error),
-    #[error("Int parse error '{0}'")]
-    IntParseError(#[from] ParseIntError),
-    #[error("Cannot parse region file name '{0}'")]
-    RegionNameParseFailure(String),
+#[derive(Debug, Clone)]
+pub struct RegionParseError;
+
+impl fmt::Display for RegionParseError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Unable to parse region file name")
+    }
 }
 
+impl Error for RegionParseError {}
+
 /// Struct to store information about the region
 pub struct Region {
     /// x position of the region
@@ -26,41 +27,16 @@ impl TryFrom<String> for Region {
 
     /// Try from string
     fn try_from(value: String) -> Result<Self, Self::Error> {
-        let re = Regex::new(r"r\.(?P<x>-?[0-9]*)+\.(?P<y>-?[0-9]*)")?;
+        let re = Regex::new(r"r\.(?P<x>-?[0-9]*)+\.(?P<y>-?[0-9]*)").unwrap();
         if re.is_match(&value) {
             let captures = re.captures(value.as_str()).unwrap();
 
             return Ok(Region {
-                x: captures["x"].parse::<i64>()?,
-                y: captures["y"].parse::<i64>()?,
+                x: captures["x"].parse::<i64>().unwrap(),
+                y: captures["y"].parse::<i64>().unwrap(),
             });
         }
 
-        Err(RegionParseError::RegionNameParseFailure(value))
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use crate::region::Region;
-
-    #[test]
-    fn test_parse_success() {
-        let region_x = 5;
-        let region_y = -15;
-        let region_string = format!("r.{region_x}.{region_y}");
-
-        let region = Region::try_from(region_string).unwrap();
-
-        assert_eq!(region.x, region_x);
-        assert_eq!(region.y, region_y);
-    }
-
-    #[test]
-    fn test_parse_failure() {
-        let region_y = -15;
-        let region_string = format!("r.pb.{region_y}");
-
-        assert!(Region::try_from(region_string).is_err());
+        Err(RegionParseError)
     }
 }
@@ -1,47 +0,0 @@
-use crate::config::remote::FileConfig;
-use crate::error::Result;
-use crate::remote::{PathLocation, RemoteBackupSite};
-use std::path::PathBuf;
-
-pub struct FileBackup {
-    /// Target directory on the file system
-    target_dir: PathBuf,
-    /// Number of backups to keep
-    backups_to_keep: usize,
-}
-
-impl FileBackup {
-    /// New FileBackup
-    pub fn new(config: &FileConfig, backups_to_keep: usize) -> Result<Self> {
-        Ok(Self {
-            target_dir: config.path.clone(),
-            backups_to_keep,
-        })
-    }
-}
-
-impl RemoteBackupSite for FileBackup {
-    type FileType = PathLocation;
-
-    fn backup_to_remote(&mut self, file: PathBuf) -> Result<()> {
-        let dest = self.target_dir.join(file.file_name().unwrap());
-        std::fs::copy(file, dest)?;
-        Ok(())
-    }
-
-    fn get_backups(&mut self) -> Result<Vec<Self::FileType>> {
-        Ok(self
-            .target_dir
-            .read_dir()?
-            .filter_map(|file| Self::FileType::new(file.unwrap().path()))
-            .collect())
-    }
-
-    fn remove_backup(&mut self, backup: Self::FileType) -> Result<()> {
-        Ok(std::fs::remove_file(backup.location)?)
-    }
-
-    fn backups_to_keep(&self) -> usize {
-        self.backups_to_keep
-    }
-}
@@ -1,67 +0,0 @@
-use ftp::FtpStream;
-use std::path::PathBuf;
-
-use crate::config::remote::FTPConfig;
-use crate::error;
-use crate::remote::{PathLocation, RemoteBackupSite};
-
-/// FTP Remote Site
-pub struct FTPBackup {
-    /// FTP command stream
-    stream: FtpStream,
-    /// Remote target directory
-    target_dir: PathBuf,
-    /// Number of backups to keep
-    backups_to_keep: usize,
-}
-
-impl FTPBackup {
-    /// New FTPBackup
-    pub fn new(config: &FTPConfig, backups_to_keep: usize) -> error::Result<Self> {
-        let mut ftp_stream = FtpStream::connect(&config.server_addr)?;
-
-        ftp_stream.login(&config.username, &config.password)?;
-
-        Ok(Self {
-            stream: ftp_stream,
-            target_dir: config.remote_dir.clone(),
-            backups_to_keep,
-        })
-    }
-}
-
-impl Drop for FTPBackup {
-    fn drop(&mut self) {
-        self.stream.quit().ok();
-    }
-}
-
-impl RemoteBackupSite for FTPBackup {
-    type FileType = PathLocation;
-
-    fn backup_to_remote(&mut self, file: PathBuf) -> error::Result<()> {
-        let mut local_file = std::fs::File::open(&file)?;
-        let location = self.target_dir.join(file.file_name().unwrap());
-
-        self.stream
-            .put(location.to_str().unwrap(), &mut local_file)?;
-
-        Ok(())
-    }
-
-    fn get_backups(&mut self) -> error::Result<Vec<Self::FileType>> {
-        let files = self.stream.nlst(Some(self.target_dir.to_str().unwrap()))?;
-        Ok(files
-            .into_iter()
-            .filter_map(|file| Self::FileType::new(PathBuf::from(file)))
-            .collect())
-    }
-
-    fn remove_backup(&mut self, backup: Self::FileType) -> error::Result<()> {
-        Ok(self.stream.rm(backup.location.to_str().unwrap())?)
-    }
-
-    fn backups_to_keep(&self) -> usize {
-        self.backups_to_keep
-    }
-}
@@ -1,104 +0,0 @@
-use std::path::PathBuf;
-
-use chrono::NaiveDateTime;
-
-use crate::error::Result;
-
-pub mod file;
-pub mod ftp;
-pub mod sftp;
-
-#[allow(dead_code)]
-pub trait RemoteBackupFile {
-    /// Type containing the location of the remote_backup backup
-    type LocationType;
-
-    /// Get the underlying location type
-    fn location(&self) -> Self::LocationType;
-
-    /// Get the time the remote_backup file was created
-    fn time_created(&self) -> NaiveDateTime;
-
-    /// Parse the time created from the file name
-    fn parse_file_name(file_name: &str) -> Option<NaiveDateTime> {
-        let time: Vec<&str> = file_name.split("_backup.tar.gz").collect();
-
-        if let Some(time_str) = time.first() {
-            NaiveDateTime::parse_from_str(time_str, "%d-%m-%y_%H.%M.%S").ok()
-        } else {
-            None
-        }
-    }
-}
-
-pub trait RemoteBackupSite {
-    /// Struct representing the location of a backup on the site
-    type FileType: RemoteBackupFile;
-
-    /// Backup a file to the remote_backup site
-    fn backup_to_remote(&mut self, file: PathBuf) -> Result<()>;
-
-    /// Get the locations backups contained on the remote_backup site
-    fn get_backups(&mut self) -> Result<Vec<Self::FileType>>;
-
-    /// Remove a backup from the side
-    fn remove_backup(&mut self, backup: Self::FileType) -> Result<()>;
-
-    /// Number of backups to keep on the site
-    fn backups_to_keep(&self) -> usize;
-
-    /// Cleanup old backups on the remote_backup site
-    fn cleanup(&mut self) -> Result<usize> {
-        let mut backups = self.get_backups()?;
-
-        backups.sort_by_key(|backup| backup.time_created());
-
-        let mut backups: Vec<Self::FileType> = backups.into_iter().rev().collect();
-
-        let mut removed_count: usize = 0;
-        if backups.len() > self.backups_to_keep() {
-            for _ in 0..(backups.len() - self.backups_to_keep()) {
-                if let Some(backup) = backups.pop() {
-                    self.remove_backup(backup)?;
-                    removed_count += 1;
-                }
-            }
-        }
-
-        Ok(removed_count)
-    }
-}
-
-/// Backup location that can be represented by a path
-pub struct PathLocation {
-    location: PathBuf,
-    time_created: NaiveDateTime,
-}
-
-impl PathLocation {
-    /// New PathLocation
-    fn new(path: PathBuf) -> Option<Self> {
-        if let Some(file_name) = path.file_name() {
-            let file_name = file_name.to_str().unwrap();
-
-            Self::parse_file_name(file_name).map(|time| Self {
-                location: path,
-                time_created: time,
-            })
-        } else {
-            None
-        }
-    }
-}
-
-impl RemoteBackupFile for PathLocation {
-    type LocationType = PathBuf;
-
-    fn location(&self) -> Self::LocationType {
-        self.location.to_path_buf()
-    }
-
-    fn time_created(&self) -> NaiveDateTime {
-        self.time_created
-    }
-}
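The retention policy in the removed `cleanup()` above only depends on being able to order archives by the timestamp embedded in their `*_backup.tar.gz` names. A standalone sketch of that same parse, sort, and pop-the-oldest strategy — using a hypothetical `plan_cleanup` helper rather than the trait itself:

```rust
use chrono::NaiveDateTime;

/// Parse the timestamp prefix of names like "04-11-20_01.51.27_backup.tar.gz",
/// mirroring RemoteBackupFile::parse_file_name above.
fn parse_file_name(file_name: &str) -> Option<NaiveDateTime> {
    let time: Vec<&str> = file_name.split("_backup.tar.gz").collect();
    time.first()
        .and_then(|t| NaiveDateTime::parse_from_str(t, "%d-%m-%y_%H.%M.%S").ok())
}

/// Keep only the `keep` newest backups, returning the names that would be removed;
/// this is the same sort-and-pop approach as the trait's default cleanup().
fn plan_cleanup(mut names: Vec<&str>, keep: usize) -> Vec<&str> {
    names.sort_by_key(|n| parse_file_name(n));
    let mut newest_first: Vec<&str> = names.into_iter().rev().collect();
    let mut removed = Vec::new();
    while newest_first.len() > keep {
        if let Some(oldest) = newest_first.pop() {
            removed.push(oldest);
        }
    }
    removed
}

fn main() {
    let removed = plan_cleanup(
        vec![
            "04-11-20_01.51.27_backup.tar.gz",
            "05-11-20_01.51.27_backup.tar.gz",
            "06-11-20_01.51.27_backup.tar.gz",
        ],
        2,
    );
    // The oldest archive is the removal candidate.
    println!("{removed:?}");
}
```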
@@ -1,78 +0,0 @@
-use std::net::TcpStream;
-use std::path::PathBuf;
-
-use ssh2::Session;
-
-use crate::config::remote::SFTPConfig;
-use crate::error;
-use crate::error::AlbatrossError;
-use crate::remote::{PathLocation, RemoteBackupSite};
-
-/// SFTP Remote Site
-pub struct SFTPBackup {
-    /// SSH Session
-    session: Session,
-    /// Remote target directory
-    target_dir: PathBuf,
-    /// Number of backups to keep
-    backups_to_keep: usize,
-}
-
-impl SFTPBackup {
-    /// New SFTPBackup
-    pub fn new(config: &SFTPConfig, backups_to_keep: usize) -> error::Result<Self> {
-        let tcp = TcpStream::connect(&config.server_addr)?;
-        let mut sess = Session::new()?;
-        sess.set_tcp_stream(tcp);
-        sess.handshake().unwrap();
-
-        if let Some(password) = &config.password {
-            sess.userauth_password(&config.username, password)?;
-        } else if let Some(key) = &config.private_key {
-            let public_key = config.public_key.as_deref();
-            sess.userauth_pubkey_file(&config.username, public_key, key, None)?;
-        } else {
-            return Err(AlbatrossError::NoSSHAuth);
-        }
-
-        Ok(Self {
-            session: sess,
-            target_dir: config.remote_dir.clone(),
-            backups_to_keep,
-        })
-    }
-}
-
-impl RemoteBackupSite for SFTPBackup {
-    type FileType = PathLocation;
-
-    fn backup_to_remote(&mut self, file: PathBuf) -> error::Result<()> {
-        let remote_path = self.target_dir.join(file.file_name().unwrap());
-
-        let mut local_file = std::fs::File::open(&file)?;
-
-        let sftp = self.session.sftp()?;
-
-        let mut remote_file = sftp.create(&remote_path)?;
-
-        std::io::copy(&mut local_file, &mut remote_file)?;
-
-        Ok(())
-    }
-
-    fn get_backups(&mut self) -> error::Result<Vec<Self::FileType>> {
-        let files = self.session.sftp()?.readdir(&self.target_dir)?;
-        Ok(files
-            .into_iter()
-            .filter_map(|(file, _)| Self::FileType::new(file))
-            .collect())
-    }
-
-    fn remove_backup(&mut self, backup: Self::FileType) -> error::Result<()> {
-        Ok(self.session.sftp()?.unlink(&backup.location)?)
-    }
-
-    fn backups_to_keep(&self) -> usize {
-        self.backups_to_keep
-    }
-}
@@ -1,10 +1,9 @@
 use crate::backup::uncompress_backup;
 use crate::chunk_coordinate::ChunkCoordinate;
-use crate::error::Result;
-use anvil_region::position::{RegionChunkPosition, RegionPosition};
-use anvil_region::provider::{FolderRegionProvider, RegionProvider};
+use anvil_region::AnvilChunkProvider;
+use std::error;
 use std::fs::remove_dir_all;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 
 /// Struct for manipulating a world from a backup
 struct RestoreAccess {
@@ -16,7 +15,11 @@ struct RestoreAccess {
 
 impl RestoreAccess {
     /// Create new RestoreAccess
-    pub fn new(world_name: &str, src_path: &Path, dest_path: &Path) -> Result<Self> {
+    pub fn new(
+        world_name: &str,
+        src_path: &PathBuf,
+        dest_path: &PathBuf,
+    ) -> Result<Self, std::io::Error> {
         let src_path = uncompress_backup(src_path)?.join(world_name).join("region");
         let dest_path = dest_path.join(world_name).join("region");
 
@@ -26,29 +29,20 @@ impl RestoreAccess {
         })
     }
 
-    /// Copy chunk from source to destination
+    /// Copy chunk from source to desination
     pub fn copy_chunk(&self, x: i32, z: i32) {
-        let region_position = RegionPosition::from_chunk_position(x, z);
-        let region_chunk_position = RegionChunkPosition::from_chunk_position(x, z);
-
-        let src_provider = FolderRegionProvider::new(self.src_path.to_str().unwrap());
-        let dest_provider = FolderRegionProvider::new(self.dest_path.to_str().unwrap());
-
-        let mut src_region = src_provider.get_region(region_position).unwrap();
-        let src_chunk_compound_tag = src_region
-            .read_chunk(region_chunk_position)
-            .expect("Unable to load chunk");
-
-        let mut dst_region = dest_provider.get_region(region_position).unwrap();
-
-        dst_region
-            .write_chunk(region_chunk_position, src_chunk_compound_tag)
-            .expect("Unable to write chunk");
+        let src_provider = AnvilChunkProvider::new(self.src_path.to_str().unwrap());
+        let dest_provider = AnvilChunkProvider::new(self.dest_path.to_str().unwrap());
+
+        let chunk = src_provider.load_chunk(x, z).expect("Unable to load chunk");
+        dest_provider
+            .save_chunk(x, z, chunk)
+            .expect("Unable to save chunk");
     }
 
     /// Cleanup process
-    pub fn cleanup(self) -> Result<()> {
-        Ok(remove_dir_all("tmp")?)
+    pub fn cleanup(self) -> Result<(), std::io::Error> {
+        remove_dir_all("tmp")
     }
 }
 
@@ -57,9 +51,9 @@ pub fn restore_range_from_backup(
     world_name: &str,
     lower: ChunkCoordinate,
     upper: ChunkCoordinate,
-    backup_path: &Path,
-    minecraft_dir: &Path,
-) -> Result<u64> {
+    backup_path: &PathBuf,
+    minecraft_dir: &PathBuf,
+) -> Result<u64, Box<dyn error::Error>> {
     let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?;
     let mut count = 0;
 
@@ -78,9 +72,9 @@ pub fn restore_range_from_backup(
 pub fn restore_chunk_from_backup(
     world_name: &str,
     chunk: ChunkCoordinate,
-    backup_path: &Path,
-    minecraft_dir: &Path,
-) -> Result<()> {
+    backup_path: &PathBuf,
+    minecraft_dir: &PathBuf,
+) -> Result<(), Box<dyn error::Error>> {
     let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?;
     chunk_access.copy_chunk(chunk.x, chunk.z);
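Both versions restore one chunk at a time between region folders; only the anvil-region API differs (0.8's region/provider types versus 0.4's `AnvilChunkProvider`). A sketch of the newer calls exactly as they appear above, with hypothetical source and destination folders:

```rust
use anvil_region::position::{RegionChunkPosition, RegionPosition};
use anvil_region::provider::{FolderRegionProvider, RegionProvider};

/// Copy chunk (x, z) from one region folder to another, using the same
/// anvil-region 0.8 calls as RestoreAccess::copy_chunk above.
fn copy_chunk(src_folder: &str, dest_folder: &str, x: i32, z: i32) {
    let region_position = RegionPosition::from_chunk_position(x, z);
    let region_chunk_position = RegionChunkPosition::from_chunk_position(x, z);

    let src_provider = FolderRegionProvider::new(src_folder);
    let dest_provider = FolderRegionProvider::new(dest_folder);

    let mut src_region = src_provider.get_region(region_position).unwrap();
    let chunk_tag = src_region
        .read_chunk(region_chunk_position)
        .expect("Unable to load chunk");

    let mut dest_region = dest_provider.get_region(region_position).unwrap();
    dest_region
        .write_chunk(region_chunk_position, chunk_tag)
        .expect("Unable to write chunk");
}

fn main() {
    // Hypothetical paths; both must already contain Anvil region files.
    copy_chunk("backup/world/region", "server/world/region", 0, 0);
}
```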