diff --git a/Cargo.lock b/Cargo.lock index 2741fa9..22d8905 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -156,7 +156,7 @@ dependencies = [ [[package]] name = "discord-backup-util" -version = "0.2.3" +version = "0.3.0" dependencies = [ "minreq", "rand", diff --git a/Cargo.toml b/Cargo.toml index 500fd08..4199a38 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "discord-backup-util" -version = "0.2.3" +version = "0.3.0" authors = ["buj"] license = "AGPL-3.0-or-later" description = "A tiny tool to backup stuff to Discord" diff --git a/backup_config b/backup_config index a5ffc7d..cc8c0cf 100644 --- a/backup_config +++ b/backup_config @@ -2,7 +2,10 @@ webhook every 6 hours #password noaccesslol +# Line below will work until Discord lowers the limit again +#block-size 25 + # Script below is being executed in a temporary directory that is then zipped and sent in chunks #!/bin/bash -echo "Hello, world!" > test-file \ No newline at end of file +echo "Hello, world!" > test-file diff --git a/src/config.rs b/src/config.rs index d958d86..f9dc068 100644 --- a/src/config.rs +++ b/src/config.rs @@ -11,6 +11,7 @@ pub struct Config { pub delay: Duration, pub password: Option<String>, pub compression_level: i64, + pub block_size: u8, } struct TimeColumn { @@ -93,6 +94,7 @@ pub fn parse_args() -> Config { let mut delay = None; let mut password = None; let mut compression = None; + let mut block_size = None; while let Some(x) = lines.peek() { let x = x.trim(); @@ -131,6 +133,19 @@ pub fn parse_args() -> Config { } } + if x.starts_with("block-size ") { + if let Ok(value) = x.split_once(' ').unwrap().1.parse::<u8>() { + if block_size.replace(value).is_some() { + eprintln!("{exe}: cannot set multiple block sizes"); + exit(-1); + } + continue; + } else { + eprintln!("{exe}: invalid block size"); + exit(-1); + } + } + if x.starts_with("webhook ") { if webhook .replace(Webhook::new(x.split_once(' ').unwrap().1.to_string())) @@ -235,6 +250,7 @@ pub fn parse_args() -> Config { } }, 
compression_level: compression.unwrap_or(10), + block_size: block_size.unwrap_or(10), shell, script, password, diff --git a/src/hook.rs b/src/hook.rs index ca27627..ec73917 100644 --- a/src/hook.rs +++ b/src/hook.rs @@ -64,15 +64,6 @@ impl Message { std::thread::sleep(Duration::from_secs(10)); } } - - pub fn reply<L: Logger>( - &self, - hook: &Webhook, - message: impl Fn(MessageBuilder) -> MessageBuilder, - logger: &mut L, - ) -> Message { - hook.send(message, logger) - } } struct ApiMessage { @@ -86,6 +77,10 @@ impl Webhook { Self(url) } + pub fn url(&self) -> &str { + &self.0 + } + /// Send a message. /// /// Will try indefinitely until success. diff --git a/src/upload.rs b/src/upload.rs index 8891ccd..06402b4 100644 --- a/src/upload.rs +++ b/src/upload.rs @@ -1,13 +1,69 @@ use std::{ fs::{self, File}, io::{Read, Write}, + os::unix::fs::MetadataExt, path::PathBuf, process::{Command, Stdio}, + rc::Rc, + sync::{atomic::AtomicU64, Mutex}, }; use zip::{write::FileOptions, ZipWriter}; -use crate::{config::Config, log::Logger, temp::temp_path, Defer}; +use crate::{ + config::Config, + hook::{Message, Webhook}, + log::Logger, + temp::temp_path, + Defer, +}; + +fn upload_chunked( + block_size: u8, + webhook: &Webhook, + mut file: impl Read, + name: impl Fn(usize) -> String, + uploaded: impl Fn(Message, usize) -> std::io::Result<()>, + log: &mut impl Logger, +) -> std::io::Result<usize> { + let chunk_size: usize = 1000 * 1000 * block_size as usize; + let mut buffer = vec![0u8; chunk_size]; + let mut i = 0; + + loop { + let mut ptr = 0usize; + + let mut end = false; + + while ptr < chunk_size { + match file.read(&mut buffer[ptr..]) { + Ok(len) => { + ptr += len; + if len == 0 { + end = true; + break; + } + } + Err(why) => { + log.error(&format!("Failed to upload artifact: {why}")); + return Err(why); + } + } + } + + if ptr == chunk_size || end { + uploaded( + webhook.send(|x| x.file(name(i), buffer[0..ptr].to_vec()), log), + i, + )?; + if end { + break Ok(i); + } + } + + i += 1; + } +} pub 
fn upload<'a, L: Logger>(config: &'a Config, log: &'a mut L) { log.info("Trying to initiate a backup..."); @@ -182,7 +238,15 @@ pub fn upload<'a, L: Logger>(config: &'a Config, log: &'a mut L) { drop(script); - let mut file = match File::open(&*archive) { + if let Err(why) = zip.finish() { + log.error(&format!("Failed to contruct a zip archive: {why}")); + head.edit(&config.webhook, "Failed to finalize a zip archive", log); + return; + } + + drop(dir); + + let file = match File::open(&*archive) { Ok(x) => x, Err(why) => { log.error(&format!("Failed to open temporary file: {why}")); @@ -191,49 +255,166 @@ pub fn upload<'a, L: Logger>(config: &'a Config, log: &'a mut L) { } }; - if let Err(why) = zip.finish() { - log.error(&format!("Failed to contruct a zip archive: {why}")); - head.edit(&config.webhook, "Failed to finalize a zip archive", log); + let volumes = ["B", "KiB", "MiB", "GiB", "TiB"]; + match file.metadata() { + Ok(x) => { + let mut volume = 0; + let mut size = x.size() as f32; + while size >= 1024.0 && volume < volumes.len() - 1 { + size /= 1024.0; + volume += 1; + } + log.info(&format!( + "Final archive size: {size:0.3}{}", + volumes[volume] + )); + } + Err(why) => { + log.error(&format!("Failed to fetch file metadata: {why}")); + head.edit(&config.webhook, "Failed to fetch file metadata", log); + return; + } + } + + head.edit(&config.webhook, "Publishing artifact...", log); + + let delete_file = |x: &mut PathBuf| { + let _ = fs::remove_file(x).ok(); + }; + + let mut script_path = Defer::new(temp_path(), delete_file); + let mut script_file = Rc::new(Mutex::new( + match File::options() + .write(true) + .truncate(true) + .create_new(true) + .open(&*script_path) + { + Ok(x) => x, + Err(why) => { + log.error(&format!("Failed to create download script: {why}")); + head.edit(&config.webhook, "Failed to create download script", log); + return; + } + }, + )); + + if let Err(why) = script_file.lock().unwrap() + .write_all(format!(r#"dl(){{ curl -f -L "$(curl -f 
-L "{}/messages/$1"|grep -Eo '"url":"[^"]+"'|grep -Eo 'https[^"]+')">>dl_backup.zip;if [ ! $? -eq 0 ];then sleep 5;dl "$1";fi }};printf "">dl_backup.zip"#, config.webhook.url()).as_bytes()) + { + log.error(&format!("Failed to create download script: {why}")); + head.edit(&config.webhook, "Failed to create download script", log); return; } - const CHUNK_SIZE: usize = 1000 * 1000 * 25; - let mut buffer = vec![0u8; CHUNK_SIZE]; - let mut chunks = 0u32; + let chunks = match upload_chunked( + config.block_size, + &config.webhook, + file, + |i| format!("chunk_{i}.zip"), + |msg, _| { + script_file + .lock() + .unwrap() + .write_all(format!(";dl {}", msg.id.unwrap()).as_bytes()) + }, + log, + ) { + Ok(x) => x + 1, + Err(why) => { + log.error(&format!("Failed to upload artifact: {why}")); + head.edit(&config.webhook, "Failed to upload artifact", log); + return; + } + }; + + head.edit(&config.webhook, "Uploading download script...", log); + config.webhook.send(|x| x.content(":warning: Do not manually download files below! 
:warning:\n\nThose are for the download script."), log); - head.edit(&config.webhook, "Publishing artifact...", log); + let mut lol = 0usize; loop { - chunks += 1; - let mut ptr = 0usize; + if let Err(why) = script_file.lock().unwrap().flush() { + log.error(&format!("Failed to upload download script: {why}")); + head.edit(&config.webhook, "Failed to upload download script", log); + return; + } - let mut end = false; + script_file = Rc::new(Mutex::new(match File::open(&*script_path) { + Ok(x) => x, + Err(why) => { + log.error(&format!("Failed to upload download script: {why}")); + head.edit(&config.webhook, "Failed to upload download script", log); + return; + } + })); - while ptr != CHUNK_SIZE { - match file.read(&mut buffer) { - Ok(len) => { - ptr += len; - if len == 0 { - end = true; - break; - } - } + let overflow_path = Defer::new(temp_path(), delete_file); + let overflow_file = Rc::new(Mutex::new( + match File::options() + .write(true) + .truncate(true) + .create_new(true) + .open(&*overflow_path) + { + Ok(x) => x, Err(why) => { - log.error(&format!("Failed to upload artifact: {why}")); - head.edit(&config.webhook, "Upload failed", log); - continue; + log.error(&format!("Failed to upload download script: {why}")); + head.edit(&config.webhook, "Failed to upload download script", log); + return; } + }, + )); + + if let Err(why) = overflow_file.lock().unwrap() + .write_all(format!(r#"TFILE=mktemp;dl(){{ curl -f -L "$(curl -f -L "{}/messages/$1"|grep -Eo '"url":"[^"]+"'|grep -Eo 'https[^"]+')">>$TFILE;if [ ! $? 
-eq 0 ];then sleep 5;dl "$1";fi }};printf "">$TFILE"#, config.webhook.url()).as_bytes()) + { + log.error(&format!("Failed to upload download script: {why}")); + head.edit(&config.webhook, "Failed to upload download script", log); + return; + } + + let message_id = Rc::new(AtomicU64::default()); + + match upload_chunked( + config.block_size, + &config.webhook, + &mut *script_file.lock().unwrap(), + |i| format!("script_{lol}_{i}.zip"), + |msg, _| { + message_id.store(msg.id.unwrap().get(), std::sync::atomic::Ordering::SeqCst); + overflow_file + .lock() + .unwrap() + .write_all(format!(";dl {}", msg.id.unwrap()).as_bytes()) + }, + log, + ) { + Ok(0) => { + config.webhook.send(|x| x.content(format!("Upload complete!\n\nTo automatically download the backup archive, use the following script:```sh\ncurl -f -L \"$(curl -f -L \"{}/messages/{}\" | grep -Eo '\"url\":\"[^\"]+\"' | grep -Eo 'https[^\"]+')\" | sh -\n```\n\nMake sure `curl` and `grep` are installed.", config.webhook.url(), message_id.load(std::sync::atomic::Ordering::SeqCst))), log); + break; + } + Err(why) => { + log.error(&format!("Failed to upload download script: {why}")); + head.edit(&config.webhook, "Failed to upload download script", log); + return; } + _ => (), } - if ptr == CHUNK_SIZE || end { - head.reply( - &config.webhook, - move |x| x.file(format!("chunk_{chunks}.zip"), buffer[0..ptr].to_vec()), - log, - ); - break; + if let Err(why) = overflow_file + .lock() + .unwrap() + .write_all(r#";sh $TFILE;rm $TFILE"#.as_bytes()) + { + log.error(&format!("Failed to upload download script: {why}")); + head.edit(&config.webhook, "Failed to upload download script", log); + return; } + + script_path = overflow_path; + + lol += 1; } head.edit(&config.webhook, format!("Backup completed successfully.\n\nTo assemble the original archive, download all {chunks} chunks and concatenate them into a single file"), log);