
Commit

Merge branch 'main' into test_rework
BrettMayson committed Oct 20, 2024
2 parents 42d81b9 + 32ccaae commit e8a52e2
Showing 8 changed files with 774 additions and 21 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -30,3 +30,6 @@ hls/hemtt-language-server.exe

# Insta Pending Snapshots
*.snap.new

# Cargo Mutants
mutants.*/
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions libs/pbo/Cargo.toml
@@ -18,3 +18,6 @@ byteorder = { workspace = true }
indexmap = { workspace = true }
serde = { workspace = true }
sha-1 = { workspace = true }

[dev-dependencies]
insta = { workspace = true }
10 changes: 6 additions & 4 deletions libs/pbo/src/write.rs
@@ -7,7 +7,7 @@ use hemtt_common::io::WriteExt;
use indexmap::IndexMap;
use sha1::{Digest, Sha1};

use crate::{error::Error, model::Header, WritePbo};
use crate::{error::Error, model::Header, Checksum, WritePbo};

#[derive(Default)]
/// A PBO file that can be written to
@@ -133,7 +133,7 @@ impl<I: Seek + Read> WritablePbo<I> {
&mut self,
output: &mut O,
properties: bool,
) -> Result<(), Error> {
) -> Result<Checksum, Error> {
let mut headers: Cursor<Vec<u8>> = Cursor::new(Vec::new());
if properties {
Header::property().write_pbo(&mut headers)?;
@@ -194,9 +194,11 @@ impl<I: Seek + Read> WritablePbo<I> {
}
}

let checksum = hasher.finalize();
buffered_output.write_all(&[0])?;
buffered_output.write_all(&hasher.finalize())?;
buffered_output.write_all(&checksum)?;
buffered_output.flush()?;

Ok(())
Ok(checksum.to_vec().into())
}
}
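
The behavioural change in this hunk: `write` now finalizes the SHA-1 once, appends it after the trailing zero byte, and returns it as a `Checksum`, so callers no longer have to re-hash the serialized output. A minimal sketch of the new call pattern, assuming the PBO has already been populated elsewhere (`write_to_memory` is a hypothetical helper name; only the `write` call itself is taken from this diff):

```rust
use std::io::{Cursor, Read, Seek};

use hemtt_pbo::{Checksum, WritablePbo};

/// Sketch: serialize an already-populated PBO into memory and capture the
/// checksum that `write` now returns. Adding files to the PBO is elided
/// because that API is not part of this diff.
fn write_to_memory<I: Seek + Read>(pbo: &mut WritablePbo<I>) -> (Vec<u8>, Checksum) {
    let mut out = Cursor::new(Vec::new());
    // `true` keeps the properties header, matching the
    // `new_pbo.write(&mut new_pbo_bin, true)` call in the test below.
    // The returned value is the same SHA-1 that was appended after the
    // trailing zero byte, so the caller does not need to re-hash the buffer.
    let checksum = pbo.write(&mut out, true).unwrap();
    (out.into_inner(), checksum)
}
```

Returning the checksum also lets the tests below pin it in their snapshot output alongside the rest of the PBO summary.
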
80 changes: 75 additions & 5 deletions libs/pbo/tests/ace_weather.rs
@@ -5,21 +5,24 @@ use std::{fs::File, io::Read};
use hemtt_pbo::{Checksum, Mime, WritablePbo};

mod utils;
use sha1::{Digest, Sha1};
use utils::*;

#[allow(clippy::too_many_lines)]
#[test]
fn ace_weather_cba6f72c() {
let checksum = Checksum::from_bytes([
210, 213, 255, 98, 5, 201, 111, 118, 217, 52, 219, 91, 163, 179, 230, 89, 98, 139, 31, 78,
]);
let mut pbo = pbo(
File::open("tests/ace_weather.pbo_cba6f72c").unwrap(),
41,
true,
3,
"cba6f72c",
"z\\ace\\addons\\weather",
Checksum::from_bytes([
210, 213, 255, 98, 5, 201, 111, 118, 217, 52, 219, 91, 163, 179, 230, 89, 98, 139, 31,
78,
]),
checksum,
checksum,
);
header(
pbo.files().first().unwrap(),
@@ -44,6 +47,39 @@
"XEH_preStart.sqf",
"#include \"script_component.hpp\"\r\n\r\n#include \"XEH_PREP.hpp\"\r\n",
);

{
let mut pbo_summary = String::from("# Properties\n");
for ext in pbo.properties() {
pbo_summary.push_str(&format!("{}: {}\n", ext.0, ext.1));
}
pbo_summary.push_str("\n# Files\n");
for file in pbo.files_sorted() {
pbo_summary.push_str(&format!("{}\n", file.filename()));
pbo_summary.push_str(&format!(" mime {}\n", file.mime()));
pbo_summary.push_str(&format!(" original {}\n", file.original()));
pbo_summary.push_str(&format!(" reserved {}\n", file.reserved()));
pbo_summary.push_str(&format!(" timestamp {}\n", file.timestamp()));
pbo_summary.push_str(&format!(" size {}\n", file.size()));
pbo_summary.push_str(&format!(
" offset {:?}\n",
pbo.file_offset(file.filename()).unwrap()
));
pbo_summary.push_str(&format!(" hash {}\n", {
let mut content = pbo.file(file.filename()).unwrap().unwrap();
let mut data = Vec::new();
content.read_to_end(&mut data).unwrap();
let mut hasher = Sha1::new();
hasher.update(data);
let result: Checksum = hasher.finalize().to_vec().into();
result.hex()
}));
}
pbo_summary.push_str("\n# Checksum\n");
pbo_summary.push_str(checksum.hex().as_str());
insta::assert_snapshot!(pbo_summary);
}

let mut new_pbo = WritablePbo::new();
let mut new_files = std::collections::HashMap::new();
for file in pbo.files() {
Expand All @@ -64,13 +100,43 @@ fn ace_weather_cba6f72c() {
new_pbo.add_property(ext.0, ext.1);
}
let mut new_pbo_bin = std::io::Cursor::new(Vec::new());
new_pbo.write(&mut new_pbo_bin, true).unwrap();
let checksum = new_pbo.write(&mut new_pbo_bin, true).unwrap();
let mut old_pbo_bin = std::io::Cursor::new(Vec::new());
File::open("tests/ace_weather.pbo_cba6f72c")
.unwrap()
.read_to_end(old_pbo_bin.get_mut())
.unwrap();
assert_eq!(old_pbo_bin.get_ref(), new_pbo_bin.get_ref());

{
let mut pbo_summary = String::from("# Properties\n");
for ext in new_pbo.properties() {
pbo_summary.push_str(&format!("{}: {}\n", ext.0, ext.1));
}
pbo_summary.push_str("\n# Files\n");
for file in new_pbo.files_sorted() {
pbo_summary.push_str(&format!("{}\n", file.filename()));
pbo_summary.push_str(&format!(" mime {}\n", file.mime()));
pbo_summary.push_str(&format!(" original {}\n", file.original()));
pbo_summary.push_str(&format!(" reserved {}\n", file.reserved()));
pbo_summary.push_str(&format!(" timestamp {}\n", file.timestamp()));
pbo_summary.push_str(&format!(" size {}\n", file.size()));
pbo_summary.push_str(&format!(
" offset {:?}\n",
pbo.file_offset(file.filename()).unwrap()
));
pbo_summary.push_str(&format!(" hash {}\n", {
let t = &new_files[file.filename()];
let mut hasher = Sha1::new();
hasher.update(t);
let result: Checksum = hasher.finalize().to_vec().into();
result.hex()
}));
}
pbo_summary.push_str("\n# Checksum\n");
pbo_summary.push_str(checksum.hex().as_str());
insta::assert_snapshot!(pbo_summary);
}
}

#[test]
@@ -86,6 +152,10 @@ fn ace_weather_8bd4922f() {
182, 44, 18, 201, 133, 232, 236, 162, 127, 37, 203, 45, 42, 137, 130, 36, 120, 104,
187, 203,
]),
Checksum::from_bytes([
192, 194, 71, 145, 26, 138, 140, 97, 35, 238, 93, 21, 54, 70, 202, 148, 73, 239, 125,
183,
]),
);
header(
pbo.files().first().unwrap(),
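
Both tests above now pin a human-readable PBO summary with insta snapshots instead of hand-written assertions, which is what the new `insta` dev-dependency and the existing `*.snap.new` ignore rule support. A minimal sketch of the pattern, using a stand-in summary string (`summary_snapshot_example` is a hypothetical test name; the real tests build the report from the PBO's properties, per-file metadata, and checksum):

```rust
// A stand-alone illustration of the snapshot pattern used above
// (placed in a file under tests/, with `insta` as a dev-dependency).
#[test]
fn summary_snapshot_example() {
    // Build the text to pin; the real tests assemble a
    // "# Properties / # Files / # Checksum" report from the parsed PBO.
    let mut summary = String::from("# Properties\n");
    summary.push_str("prefix: z\\ace\\addons\\weather\n");

    // On the first run (or whenever the output changes) insta writes a
    // pending `*.snap.new` file into the snapshots/ directory next to the
    // test, which is why that pattern is git-ignored above; `cargo insta
    // review` (or `accept`) promotes it to the committed `.snap` snapshot
    // that future runs compare against.
    insta::assert_snapshot!(summary);
}
```
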
