Skip to content

Commit

Permalink
switch to cargo-insta for snapshot testing
Browse files Browse the repository at this point in the history
  • Loading branch information
icewind1991 committed Dec 21, 2023
1 parent d20fbb8 commit 07039da
Show file tree
Hide file tree
Showing 23 changed files with 42,991 additions and 22,877 deletions.
148 changes: 92 additions & 56 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ pretty_assertions = "1.3.0"
test-case = "2.2.2"
iai = "0.1.1"
criterion = "0.4.0"
insta = { version = "1.34.0", features = ["json"] }

[profile.release]
lto = true
Expand Down
2 changes: 1 addition & 1 deletion flake.nix
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
};

devShells.default = pkgs.mkShell {
nativeBuildInputs = with pkgs; [rust-bin.stable.latest.default bacon cargo-edit cargo-outdated rustfmt clippy cargo-audit hyperfine valgrind];
nativeBuildInputs = with pkgs; [rust-bin.stable.latest.default bacon cargo-edit cargo-outdated rustfmt clippy cargo-audit hyperfine valgrind cargo-insta];
};
});
}
13 changes: 11 additions & 2 deletions src/demo/parser/analyser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -356,6 +356,12 @@ pub struct Analyser {
user_id_map: HashMap<EntityId, UserId>,
}

/// A span of demo ticks during which the game was paused.
///
/// Replaces the earlier bare `(DemoTick, DemoTick)` tuple in
/// `MatchState::pauses` with a named struct so serialized output
/// (and snapshot tests) carry self-describing field names.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq)]
pub struct Pause {
    // Tick at which the pause began (falls back to `DemoTick::default()`
    // when no pause-start was recorded before the pause-end event).
    from: DemoTick,
    // Tick at which the pause ended.
    to: DemoTick,
}

impl MessageHandler for Analyser {
type Output = MatchState;

Expand Down Expand Up @@ -387,7 +393,10 @@ impl MessageHandler for Analyser {
self.pause_start = Some(tick);
} else {
let start = self.pause_start.unwrap_or_default();
self.state.pauses.push((start, tick))
self.state.pauses.push(Pause {
from: start,
to: tick,
})
}
}
_ => {}
Expand Down Expand Up @@ -498,5 +507,5 @@ pub struct MatchState {
pub rounds: Vec<Round>,
pub start_tick: ServerTick,
pub interval_per_tick: f32,
pub pauses: Vec<(DemoTick, DemoTick)>,
pub pauses: Vec<Pause>,
}
1 change: 0 additions & 1 deletion test_data/comp_message_types.json

This file was deleted.

316 changes: 0 additions & 316 deletions test_data/small_entities.json

This file was deleted.

84 changes: 6 additions & 78 deletions tests/entity.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::fs::{self, File};
use std::fs;
use test_case::test_case;

use fnv::FnvHashMap;
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use tf_demo_parser::demo::data::DemoTick;
use tf_demo_parser::demo::message::packetentities::{EntityId, PacketEntity, UpdateType};
use tf_demo_parser::demo::message::Message;
Expand Down Expand Up @@ -134,86 +133,15 @@ impl MessageHandler for EntityDumper {
}
}

#[test_case("test_data/small.dem", "test_data/small_entities.json"; "small.dem")]
fn entity_test(input_file: &str, snapshot_file: &str) {
#[test_case("test_data/small.dem")]
fn entity_test(input_file: &str) {
let file = fs::read(input_file).expect("Unable to read file");
let demo = Demo::new(&file);
let (_, entities) = DemoParser::new_with_analyser(demo.get_stream(), EntityDumper::new())
.parse()
.unwrap();

let json_file = File::open(snapshot_file).expect("Unable to read file");
let mut reader = BufReader::new(json_file);
let mut buffer = String::new();

let mut expected = Vec::with_capacity(128);

while reader.read_line(&mut buffer).expect("failed to read line") > 0 {
let entity: EntityDump =
serde_json::from_str(buffer.trim_end()).expect("failed to parse json");
expected.push(entity);
buffer.clear();
}

pretty_assertions::assert_eq!(expected.len(), entities.len());

let entity_ids: Vec<_> = entities.iter().map(|entity| entity.id).collect();
let expected_ids: Vec<_> = expected.iter().map(|entity| entity.id).collect();

pretty_assertions::assert_eq!(expected_ids, entity_ids);

for (expected_entity, entity) in expected.into_iter().zip(entities.into_iter()) {
pretty_assertions::assert_eq!(
expected_entity.tick,
entity.tick,
"Failed comparing entity {}",
entity.id
);
pretty_assertions::assert_eq!(
expected_entity.id,
entity.id,
"Failed comparing entity {}",
entity.id
);
pretty_assertions::assert_eq!(
expected_entity.server_class,
entity.server_class,
"Failed comparing entity {}",
entity.id
);
pretty_assertions::assert_eq!(
expected_entity.pvs,
entity.pvs,
"Failed comparing entity {}",
entity.id
);
let mut prop_names: Vec<_> = entity.props.keys().collect();
let mut expected_prop_names: Vec<_> = expected_entity.props.keys().collect();
prop_names.sort();
expected_prop_names.sort();

pretty_assertions::assert_eq!(
expected_prop_names,
prop_names,
"Failed comparing entity {}",
entity.id
);

for prop_name in expected_prop_names {
pretty_assertions::assert_eq!(
expected_entity.props.get(prop_name),
entity.props.get(prop_name),
"Failed comparing entity {} prop {}",
entity.id,
prop_name
);
}

pretty_assertions::assert_eq!(
expected_entity,
entity,
"Failed comparing entity {}",
entity.id
);
}
insta::with_settings!({sort_maps =>true}, {
insta::assert_json_snapshot!(entities);
});
}
23 changes: 6 additions & 17 deletions tests/sendprops.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use std::fs;
use test_case::test_case;

use fnv::FnvHashMap;
use std::collections::{HashMap, HashSet};
use std::collections::HashMap;
use tf_demo_parser::demo::packet::datatable::{ParseSendTable, SendTableName, ServerClass};
use tf_demo_parser::demo::parser::MessageHandler;
use tf_demo_parser::demo::sendprop::{SendPropIdentifier, SendPropName};
Expand Down Expand Up @@ -53,8 +53,8 @@ impl MessageHandler for SendPropAnalyser {
}
}

#[test_case("test_data/gully.dem", "test_data/gully_props.json"; "gully.dem")]
fn flatten_test(input_file: &str, snapshot_file: &str) {
#[test_case("test_data/gully.dem")]
fn flatten_test(input_file: &str) {
let file = fs::read(input_file).expect("Unable to read file");
let demo = Demo::new(&file);
let (_, (send_tables, prop_names)) =
Expand All @@ -79,18 +79,7 @@ fn flatten_test(input_file: &str, snapshot_file: &str) {
})
.collect();

let expected: HashMap<SendTableName, Vec<String>> = serde_json::from_slice(
fs::read(snapshot_file)
.expect("Unable to read file")
.as_slice(),
)
.unwrap();

let expected_tables: HashSet<_> = expected.keys().collect();
let actual_tables: HashSet<_> = flat_props.keys().collect();

pretty_assertions::assert_eq!(expected_tables, actual_tables);
for table in expected_tables {
pretty_assertions::assert_eq!(expected[table], flat_props[table]);
}
insta::with_settings!({sort_maps =>true}, {
insta::assert_json_snapshot!(flat_props);
});
}
Loading

0 comments on commit 07039da

Please sign in to comment.