Merge pull request #330 from epage/unordered
feat(filter): Unordered match support
epage authored May 23, 2024
2 parents 7269940 + 4134e1a commit 3803df6
Showing 12 changed files with 2,161 additions and 814 deletions.
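
In short, expected (and/or actual) data can now opt into order-insensitive comparison. A minimal sketch of the call-site API, adapted from the doc example this PR adds to `IntoData::unordered` (requires the `json` feature; the `.unordered()` calls are the new part):

```rust
use snapbox::assert_data_eq;
use snapbox::prelude::*;
use snapbox::str;

fn main() {
    // Both sides hold the same JSON array elements, just in a different order;
    // `.unordered()` marks each side as order-insensitive.
    let actual = str![[r#"["world", "hello"]"#]]
        .is(snapbox::data::DataFormat::Json)
        .unordered();
    let expected = str![[r#"["hello", "world"]"#]]
        .is(snapbox::data::DataFormat::Json)
        .unordered();
    assert_data_eq!(actual, expected);
}
```
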
10 changes: 8 additions & 2 deletions crates/snapbox/src/assert/mod.rs
@@ -8,7 +8,7 @@ use anstream::stderr;
#[cfg(not(feature = "color"))]
use std::io::stderr;

use crate::filter::{Filter as _, FilterNewlines, FilterPaths, FilterRedactions};
use crate::filter::{Filter as _, FilterNewlines, FilterPaths, NormalizeToExpected};
use crate::IntoData;

pub use action::Action;
@@ -127,9 +127,15 @@ impl Assert {
if expected.filters.is_newlines_set() {
actual = FilterNewlines.filter(actual);
}

let mut normalize = NormalizeToExpected::new();
if expected.filters.is_redaction_set() {
actual = FilterRedactions::new(&self.substitutions, &expected).filter(actual);
normalize = normalize.redact_with(&self.substitutions);
}
if expected.filters.is_unordered_set() {
normalize = normalize.unordered();
}
actual = normalize.normalize(actual, &expected);

(actual, expected)
}
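
The hunk above folds redaction and the new unordered handling into a single `NormalizeToExpected` pass over the actual data. A hedged sketch of that builder used on its own, assuming the `pub use pattern::NormalizeToExpected` re-export added later in this PR makes it reachable as `snapbox::filter::NormalizeToExpected`:

```rust
use snapbox::filter::NormalizeToExpected;
use snapbox::IntoData;

fn main() {
    // Same lines, different order.
    let expected = "hello\nworld\n".into_data();
    let actual = "world\nhello\n".into_data();

    // Reorder `actual` to line up with `expected` before comparing.
    // NOTE: the exact line-reordering behavior is an assumption based on the
    // "Treat lines and json arrays as unordered" doc comment in this PR.
    let normalized = NormalizeToExpected::new()
        .unordered()
        .normalize(actual, &expected);

    assert_eq!(normalized.render(), expected.render());
}
```
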
10 changes: 10 additions & 0 deletions crates/snapbox/src/data/filters.rs
@@ -27,6 +27,11 @@ impl FilterSet {
self
}

pub(crate) fn unordered(mut self) -> Self {
self.set(Self::UNORDERED);
self
}

pub(crate) const fn is_redaction_set(&self) -> bool {
self.is_set(Self::REDACTIONS)
}
@@ -38,12 +43,17 @@
pub(crate) const fn is_paths_set(&self) -> bool {
self.is_set(Self::PATHS)
}

pub(crate) const fn is_unordered_set(&self) -> bool {
self.is_set(Self::UNORDERED)
}
}

impl FilterSet {
const REDACTIONS: usize = 1 << 0;
const NEWLINES: usize = 1 << 1;
const PATHS: usize = 1 << 2;
const UNORDERED: usize = 1 << 3;

fn set(&mut self, flag: usize) -> &mut Self {
self.flags |= flag;
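
The `FilterSet` additions follow the crate's existing bitflag pattern: each filter gets one bit, `set` ORs the bit in, and the `is_*_set` helpers test it. A simplified, self-contained restatement of that pattern (struct trimmed down for illustration):

```rust
// Simplified stand-in for the real `FilterSet`: one bit per filter.
#[derive(Default)]
struct FilterSet {
    flags: usize,
}

impl FilterSet {
    const REDACTIONS: usize = 1 << 0;
    const UNORDERED: usize = 1 << 3;

    fn unordered(mut self) -> Self {
        self.flags |= Self::UNORDERED; // turn the bit on
        self
    }

    const fn is_unordered_set(&self) -> bool {
        self.flags & Self::UNORDERED != 0 // test the bit
    }

    const fn is_redaction_set(&self) -> bool {
        self.flags & Self::REDACTIONS != 0
    }
}

fn main() {
    let filters = FilterSet::default().unordered();
    assert!(filters.is_unordered_set());
    assert!(!filters.is_redaction_set());
    println!("flags = {:#06b}", filters.flags); // prints "flags = 0b1000"
}
```
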
29 changes: 29 additions & 0 deletions crates/snapbox/src/data/mod.rs
@@ -80,6 +80,29 @@ pub trait IntoData: Sized {
self.into_data().raw()
}

/// Treat lines and json arrays as unordered
///
/// # Examples
///
/// ```rust
/// # #[cfg(feature = "json")] {
/// use snapbox::prelude::*;
/// use snapbox::str;
/// use snapbox::assert_data_eq;
///
/// let actual = str![[r#"["world", "hello"]"#]]
/// .is(snapbox::data::DataFormat::Json)
/// .unordered();
/// let expected = str![[r#"["hello", "world"]"#]]
/// .is(snapbox::data::DataFormat::Json)
/// .unordered();
/// assert_data_eq!(actual, expected);
/// # }
/// ```
fn unordered(self) -> Data {
self.into_data().unordered()
}

/// Initialize as [`format`][DataFormat] or [`Error`][DataFormat::Error]
///
/// This is generally used for `expected` data
@@ -301,6 +324,12 @@ impl Data {
self.filters = FilterSet::empty().newlines();
self
}

/// Treat lines and json arrays as unordered
pub fn unordered(mut self) -> Self {
self.filters = FilterSet::empty().unordered();
self
}
}

/// # Assertion frameworks operations
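
Note that, like the neighboring constructors, `Data::unordered` starts from `FilterSet::empty()`, so it selects only the unordered filter for that snapshot. A hedged text-lines example at the assertion level; the doc comment says plain-text lines are treated as unordered too, and `Assert` consults only the expected side's filters, though that exact behavior is inferred rather than shown in this diff:

```rust
use snapbox::assert_data_eq;
use snapbox::IntoData;

fn main() {
    // Hypothetical text-lines counterpart of the JSON doc example:
    // marking only the expected side as unordered should be enough.
    assert_data_eq!(
        "world\nhello\n",
        "hello\nworld\n".into_data().unordered(),
    );
}
```
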
7 changes: 4 additions & 3 deletions crates/snapbox/src/dir/diff.rs
@@ -1,5 +1,5 @@
#[cfg(feature = "dir")]
use crate::filter::{Filter as _, FilterNewlines, FilterPaths, FilterRedactions};
use crate::filter::{Filter as _, FilterNewlines, FilterPaths, NormalizeToExpected};

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum PathDiff {
@@ -160,8 +160,9 @@ impl PathDiff {
if normalize_paths {
actual = FilterPaths.filter(actual);
}
actual = FilterRedactions::new(substitutions, &expected)
.filter(FilterNewlines.filter(actual));
actual = NormalizeToExpected::new()
.redact_with(substitutions)
.normalize(FilterNewlines.filter(actual), &expected);

if expected != actual {
return Err(Self::ContentMismatch {
129 changes: 20 additions & 109 deletions crates/snapbox/src/filter/mod.rs
@@ -4,13 +4,19 @@
//! - Making snapshots consistent across platforms or conditional compilation
//! - Focusing snapshots on the characteristics of the data being tested
mod pattern;
mod redactions;
#[cfg(test)]
mod test;
#[cfg(test)]
mod test_redactions;
#[cfg(test)]
mod test_unordered_redactions;

use crate::data::DataInner;
use crate::Data;

pub use pattern::NormalizeToExpected;
pub use redactions::RedactedValue;
pub use redactions::Redactions;

@@ -33,13 +39,13 @@ impl Filter for FilterNewlines {
#[cfg(feature = "json")]
DataInner::Json(value) => {
let mut value = value;
normalize_json_string(&mut value, normalize_lines);
normalize_json_string(&mut value, &normalize_lines);
DataInner::Json(value)
}
#[cfg(feature = "json")]
DataInner::JsonLines(value) => {
let mut value = value;
normalize_json_string(&mut value, normalize_lines);
normalize_json_string(&mut value, &normalize_lines);
DataInner::JsonLines(value)
}
#[cfg(feature = "term-svg")]
@@ -80,13 +86,13 @@ impl Filter for FilterPaths {
#[cfg(feature = "json")]
DataInner::Json(value) => {
let mut value = value;
normalize_json_string(&mut value, normalize_paths);
normalize_json_string(&mut value, &normalize_paths);
DataInner::Json(value)
}
#[cfg(feature = "json")]
DataInner::JsonLines(value) => {
let mut value = value;
normalize_json_string(&mut value, normalize_paths);
normalize_json_string(&mut value, &normalize_paths);
DataInner::JsonLines(value)
}
#[cfg(feature = "term-svg")]
@@ -117,59 +123,36 @@ fn normalize_paths_chars(data: impl Iterator<Item = char>) -> impl Iterator<Item
data.map(|c| if c == '\\' { '/' } else { c })
}

pub struct FilterRedactions<'a> {
substitutions: &'a crate::Redactions,
pattern: &'a Data,
}

impl<'a> FilterRedactions<'a> {
pub fn new(substitutions: &'a crate::Redactions, pattern: &'a Data) -> Self {
FilterRedactions {
substitutions,
pattern,
}
}
struct NormalizeRedactions<'r> {
redactions: &'r Redactions,
}

impl Filter for FilterRedactions<'_> {
impl Filter for NormalizeRedactions<'_> {
fn filter(&self, data: Data) -> Data {
let source = data.source;
let filters = data.filters;
let inner = match data.inner {
DataInner::Error(err) => DataInner::Error(err),
DataInner::Binary(bin) => DataInner::Binary(bin),
DataInner::Text(text) => {
if let Some(pattern) = self.pattern.render() {
let lines = self.substitutions.normalize(&text, &pattern);
DataInner::Text(lines)
} else {
DataInner::Text(text)
}
let lines = self.redactions.redact(&text);
DataInner::Text(lines)
}
#[cfg(feature = "json")]
DataInner::Json(value) => {
let mut value = value;
if let DataInner::Json(exp) = &self.pattern.inner {
normalize_value_matches(&mut value, exp, self.substitutions);
}
normalize_json_string(&mut value, &|s| self.redactions.redact(s));
DataInner::Json(value)
}
#[cfg(feature = "json")]
DataInner::JsonLines(value) => {
let mut value = value;
if let DataInner::Json(exp) = &self.pattern.inner {
normalize_value_matches(&mut value, exp, self.substitutions);
}
normalize_json_string(&mut value, &|s| self.redactions.redact(s));
DataInner::JsonLines(value)
}
#[cfg(feature = "term-svg")]
DataInner::TermSvg(text) => {
if let Some(pattern) = self.pattern.render() {
let lines = self.substitutions.normalize(&text, &pattern);
DataInner::TermSvg(lines)
} else {
DataInner::TermSvg(text)
}
let lines = normalize_lines(&text);
DataInner::TermSvg(lines)
}
};
Data {
Expand All @@ -181,7 +164,7 @@ impl Filter for FilterRedactions<'_> {
}

#[cfg(feature = "structured-data")]
fn normalize_json_string(value: &mut serde_json::Value, op: fn(&str) -> String) {
fn normalize_json_string(value: &mut serde_json::Value, op: &dyn Fn(&str) -> String) {
match value {
serde_json::Value::String(str) => {
*str = op(str);
@@ -201,75 +184,3 @@ fn normalize_json_string(value: &mut serde_json::Value, op: fn(&str) -> String)
_ => {}
}
}
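
The signature change above, from a plain `fn(&str) -> String` pointer to `&dyn Fn(&str) -> String`, is what allows `NormalizeRedactions` to pass a capturing closure (`&|s| self.redactions.redact(s)`); function pointers cannot capture their environment. A self-contained sketch of the same recursive string-mapping walk, using `serde_json` as the crate does (the function name here is illustrative, not from the diff):

```rust
use serde_json::{json, Value};

// Recursively apply `op` to every string in a JSON value, mirroring the shape
// of `normalize_json_string` above (simplified).
fn map_strings(value: &mut Value, op: &dyn Fn(&str) -> String) {
    match value {
        Value::String(s) => *s = op(s),
        Value::Array(items) => items.iter_mut().for_each(|v| map_strings(v, op)),
        Value::Object(map) => map.values_mut().for_each(|v| map_strings(v, op)),
        _ => {}
    }
}

fn main() {
    let replacement = "[REDACTED]".to_owned(); // captured by the closure below
    let mut value = json!({"path": "/home/user", "count": 1});
    map_strings(&mut value, &|_s| replacement.clone());
    assert_eq!(value, json!({"path": "[REDACTED]", "count": 1}));
}
```
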

#[cfg(feature = "structured-data")]
fn normalize_value_matches(
actual: &mut serde_json::Value,
expected: &serde_json::Value,
substitutions: &crate::Redactions,
) {
use serde_json::Value::*;

const KEY_WILDCARD: &str = "...";
const VALUE_WILDCARD: &str = "{...}";

match (actual, expected) {
(act, String(exp)) if exp == VALUE_WILDCARD => {
*act = serde_json::json!(VALUE_WILDCARD);
}
(String(act), String(exp)) => {
*act = substitutions.normalize(act, exp);
}
(Array(act), Array(exp)) => {
let mut sections = exp.split(|e| e == VALUE_WILDCARD).peekable();
let mut processed = 0;
while let Some(expected_subset) = sections.next() {
// Process all values in the current section
if !expected_subset.is_empty() {
let actual_subset = &mut act[processed..processed + expected_subset.len()];
for (a, e) in actual_subset.iter_mut().zip(expected_subset) {
normalize_value_matches(a, e, substitutions);
}
processed += expected_subset.len();
}

if let Some(next_section) = sections.peek() {
// If the next section has nothing in it, replace from processed to end with
// a single "{...}"
if next_section.is_empty() {
act.splice(processed.., vec![String(VALUE_WILDCARD.to_owned())]);
processed += 1;
} else {
let first = next_section.first().unwrap();
// Replace everything up until the value we are looking for with
// a single "{...}".
if let Some(index) = act.iter().position(|v| v == first) {
act.splice(processed..index, vec![String(VALUE_WILDCARD.to_owned())]);
processed += 1;
} else {
// If we cannot find the value we are looking for return early
break;
}
}
}
}
}
(Object(act), Object(exp)) => {
let has_key_wildcard =
exp.get(KEY_WILDCARD).and_then(|v| v.as_str()) == Some(VALUE_WILDCARD);
for (actual_key, mut actual_value) in std::mem::replace(act, serde_json::Map::new()) {
let actual_key = substitutions.redact(&actual_key);
if let Some(expected_value) = exp.get(&actual_key) {
normalize_value_matches(&mut actual_value, expected_value, substitutions)
} else if has_key_wildcard {
continue;
}
act.insert(actual_key, actual_value);
}
if has_key_wildcard {
act.insert(KEY_WILDCARD.to_owned(), String(VALUE_WILDCARD.to_owned()));
}
}
(_, _) => {}
}
}