// revm_ee_tests/lib.rs
1//! Common test utilities for REVM crates.
2//!
3//! This crate provides shared test utilities that are used across different REVM crates.
4
5use std::path::PathBuf;
6
7use serde_json::Value;
8
9/// Configuration for the test data comparison utility.
/// Configuration for the test data comparison utility.
///
/// Controls where golden testdata JSON files are read from and written to by
/// [`compare_or_save_testdata_with_config`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TestdataConfig {
    /// The directory where test data files are stored.
    /// Defaults to `tests/testdata` (see the [`Default`] impl below).
    pub testdata_dir: PathBuf,
}
15
16impl Default for TestdataConfig {
17 fn default() -> Self {
18 Self {
19 testdata_dir: PathBuf::from("tests/testdata"),
20 }
21 }
22}
23
24/// Compares or saves the execution output to a testdata file.
25///
26/// This utility helps maintain consistent test behavior by comparing
27/// execution results against known-good outputs stored in JSON files.
28///
29/// # Arguments
30///
31/// * `filename` - The name of the testdata file, relative to tests/testdata/
32/// * `output` - The execution output to compare or save
33///
34/// # Panics
35///
36/// This function panics if:
37/// - The output doesn't match the expected testdata (when testdata file exists)
38/// - There's an error reading/writing files
39/// - JSON serialization/deserialization fails
40///
41/// # Note
42///
43/// Tests using this function require the `serde` feature to be enabled:
44/// ```bash
45/// cargo test --features serde
46/// ```
47pub fn compare_or_save_testdata<T>(filename: &str, output: &T)
48where
49 T: serde::Serialize + for<'a> serde::Deserialize<'a> + PartialEq + std::fmt::Debug,
50{
51 compare_or_save_testdata_with_config(filename, output, TestdataConfig::default());
52}
53
54/// Compares or saves the execution output to a testdata file with custom configuration.
55///
56/// This is a more flexible version of [`compare_or_save_testdata`] that allows
57/// specifying a custom testdata directory.
58///
59/// # Arguments
60///
61/// * `filename` - The name of the testdata file, relative to the testdata directory
62/// * `output` - The execution output to compare or save
63/// * `config` - Configuration for the test data comparison
64pub fn compare_or_save_testdata_with_config<T>(filename: &str, output: &T, config: TestdataConfig)
65where
66 T: serde::Serialize + for<'a> serde::Deserialize<'a> + PartialEq + std::fmt::Debug,
67{
68 use std::fs;
69
70 let testdata_file = config.testdata_dir.join(filename);
71
72 // Create directory if it doesn't exist
73 if !config.testdata_dir.exists() {
74 fs::create_dir_all(&config.testdata_dir).unwrap();
75 }
76
77 // Serialize the output to serde Value.
78 let output_json = serde_json::to_string(&output).unwrap();
79
80 // convert to Value and sort all objects.
81 let mut temp: Value = serde_json::from_str(&output_json).unwrap();
82 temp.sort_all_objects();
83
84 // serialize to pretty string
85 let output_json = serde_json::to_string_pretty(&temp).unwrap();
86
87 // If the testdata file doesn't exist, save the output
88 if !testdata_file.exists() {
89 fs::write(&testdata_file, &output_json).unwrap();
90 println!("Saved testdata to {}", testdata_file.display());
91 return;
92 }
93
94 // Read the expected output from the testdata file
95 let expected_json = fs::read_to_string(&testdata_file).unwrap();
96
97 // Deserialize to actual object for proper comparison
98 let expected: T = serde_json::from_str(&expected_json).unwrap();
99
100 // Compare the output objects directly
101 if *output != expected {
102 panic!(
103 "Value does not match testdata.\nExpected:\n{expected_json}\n\nActual:\n{output_json}"
104 );
105 }
106}
107
#[cfg(test)]
mod tests {
    use super::*;

    /// Simple serializable fixture used to exercise the golden-file helper.
    #[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize)]
    struct TestData {
        value: u32,
        message: String,
    }

    #[test]
    fn test_compare_or_save_testdata() {
        // First run saves the golden file; subsequent runs compare against it.
        let data = TestData {
            value: 42,
            message: String::from("test message"),
        };
        compare_or_save_testdata("test_data.json", &data);
    }
}
129
130#[cfg(test)]
131mod op_revm_tests;
132
133#[cfg(test)]
134mod revm_tests;