revm_interpreter/interpreter/
stack.rs1use super::StackTr;
2use crate::InstructionResult;
3use core::fmt;
4use primitives::{hints_util::cold_path, U256};
5use std::vec::Vec;
6
/// EVM interpreter stack limit: at most 1024 items may be on the stack at once.
pub const STACK_LIMIT: usize = 1024;
9
/// EVM interpreter stack, limited to [`STACK_LIMIT`] items.
#[derive(Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct Stack {
    /// The underlying stack data, top element last.
    ///
    /// Invariant relied upon by the raw-pointer fast paths (`push`, `dup`,
    /// `push_slice_`): `data.capacity() >= STACK_LIMIT`, as established by
    /// `new()` and the serde deserializer, and debug-asserted in `push`.
    /// NOTE(review): `invalid()` deliberately breaks this invariant —
    /// presumably only used as a placeholder; confirm with callers.
    data: Vec<U256>,
}
17
18impl fmt::Display for Stack {
19 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
20 f.write_str("[")?;
21 for (i, x) in self.data.iter().enumerate() {
22 if i > 0 {
23 f.write_str(", ")?;
24 }
25 write!(f, "{x}")?;
26 }
27 f.write_str("]")
28 }
29}
30
31impl Default for Stack {
32 #[inline]
33 fn default() -> Self {
34 Self::new()
35 }
36}
37
38impl Clone for Stack {
39 fn clone(&self) -> Self {
40 let mut new_stack = Self::new();
44 new_stack.data.extend_from_slice(&self.data);
45 new_stack
46 }
47}
48
impl StackTr for Stack {
    // All trait methods delegate to the inherent methods defined below.
    // Inside these bodies, `self.method()` resolves to the *inherent* impl
    // (inherent methods take precedence over trait methods during method
    // resolution), so these are not infinitely recursive.

    /// Returns the number of items on the stack.
    #[inline]
    fn len(&self) -> usize {
        self.len()
    }

    /// Returns the stack contents as a slice, top element last.
    #[inline]
    fn data(&self) -> &[U256] {
        &self.data
    }

    /// Removes all items from the stack; capacity is retained.
    #[inline]
    fn clear(&mut self) {
        self.data.clear();
    }

    /// Pops `N` values from the top, or `None` on underflow.
    #[inline]
    fn popn<const N: usize>(&mut self) -> Option<[U256; N]> {
        self.popn()
    }

    /// Pops `POPN` values and returns a mutable reference to the new top.
    #[inline]
    fn popn_top<const POPN: usize>(&mut self) -> Option<([U256; POPN], &mut U256)> {
        self.popn_top()
    }

    /// Swaps the items at depths `n` and `n + m`; `false` on underflow.
    #[inline]
    fn exchange(&mut self, n: usize, m: usize) -> bool {
        self.exchange(n, m)
    }

    /// Duplicates the `n`th item from the top onto the stack; `false` on failure.
    #[inline]
    fn dup(&mut self, n: usize) -> bool {
        self.dup(n)
    }

    /// Pushes a value; `false` if the stack is full.
    #[inline]
    fn push(&mut self, value: U256) -> bool {
        self.push(value)
    }

    /// Pushes a byte slice as 32-byte words; delegates to the private
    /// bool-returning helper rather than the `Result`-returning `push_slice`.
    #[inline]
    fn push_slice(&mut self, slice: &[u8]) -> bool {
        self.push_slice_(slice)
    }
}
95
impl Stack {
    /// Instantiates a new, empty stack with the full [`STACK_LIMIT`] capacity
    /// preallocated, so the raw-pointer writes in `push`/`dup`/`push_slice_`
    /// never need to grow the buffer.
    #[inline]
    pub fn new() -> Self {
        Self {
            data: Vec::with_capacity(STACK_LIMIT),
        }
    }

    /// Creates a stack with zero capacity.
    ///
    /// NOTE(review): this breaks the `capacity() >= STACK_LIMIT` invariant
    /// that `push` debug-asserts; presumably only a cheap placeholder value —
    /// confirm callers never push onto it.
    #[inline]
    pub const fn invalid() -> Self {
        Self { data: Vec::new() }
    }

    /// Returns the number of items on the stack.
    #[inline]
    pub const fn len(&self) -> usize {
        self.data.len()
    }

    /// Returns `true` if the stack holds no items.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Returns a shared reference to the underlying buffer, top element last.
    #[inline]
    pub const fn data(&self) -> &Vec<U256> {
        &self.data
    }

    /// Returns a mutable reference to the underlying buffer.
    #[inline]
    pub const fn data_mut(&mut self) -> &mut Vec<U256> {
        &mut self.data
    }

    /// Consumes the stack, returning the underlying buffer.
    #[inline]
    pub fn into_data(self) -> Vec<U256> {
        self.data
    }

    /// Removes the topmost element, or returns `StackUnderflow` if empty.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn pop(&mut self) -> Result<U256, InstructionResult> {
        self.data.pop().ok_or(InstructionResult::StackUnderflow)
    }

    /// Removes the topmost element without checking for emptiness.
    ///
    /// # Safety
    ///
    /// The caller must ensure the stack is not empty; `assume!` makes an empty
    /// stack undefined behavior in release builds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub unsafe fn pop_unchecked(&mut self) -> U256 {
        assume!(!self.is_empty());
        self.data.pop().unwrap_unchecked()
    }

    /// Returns a mutable reference to the topmost element, if any.
    #[inline]
    pub fn top(&mut self) -> Option<&mut U256> {
        self.data.last_mut()
    }

    /// Returns a mutable reference to the topmost element without checking.
    ///
    /// # Safety
    ///
    /// The caller must ensure the stack is not empty.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub unsafe fn top_unchecked(&mut self) -> &mut U256 {
        assume!(!self.is_empty());
        self.data.last_mut().unwrap_unchecked()
    }

    /// Pops `N` values from the top of the stack, or `None` on underflow.
    #[inline]
    pub fn popn<const N: usize>(&mut self) -> Option<[U256; N]> {
        if self.len() < N {
            return None;
        }
        // SAFETY: length checked above.
        Some(unsafe { self.popn_unchecked() })
    }

    /// Pops `N` values from the top without a length check.
    ///
    /// Element `0` of the returned array is the previous top of the stack.
    ///
    /// # Safety
    ///
    /// The caller must ensure `self.len() >= N`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub unsafe fn popn_unchecked<const N: usize>(&mut self) -> [U256; N] {
        assume!(self.len() >= N);
        core::array::from_fn(|_| unsafe { self.pop_unchecked() })
    }

    /// Pops `N` values and returns a mutable reference to the new top;
    /// `None` unless at least `N + 1` items are present.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn popn_top<const N: usize>(&mut self) -> Option<([U256; N], &mut U256)> {
        if self.len() < N + 1 {
            return None;
        }
        // SAFETY: length checked above (N popped + 1 remaining for `top`).
        Some(unsafe { self.popn_top_unchecked() })
    }

    /// Pops `N` values and returns the new top, without a length check.
    ///
    /// # Safety
    ///
    /// The caller must ensure `self.len() >= N + 1`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub unsafe fn popn_top_unchecked<const N: usize>(&mut self) -> ([U256; N], &mut U256) {
        unsafe { (self.popn_unchecked(), self.top_unchecked()) }
    }

    /// Pushes a new element onto the stack.
    ///
    /// Returns `false` (leaving the stack untouched) if the stack is full.
    #[inline]
    #[must_use]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn push(&mut self, value: U256) -> bool {
        // Capacity invariant: `new()` preallocates STACK_LIMIT, so the raw
        // write below never grows the Vec. A stack from `invalid()` would
        // violate this (caught here in debug builds only).
        debug_assert!(self.data.capacity() >= STACK_LIMIT);
        let len = self.len();
        if len == STACK_LIMIT {
            cold_path();
            return false;
        }
        // SAFETY: len < STACK_LIMIT <= capacity, so the slot at `len` is
        // allocated; write then publish it via `set_len`.
        unsafe {
            let end = self.data.as_mut_ptr().add(len);
            core::ptr::write(end, value);
            self.data.set_len(len + 1);
        }
        true
    }

    /// Returns a copy of the element `no_from_top` positions below the top
    /// (`0` is the top), or `StackUnderflow` if out of range.
    #[inline]
    pub fn peek(&self, no_from_top: usize) -> Result<U256, InstructionResult> {
        if self.data.len() > no_from_top {
            Ok(self.data[self.data.len() - no_from_top - 1])
        } else {
            cold_path();
            Err(InstructionResult::StackUnderflow)
        }
    }

    /// Duplicates the `n`th value from the top of the stack (`n >= 1`),
    /// pushing the copy on top.
    ///
    /// Returns `false` on underflow or if the stack is already full.
    #[inline]
    #[must_use]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn dup(&mut self, n: usize) -> bool {
        // n == 0 would read one past the current top; callers must pass >= 1.
        assume!(n > 0, "attempted to dup 0");
        let len = self.len();
        // Non-short-circuiting `|` on purpose: both checks are cheap and this
        // keeps the hot path branch-predictable.
        if (len < n) | (len + 1 > STACK_LIMIT) {
            cold_path();
            return false;
        }
        // SAFETY: `len + 1 <= STACK_LIMIT <= capacity` makes the slot at `len`
        // writable, and `len >= n` makes `ptr.sub(n)` an initialized element.
        unsafe {
            let ptr = self.data.as_mut_ptr().add(len);
            *ptr = *ptr.sub(n);
            self.data.set_len(len + 1);
        }
        true
    }

    /// Swaps the top of the stack with the value `n` positions below it
    /// (SWAPn semantics). Equivalent to `exchange(0, n)`.
    #[inline(always)]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn swap(&mut self, n: usize) -> bool {
        self.exchange(0, n)
    }

    /// Exchanges the value `n` positions below the top with the value
    /// `n + m` positions below the top (`m >= 1`).
    ///
    /// Returns `false` if `n + m` is out of range.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn exchange(&mut self, n: usize, m: usize) -> bool {
        // m == 0 would make the two pointers equal; `swap_nonoverlapping`
        // with overlapping (here: identical) ranges is UB.
        assume!(m > 0, "overlapping exchange");
        let len = self.len();
        let n_m_index = n + m;
        if n_m_index >= len {
            cold_path();
            return false;
        }
        // SAFETY: `n < n_m_index < len`, so both offsets from the top pointer
        // are in-bounds initialized elements, and they are distinct (m > 0).
        unsafe {
            let top = self.data.as_mut_ptr().add(len - 1);
            core::ptr::swap_nonoverlapping(top.sub(n), top.sub(n_m_index), 1);
        }
        true
    }

    /// Pushes an arbitrary-length byte slice onto the stack as one or more
    /// 32-byte words, or returns `StackOverflow` if it does not fit.
    #[inline]
    pub fn push_slice(&mut self, slice: &[u8]) -> Result<(), InstructionResult> {
        if self.push_slice_(slice) {
            Ok(())
        } else {
            Err(InstructionResult::StackOverflow)
        }
    }

    /// Pushes `slice` as `ceil(slice.len() / 32)` big-endian words; the first
    /// 32 bytes become the deepest pushed word. Returns `false` on overflow;
    /// an empty slice is a successful no-op.
    #[inline]
    fn push_slice_(&mut self, slice: &[u8]) -> bool {
        if slice.is_empty() {
            cold_path();
            return true;
        }

        let n_words = slice.len().div_ceil(32);
        let new_len = self.len() + n_words;
        if new_len > STACK_LIMIT {
            cold_path();
            return false;
        }

        // Capacity invariant from `new()`; not re-checked in release builds.
        debug_assert!(self.data.capacity() >= new_len);

        // SAFETY: capacity covers `new_len`; every u64 limb of the `n_words`
        // new slots is written (or zeroed) before the function returns, so
        // `set_len` only ever exposes fully initialized words.
        unsafe {
            // View the uninitialized tail as u64 limbs; `i` counts limbs written.
            let dst = self.data.as_mut_ptr().add(self.len()).cast::<u64>();
            self.data.set_len(new_len);

            let mut i = 0;

            // Full 32-byte words: write 4 limbs each. `rchunks_exact` walks
            // the word back-to-front, so the least-significant 8 bytes land in
            // the first limb — matching U256's limb order. NOTE(review):
            // assumes U256 is exactly four u64 limbs, least-significant first
            // (consistent with the `i.div_ceil(4)` accounting below).
            let words = slice.chunks_exact(32);
            let partial_last_word = words.remainder();
            for word in words {
                for l in word.rchunks_exact(8) {
                    dst.add(i).write(u64::from_be_bytes(l.try_into().unwrap()));
                    i += 1;
                }
            }

            if partial_last_word.is_empty() {
                return true;
            }

            // Partial trailing word: full 8-byte limbs first (again back-to-front).
            let limbs = partial_last_word.rchunks_exact(8);
            let partial_last_limb = limbs.remainder();
            for l in limbs {
                dst.add(i).write(u64::from_be_bytes(l.try_into().unwrap()));
                i += 1;
            }

            // Then a zero-padded limb for any leftover 1..=7 bytes.
            if !partial_last_limb.is_empty() {
                let mut tmp = [0u8; 8];
                tmp[8 - partial_last_limb.len()..].copy_from_slice(partial_last_limb);
                dst.add(i).write(u64::from_be_bytes(tmp));
                i += 1;
            }

            debug_assert_eq!(i.div_ceil(4), n_words, "wrote too much");

            // Zero the remaining (most-significant) limbs of the last word so
            // the whole U256 is initialized.
            let m = i % 4; if m != 0 {
                dst.add(i).write_bytes(0, 4 - m);
            }
        }

        true
    }

    /// Overwrites the element `no_from_top` positions below the top (`0` is
    /// the top) with `val`, or returns `StackUnderflow` if out of range.
    #[inline]
    pub fn set(&mut self, no_from_top: usize, val: U256) -> Result<(), InstructionResult> {
        if self.len() <= no_from_top {
            cold_path();
            return Err(InstructionResult::StackUnderflow);
        }
        let len = self.len();
        self.data[len - no_from_top - 1] = val;
        Ok(())
    }
}
419
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for Stack {
    /// Deserializes a `Stack`, rejecting payloads longer than `STACK_LIMIT`
    /// and restoring the full backing capacity that `push`'s unsafe fast
    /// path relies on.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Mirror of `Stack`'s serialized shape (a single `data` field).
        #[derive(serde::Deserialize)]
        struct StackSerde {
            data: Vec<U256>,
        }

        let StackSerde { mut data } = StackSerde::deserialize(deserializer)?;
        let len = data.len();
        if len > STACK_LIMIT {
            return Err(serde::de::Error::custom(std::format!(
                "stack size exceeds limit: {} > {}",
                len,
                STACK_LIMIT
            )));
        }
        // Top the capacity back up to STACK_LIMIT (capacity invariant).
        data.reserve(STACK_LIMIT - len);
        Ok(Self { data })
    }
}
443
#[cfg(test)]
mod tests {
    use super::*;

    /// Runs `f` against a fresh stack whose backing memory has been poisoned
    /// with `U256::MAX`, so any slot that `push_slice_` fails to fully
    /// initialize (e.g. unzeroed upper limbs) shows up as a wrong value
    /// instead of reading as accidental zeros.
    fn run(f: impl FnOnce(&mut Stack)) {
        let mut stack = Stack::new();
        // SAFETY: capacity is STACK_LIMIT; we only expose the buffer long
        // enough to fill it, then reset the length to 0 before use.
        unsafe {
            stack.data.set_len(STACK_LIMIT);
            stack.data.fill(U256::MAX);
            stack.data.set_len(0);
        }
        f(&mut stack);
    }

    #[test]
    fn push_slices() {
        // Empty slice: successful no-op.
        run(|stack| {
            stack.push_slice(b"").unwrap();
            assert!(stack.is_empty());
        });

        // Single byte becomes one zero-extended word.
        run(|stack| {
            stack.push_slice(&[42]).unwrap();
            assert_eq!(stack.data, [U256::from(42)]);
        });

        // 16 bytes: a partial word with two full limbs, upper limbs zeroed.
        let n = 0x1111_2222_3333_4444_5555_6666_7777_8888_u128;
        run(|stack| {
            stack.push_slice(&n.to_be_bytes()).unwrap();
            assert_eq!(stack.data, [U256::from(n)]);
        });

        // Two full 32-byte words.
        run(|stack| {
            let b = [U256::from(n).to_be_bytes::<32>(); 2].concat();
            stack.push_slice(&b).unwrap();
            assert_eq!(stack.data, [U256::from(n); 2]);
        });

        // Full word followed by a 1-byte partial word: first chunk is deepest.
        run(|stack| {
            let b = [&[0; 32][..], &[42u8]].concat();
            stack.push_slice(&b).unwrap();
            assert_eq!(stack.data, [U256::ZERO, U256::from(42)]);
        });

        // Full word followed by a 16-byte partial word.
        run(|stack| {
            let b = [&[0; 32][..], &n.to_be_bytes()].concat();
            stack.push_slice(&b).unwrap();
            assert_eq!(stack.data, [U256::ZERO, U256::from(n)]);
        });

        // Two full words followed by a partial word.
        run(|stack| {
            let b = [&[0; 64][..], &n.to_be_bytes()].concat();
            stack.push_slice(&b).unwrap();
            assert_eq!(stack.data, [U256::ZERO, U256::ZERO, U256::from(n)]);
        });
    }

    #[test]
    fn stack_clone() {
        // Cloning an empty stack preserves the full STACK_LIMIT capacity.
        let empty_stack = Stack::new();
        let cloned_empty = empty_stack.clone();
        assert_eq!(empty_stack, cloned_empty);
        assert_eq!(cloned_empty.len(), 0);
        assert_eq!(cloned_empty.data().capacity(), STACK_LIMIT);

        // Cloning a partially-filled stack copies contents and capacity.
        let mut partial_stack = Stack::new();
        for i in 0..10 {
            assert!(partial_stack.push(U256::from(i)));
        }
        let mut cloned_partial = partial_stack.clone();
        assert_eq!(partial_stack, cloned_partial);
        assert_eq!(cloned_partial.len(), 10);
        assert_eq!(cloned_partial.data().capacity(), STACK_LIMIT);

        // Clones are independent: pushing to one does not affect the other.
        assert!(cloned_partial.push(U256::from(100)));
        assert_ne!(partial_stack, cloned_partial);
        assert_eq!(partial_stack.len(), 10);
        assert_eq!(cloned_partial.len(), 11);

        // Cloning a full stack works and keeps the same capacity...
        let mut full_stack = Stack::new();
        for i in 0..STACK_LIMIT {
            assert!(full_stack.push(U256::from(i)));
        }
        let mut cloned_full = full_stack.clone();
        assert_eq!(full_stack, cloned_full);
        assert_eq!(cloned_full.len(), STACK_LIMIT);
        assert_eq!(cloned_full.data().capacity(), STACK_LIMIT);

        // ...and both original and clone reject further pushes.
        assert!(!full_stack.push(U256::from(100)));
        assert!(!cloned_full.push(U256::from(100)));
    }
}
544}