revm_interpreter/interpreter/shared_memory.rs

1use super::MemoryTr;
2use core::{
3    cell::{Ref, RefCell, RefMut},
4    cmp::min,
5    fmt,
6    ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// A sequential memory shared between calls, which uses
/// a `Vec` for internal representation.
/// A [SharedMemory] instance should always be obtained using
/// the `new` static method to ensure memory safety.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared (via `Rc<RefCell<..>>`) between this
    /// context and its parent/child contexts.
    buffer: Rc<RefCell<Vec<u8>>>,
    /// Offset into `buffer` where this context's memory begins.
    /// Invariant: always in bounds of `buffer`.
    my_checkpoint: usize,
    /// Child checkpoint that we need to free context to.
    /// `Some` while a child created by `new_child_context` is outstanding.
    child_checkpoint: Option<usize>,
    /// Memory limit. See [`Cfg`](context_interface::Cfg).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
29
30impl fmt::Debug for SharedMemory {
31    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32        f.debug_struct("SharedMemory")
33            .field("current_len", &self.len())
34            .field("context_memory", &hex::encode(&*self.context_memory()))
35            .finish_non_exhaustive()
36    }
37}
38
39impl Default for SharedMemory {
40    #[inline]
41    fn default() -> Self {
42        Self::new()
43    }
44}
45
46impl MemoryTr for SharedMemory {
47    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
48        self.set_data(memory_offset, data_offset, len, data);
49    }
50
51    fn set(&mut self, memory_offset: usize, data: &[u8]) {
52        self.set(memory_offset, data);
53    }
54
55    fn size(&self) -> usize {
56        self.len()
57    }
58
59    fn copy(&mut self, destination: usize, source: usize, len: usize) {
60        self.copy(destination, source, len);
61    }
62
63    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
64        self.slice_range(range)
65    }
66
67    fn local_memory_offset(&self) -> usize {
68        self.my_checkpoint
69    }
70
71    fn set_data_from_global(
72        &mut self,
73        memory_offset: usize,
74        data_offset: usize,
75        len: usize,
76        data_range: Range<usize>,
77    ) {
78        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
79    }
80
81    /// Returns a byte slice of the memory region at the given offset.
82    ///
83    /// # Safety
84    ///
85    /// In debug this will panic on out of bounds. In release it will silently fail.
86    #[inline]
87    #[cfg_attr(debug_assertions, track_caller)]
88    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
89        let buffer = self.buffer.borrow(); // Borrow the inner Vec<u8>
90        Ref::map(buffer, |b| match b.get(range) {
91            Some(slice) => slice,
92            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
93        })
94    }
95
96    fn resize(&mut self, new_size: usize) -> bool {
97        self.resize(new_size);
98        true
99    }
100}
101
102impl SharedMemory {
103    /// Creates a new memory instance that can be shared between calls.
104    ///
105    /// The default initial capacity is 4KiB.
106    #[inline]
107    pub fn new() -> Self {
108        Self::with_capacity(4 * 1024) // from evmone
109    }
110
111    /// Creates a new memory instance with a given shared buffer.
112    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
113        Self {
114            buffer,
115            my_checkpoint: 0,
116            child_checkpoint: None,
117            #[cfg(feature = "memory_limit")]
118            memory_limit: u64::MAX,
119        }
120    }
121
122    /// Creates a new memory instance that can be shared between calls with the given `capacity`.
123    #[inline]
124    pub fn with_capacity(capacity: usize) -> Self {
125        Self {
126            buffer: Rc::new(RefCell::new(Vec::with_capacity(capacity))),
127            my_checkpoint: 0,
128            child_checkpoint: None,
129            #[cfg(feature = "memory_limit")]
130            memory_limit: u64::MAX,
131        }
132    }
133
134    /// Creates a new memory instance that can be shared between calls,
135    /// with `memory_limit` as upper bound for allocation size.
136    ///
137    /// The default initial capacity is 4KiB.
138    #[cfg(feature = "memory_limit")]
139    #[inline]
140    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
141        Self {
142            memory_limit,
143            ..Self::new()
144        }
145    }
146
147    /// Returns `true` if the `new_size` for the current context memory will
148    /// make the shared buffer length exceed the `memory_limit`.
149    #[cfg(feature = "memory_limit")]
150    #[inline]
151    pub fn limit_reached(&self, new_size: usize) -> bool {
152        self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
153    }
154
155    /// Prepares the shared memory for a new child context.
156    ///
157    /// # Panics
158    ///
159    /// Panics if this function was already called without freeing child context.
160    #[inline]
161    pub fn new_child_context(&mut self) -> SharedMemory {
162        if self.child_checkpoint.is_some() {
163            panic!("new_child_context was already called without freeing child context");
164        }
165        let new_checkpoint = self.buffer.borrow().len();
166        self.child_checkpoint = Some(new_checkpoint);
167        SharedMemory {
168            buffer: self.buffer.clone(),
169            my_checkpoint: new_checkpoint,
170            // child_checkpoint is same as my_checkpoint
171            child_checkpoint: None,
172            #[cfg(feature = "memory_limit")]
173            memory_limit: self.memory_limit,
174        }
175    }
176
177    /// Prepares the shared memory for returning from child context. Do nothing if there is no child context.
178    #[inline]
179    pub fn free_child_context(&mut self) {
180        let Some(child_checkpoint) = self.child_checkpoint.take() else {
181            return;
182        };
183        unsafe {
184            self.buffer.borrow_mut().set_len(child_checkpoint);
185        }
186    }
187
188    /// Returns the length of the current memory range.
189    #[inline]
190    pub fn len(&self) -> usize {
191        self.buffer.borrow().len() - self.my_checkpoint
192    }
193
194    /// Returns `true` if the current memory range is empty.
195    #[inline]
196    pub fn is_empty(&self) -> bool {
197        self.len() == 0
198    }
199
200    /// Resizes the memory in-place so that `len` is equal to `new_len`.
201    #[inline]
202    pub fn resize(&mut self, new_size: usize) {
203        self.buffer
204            .borrow_mut()
205            .resize(self.my_checkpoint + new_size, 0);
206    }
207
208    /// Returns a byte slice of the memory region at the given offset.
209    ///
210    /// # Panics
211    ///
212    /// Panics on out of bounds.
213    #[inline]
214    #[cfg_attr(debug_assertions, track_caller)]
215    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
216        self.slice_range(offset..offset + size)
217    }
218
219    /// Returns a byte slice of the memory region at the given offset.
220    ///
221    /// # Panics
222    ///
223    /// Panics on out of bounds.
224    #[inline]
225    #[cfg_attr(debug_assertions, track_caller)]
226    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
227        let buffer = self.buffer.borrow(); // Borrow the inner Vec<u8>
228        Ref::map(buffer, |b| {
229            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
230                Some(slice) => slice,
231                None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
232            }
233        })
234    }
235
236    /// Returns a byte slice of the memory region at the given offset.
237    ///
238    /// # Panics
239    ///
240    /// Panics on out of bounds.
241    #[inline]
242    #[cfg_attr(debug_assertions, track_caller)]
243    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
244        let buffer = self.buffer.borrow(); // Borrow the inner Vec<u8>
245        Ref::map(buffer, |b| match b.get(range) {
246            Some(slice) => slice,
247            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
248        })
249    }
250
251    /// Returns a byte slice of the memory region at the given offset.
252    ///
253    /// # Panics
254    ///
255    /// Panics on out of bounds.
256    #[inline]
257    #[cfg_attr(debug_assertions, track_caller)]
258    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
259        let buffer = self.buffer.borrow_mut(); // Borrow the inner Vec<u8> mutably
260        RefMut::map(buffer, |b| {
261            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
262                Some(slice) => slice,
263                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
264            }
265        })
266    }
267
268    /// Returns the byte at the given offset.
269    ///
270    /// # Panics
271    ///
272    /// Panics on out of bounds.
273    #[inline]
274    pub fn get_byte(&self, offset: usize) -> u8 {
275        self.slice_len(offset, 1)[0]
276    }
277
278    /// Returns a 32-byte slice of the memory region at the given offset.
279    ///
280    /// # Panics
281    ///
282    /// Panics on out of bounds.
283    #[inline]
284    pub fn get_word(&self, offset: usize) -> B256 {
285        (*self.slice_len(offset, 32)).try_into().unwrap()
286    }
287
288    /// Returns a U256 of the memory region at the given offset.
289    ///
290    /// # Panics
291    ///
292    /// Panics on out of bounds.
293    #[inline]
294    pub fn get_u256(&self, offset: usize) -> U256 {
295        self.get_word(offset).into()
296    }
297
298    /// Sets the `byte` at the given `index`.
299    ///
300    /// # Panics
301    ///
302    /// Panics on out of bounds.
303    #[inline]
304    #[cfg_attr(debug_assertions, track_caller)]
305    pub fn set_byte(&mut self, offset: usize, byte: u8) {
306        self.set(offset, &[byte]);
307    }
308
309    /// Sets the given 32-byte `value` to the memory region at the given `offset`.
310    ///
311    /// # Panics
312    ///
313    /// Panics on out of bounds.
314    #[inline]
315    #[cfg_attr(debug_assertions, track_caller)]
316    pub fn set_word(&mut self, offset: usize, value: &B256) {
317        self.set(offset, &value[..]);
318    }
319
320    /// Sets the given U256 `value` to the memory region at the given `offset`.
321    ///
322    /// # Panics
323    ///
324    /// Panics on out of bounds.
325    #[inline]
326    #[cfg_attr(debug_assertions, track_caller)]
327    pub fn set_u256(&mut self, offset: usize, value: U256) {
328        self.set(offset, &value.to_be_bytes::<32>());
329    }
330
331    /// Set memory region at given `offset`.
332    ///
333    /// # Panics
334    ///
335    /// Panics on out of bounds.
336    #[inline]
337    #[cfg_attr(debug_assertions, track_caller)]
338    pub fn set(&mut self, offset: usize, value: &[u8]) {
339        if !value.is_empty() {
340            self.slice_mut(offset, value.len()).copy_from_slice(value);
341        }
342    }
343
344    /// Set memory from data. Our memory offset+len is expected to be correct but we
345    /// are doing bound checks on data/data_offeset/len and zeroing parts that is not copied.
346    ///
347    /// # Panics
348    ///
349    /// Panics if memory is out of bounds.
350    #[inline]
351    #[cfg_attr(debug_assertions, track_caller)]
352    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
353        let mut dst = self.context_memory_mut();
354        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
355    }
356
357    /// Set data from global memory to local memory. If global range is smaller than len, zeroes the rest.
358    #[inline]
359    #[cfg_attr(debug_assertions, track_caller)]
360    pub fn global_to_local_set_data(
361        &mut self,
362        memory_offset: usize,
363        data_offset: usize,
364        len: usize,
365        data_range: Range<usize>,
366    ) {
367        let mut buffer = self.buffer.borrow_mut(); // Borrow the inner Vec<u8> mutably
368        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
369        let src = if data_range.is_empty() {
370            &mut []
371        } else {
372            src.get_mut(data_range).unwrap()
373        };
374        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
375    }
376
377    /// Copies elements from one part of the memory to another part of itself.
378    ///
379    /// # Panics
380    ///
381    /// Panics on out of bounds.
382    #[inline]
383    #[cfg_attr(debug_assertions, track_caller)]
384    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
385        self.context_memory_mut().copy_within(src..src + len, dst);
386    }
387
388    /// Returns a reference to the memory of the current context, the active memory.
389    #[inline]
390    pub fn context_memory(&self) -> Ref<'_, [u8]> {
391        let buffer = self.buffer.borrow();
392        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
393            Some(slice) => slice,
394            None => debug_unreachable!("Context memory should be always valid"),
395        })
396    }
397
398    /// Returns a mutable reference to the memory of the current context.
399    #[inline]
400    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
401        let buffer = self.buffer.borrow_mut(); // Borrow the inner Vec<u8> mutably
402        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
403            Some(slice) => slice,
404            None => debug_unreachable!("Context memory should be always valid"),
405        })
406    }
407}
408
409/// Copies data from src to dst taking into account the offsets and len.
410///
411/// If src does not have enough data, it nullifies the rest of dst that is not copied.
412///
413/// # Safety
414///
415/// Assumes that dst has enough space to copy the data.
416/// Assumes that src has enough data to copy.
417/// Assumes that dst_offset and src_offset are in bounds.
418/// Assumes that dst and src are valid.
419/// Assumes that dst and src do not overlap.
420unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
421    if src_offset >= src.len() {
422        // Nullify all memory slots
423        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
424        return;
425    }
426    let src_end = min(src_offset + len, src.len());
427    let src_len = src_end - src_offset;
428    debug_assert!(src_offset < src.len() && src_end <= src.len());
429    let data = unsafe { src.get_unchecked(src_offset..src_end) };
430    unsafe {
431        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
432            .copy_from_slice(data)
433    };
434
435    // Nullify rest of memory slots
436    // SAFETY: Memory is assumed to be valid, and it is commented where this assumption is made.
437    unsafe {
438        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
439            .fill(0)
440    };
441}
442
/// Returns the number of 32-byte words needed to hold `len` bytes,
/// i.e. rounds `len` up to a whole number of words.
///
/// Saturates instead of overflowing for lengths near `usize::MAX`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    // Equivalent to `len.saturating_add(31) / 32`, written with an explicit
    // overflow branch so the saturating behavior is visible.
    match len.checked_add(31) {
        Some(padded) => padded / 32,
        None => usize::MAX / 32,
    }
}
449
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        // Saturating add keeps the result at MAX/32 instead of overflowing.
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer.borrow().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // `set_len` fakes memory growth without writing contents; the capacity
        // from `new()` (4KiB) covers all lengths used in this test.
        unsafe { sm1.buffer.borrow_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        // Child starts at the parent's current buffer end.
        assert_eq!(sm2.buffer.borrow().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer.borrow_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer.borrow().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer.borrow_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer.borrow().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts: each free truncates the buffer back to the point where
        // the child was created.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer.borrow().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer.borrow().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer.borrow().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer.borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        // Resize zero-initializes the new bytes.
        assert_eq!(sm1.buffer.borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        // Child memory is appended after the parent's 32 bytes.
        assert_eq!(sm2.buffer.borrow().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer.borrow().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer.borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer.borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
533}