revm_interpreter/interpreter/shared_memory.rs

use super::MemoryTr;
use core::{
    cell::{Ref, RefCell, RefMut},
    cmp::min,
    fmt,
    ops::Range,
};
use primitives::{hex, B256, U256};
use std::{rc::Rc, vec::Vec};

trait RefcellExt<T> {
    fn dbg_borrow(&self) -> Ref<'_, T>;
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}

impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}

/// A sequential memory shared between calls, which uses
/// a `Vec` for its internal representation.
/// A [SharedMemory] instance should always be obtained using
/// the `new` static method to ensure memory safety.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Memory checkpoint for the current call depth.
    /// Invariant: this is always in bounds of `buffer`.
    my_checkpoint: usize,
    /// Child checkpoint that we need to free the context back to.
    child_checkpoint: Option<usize>,
    /// Memory limit. See [`Cfg`](context_interface::Cfg).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}

impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SharedMemory")
            .field("current_len", &self.len())
            .field("context_memory", &hex::encode(&*self.context_memory()))
            .finish_non_exhaustive()
    }
}

impl Default for SharedMemory {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl MemoryTr for SharedMemory {
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.set_data(memory_offset, data_offset, len, data);
    }

    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    fn size(&self) -> usize {
        self.len()
    }

    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        self.slice_range(range)
    }

    fn local_memory_offset(&self) -> usize {
        self.my_checkpoint
    }

    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an out-of-bounds range triggers undefined
    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    fn resize(&mut self, new_size: usize) -> bool {
        self.resize(new_size);
        true
    }

    /// Returns `true` if growing the current context memory to `offset + len` would
    /// make the shared buffer length exceed the `memory_limit`.
    #[cfg(feature = "memory_limit")]
    #[inline]
    fn limit_reached(&self, offset: usize, len: usize) -> bool {
        self.my_checkpoint
            .saturating_add(offset)
            .saturating_add(len) as u64
            > self.memory_limit
    }
}

impl SharedMemory {
    /// Creates a new memory instance that can be shared between calls.
    ///
    /// The default initial capacity is 4KiB.
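    ///
    /// A minimal usage sketch (kept as an `ignore`d doctest, since this type is normally
    /// driven by the interpreter rather than constructed by hand):
    ///
    /// ```ignore
    /// let mut memory = SharedMemory::new();
    /// memory.resize(32);                      // grow the current context to one word
    /// memory.set_u256(0, U256::from(42));     // write a big-endian word at offset 0
    /// assert_eq!(memory.get_u256(0), U256::from(42));
    /// ```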
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024) // from evmone
    }

    /// Creates a new invalid memory instance.
    #[inline]
    pub fn invalid() -> Self {
        Self {
            buffer: None,
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: 0,
        }
    }

    /// Creates a new memory instance with a given shared buffer.
    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
        Self {
            buffer: Some(buffer),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance that can be shared between calls with the given `capacity`.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance that can be shared between calls,
    /// with `memory_limit` as the upper bound for allocation size.
    ///
    /// The default initial capacity is 4KiB.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

    /// Sets the memory limit in bytes.
    #[inline]
    pub fn set_memory_limit(&mut self, limit: u64) {
        #[cfg(feature = "memory_limit")]
        {
            self.memory_limit = limit;
        }
    }

    #[inline]
    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::invalid");
        unsafe { self.buffer.as_ref().unwrap_unchecked() }
    }

    #[inline]
    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
        self.buffer().dbg_borrow()
    }

    #[inline]
    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
        self.buffer().dbg_borrow_mut()
    }

    /// Prepares the shared memory for a new child context.
    ///
    /// # Panics
    ///
    /// Panics if this function was already called without freeing the child context.
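    ///
    /// A sketch of the child-context lifecycle (an `ignore`d doctest):
    ///
    /// ```ignore
    /// let mut parent = SharedMemory::new();
    /// parent.resize(32);                        // parent context owns bytes 0..32
    /// let mut child = parent.new_child_context();
    /// child.resize(64);                         // child context owns bytes 32..96
    /// drop(child);
    /// parent.free_child_context();              // truncates the shared buffer back to 32
    /// ```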
    #[inline]
    pub fn new_child_context(&mut self) -> SharedMemory {
        if self.child_checkpoint.is_some() {
            panic!("new_child_context was already called without freeing child context");
        }
        let new_checkpoint = self.full_len();
        self.child_checkpoint = Some(new_checkpoint);
        SharedMemory {
            buffer: Some(self.buffer().clone()),
            my_checkpoint: new_checkpoint,
            // The child starts with no child context of its own.
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: self.memory_limit,
        }
    }

    /// Prepares the shared memory for returning from a child context. Does nothing if there is no child context.
    #[inline]
    pub fn free_child_context(&mut self) {
        let Some(child_checkpoint) = self.child_checkpoint.take() else {
            return;
        };
        // SAFETY: `child_checkpoint` never exceeds the current buffer length, and `u8`
        // has no destructor, so shrinking the length is sound.
        unsafe {
            self.buffer_ref_mut().set_len(child_checkpoint);
        }
    }

    /// Returns the length of the current memory range.
    #[inline]
    pub fn len(&self) -> usize {
        self.full_len() - self.my_checkpoint
    }

    /// Returns the length of the entire shared buffer.
    fn full_len(&self) -> usize {
        self.buffer_ref().len()
    }

    /// Returns `true` if the current memory range is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Resizes the memory in-place so that `len` is equal to `new_size`.
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer()
            .dbg_borrow_mut()
            .resize(self.my_checkpoint + new_size, 0);
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only; see [`slice_range`](Self::slice_range).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
        self.slice_range(offset..offset + size)
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an out-of-bounds range triggers undefined
    /// behavior. Callers must ensure that the range is within the bounds of the memory (i.e.,
    /// `range.end <= self.len()`).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| {
            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
            }
        })
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an out-of-bounds range triggers undefined
    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    /// Returns a mutable byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with out-of-bounds parameters triggers undefined
    /// behavior. Callers must ensure that `offset + size` does not exceed the length of the
    /// memory.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| {
            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
            }
        })
    }

    /// Returns the byte at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

    /// Returns a 32-byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        (*self.slice_len(offset, 32)).try_into().unwrap()
    }

    /// Returns a U256 of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

    /// Sets the `byte` at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

    /// Sets the given 32-byte `value` to the memory region at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

    /// Sets the given U256 `value` to the memory region at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }
    /// Sets the memory region at the given `offset` to `value`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
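    ///
    /// A small round-trip sketch (an `ignore`d doctest):
    ///
    /// ```ignore
    /// let mut memory = SharedMemory::new();
    /// memory.resize(32);
    /// memory.set(0, b"hello");
    /// assert_eq!(&memory.slice_len(0, 5)[..], b"hello");
    /// ```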
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

    /// Sets memory from `data`. The memory region `memory_offset..memory_offset + len` is
    /// expected to be in bounds, while `data_offset` and `len` are bounds-checked against
    /// `data`; any part that is not copied from `data` is zero-filled.
    ///
    /// # Panics
    ///
    /// Panics if memory is out of bounds.
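    ///
    /// A sketch of the zero-fill behavior (an `ignore`d doctest):
    ///
    /// ```ignore
    /// let mut memory = SharedMemory::new();
    /// memory.resize(32);
    /// // Only two bytes of `data` remain past `data_offset = 3`;
    /// // the other 30 destination bytes are zero-filled.
    /// memory.set_data(0, 3, 32, b"hello");
    /// assert_eq!(memory.get_byte(0), b'l');
    /// assert_eq!(memory.get_byte(2), 0);
    /// ```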
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        let mut dst = self.context_memory_mut();
        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
    }

    /// Sets data from the global memory range into local memory. If the global range is
    /// smaller than `len`, zeroes the rest.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_to_local_set_data(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        let mut buffer = self.buffer_ref_mut();
        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
        let src = if data_range.is_empty() {
            &mut []
        } else {
            src.get_mut(data_range).unwrap()
        };
        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
    }

    /// Copies elements from one part of the memory to another part of itself.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
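    ///
    /// A short MCOPY-style sketch (an `ignore`d doctest):
    ///
    /// ```ignore
    /// let mut memory = SharedMemory::new();
    /// memory.resize(64);
    /// memory.set(0, b"abcd");
    /// memory.copy(32, 0, 4); // copy bytes 0..4 to offset 32
    /// assert_eq!(&memory.slice_len(32, 4)[..], b"abcd");
    /// ```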
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

    /// Returns a reference to the memory of the current context, the active memory.
    ///
    /// # Panics
    ///
    /// Panics if the checkpoint is invalid in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an invalid checkpoint triggers undefined
    /// behavior. The checkpoint must be within the bounds of the buffer.
    #[inline]
    pub fn context_memory(&self) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should always be valid"),
        })
    }

    /// Returns a mutable reference to the memory of the current context.
    ///
    /// # Panics
    ///
    /// Panics if the checkpoint is invalid in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an invalid checkpoint triggers undefined
    /// behavior. The checkpoint must be within the bounds of the buffer.
    #[inline]
    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should always be valid"),
        })
    }
}

/// Copies data from `src` to `dst`, taking the offsets and `len` into account.
///
/// If `src` does not have enough data, the uncopied tail of `dst` is zero-filled.
///
/// # Safety
///
/// `dst` must be large enough for the write, i.e. `dst_offset + len <= dst.len()`.
/// Offsets into `src` are bounds-checked and clamped by this function, so `src` may be
/// shorter than requested.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    if src_offset >= src.len() {
        // Nullify all memory slots
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Nullify the rest of the memory slots.
    // SAFETY: `dst_offset + len` is in bounds per this function's safety contract.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}

/// Returns the number of 32-byte words required to hold `len` bytes,
/// i.e. rounds `len` up to the next word boundary.
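///
/// A few worked values (an `ignore`d doctest; the unit tests below cover more cases):
///
/// ```ignore
/// assert_eq!(num_words(0), 0);
/// assert_eq!(num_words(1), 1);  // a partial word still occupies a full word
/// assert_eq!(num_words(32), 1);
/// assert_eq!(num_words(33), 2);
/// ```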
#[inline]
pub const fn num_words(len: usize) -> usize {
    len.saturating_add(31) / 32
}

/// Performs an EVM memory resize, charging gas for any expansion of the current context.
/// Returns `false` if the required gas cannot be paid.
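///
/// A hypothetical call-site sketch; the surrounding opcode handler and the `gas`,
/// `memory`, and `offset` bindings are assumptions for illustration, not code from
/// this crate:
///
/// ```ignore
/// // Before an MSTORE-style opcode touches memory at `offset`:
/// if !resize_memory(&mut gas, &mut memory, offset, 32) {
///     // hypothetical out-of-gas handling
///     return;
/// }
/// ```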
#[inline]
#[must_use]
pub fn resize_memory<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    offset: usize,
    len: usize,
) -> bool {
    let new_num_words = num_words(offset.saturating_add(len));
    if new_num_words > gas.memory().words_num {
        resize_memory_cold(gas, memory, new_num_words)
    } else {
        true
    }
}

#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY: this path is only taken when `new_num_words` exceeds the currently
    // recorded word count, so `record_new_len` always returns `Some`.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    if !gas.record_cost(cost) {
        return false;
    }
    memory.resize(new_num_words * 32);
    true
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
679}