//! Shared call-frame memory — revm_interpreter/interpreter/shared_memory.rs

1use super::MemoryTr;
2use core::{
3    cell::{Ref, RefCell, RefMut},
4    cmp::min,
5    fmt,
6    ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// Extension trait adding "debug-checked" borrow helpers to [`RefCell`],
/// for borrows that this module's borrow discipline guarantees always succeed.
trait RefcellExt<T> {
    /// Immutably borrows the cell, assuming no conflicting mutable borrow exists.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell, assuming no other borrow exists.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
15
impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        // A failed borrow is a logic error in this module's borrow discipline.
        // NOTE(review): `debug_unreachable!` is a project macro — presumably it
        // panics in debug builds and is `unreachable_unchecked` in release;
        // confirm against its definition.
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        // Same contract as `dbg_borrow`: a conflicting borrow is unreachable.
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
33
/// A sequential memory shared between calls, which uses
/// a `Vec` for internal representation.
/// A [SharedMemory] instance should always be obtained using
/// the `new` static method to ensure memory safety.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared (via `Rc<RefCell<..>>`) between parent
    /// and child call contexts. `None` only for instances built by `invalid()`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Start of this context's memory region within the shared buffer.
    /// Invariant: always in bounds of the buffer.
    my_checkpoint: usize,
    /// Child checkpoint that we need to free context to.
    /// `Some` while a child created by `new_child_context` is live.
    child_checkpoint: Option<usize>,
    /// Memory limit. See [`Cfg`](context_interface::Cfg).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
52
53impl fmt::Debug for SharedMemory {
54    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
55        f.debug_struct("SharedMemory")
56            .field("current_len", &self.len())
57            .field("context_memory", &hex::encode(&*self.context_memory()))
58            .finish_non_exhaustive()
59    }
60}
61
impl Default for SharedMemory {
    /// Equivalent to [`SharedMemory::new`]: a fresh shared buffer with the
    /// default 4KiB capacity.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
68
69impl MemoryTr for SharedMemory {
70    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
71        self.set_data(memory_offset, data_offset, len, data);
72    }
73
74    fn set(&mut self, memory_offset: usize, data: &[u8]) {
75        self.set(memory_offset, data);
76    }
77
78    fn size(&self) -> usize {
79        self.len()
80    }
81
82    fn copy(&mut self, destination: usize, source: usize, len: usize) {
83        self.copy(destination, source, len);
84    }
85
86    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
87        self.slice_range(range)
88    }
89
90    fn local_memory_offset(&self) -> usize {
91        self.my_checkpoint
92    }
93
94    fn set_data_from_global(
95        &mut self,
96        memory_offset: usize,
97        data_offset: usize,
98        len: usize,
99        data_range: Range<usize>,
100    ) {
101        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
102    }
103
104    /// Returns a byte slice of the memory region at the given offset.
105    ///
106    /// # Panics
107    ///
108    /// Panics on out of bounds access in debug builds only.
109    ///
110    /// # Safety
111    ///
112    /// In release builds, calling this method with an out-of-bounds range triggers undefined
113    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
114    #[inline]
115    #[cfg_attr(debug_assertions, track_caller)]
116    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
117        let buffer = self.buffer_ref();
118        Ref::map(buffer, |b| match b.get(range) {
119            Some(slice) => slice,
120            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
121        })
122    }
123
124    fn resize(&mut self, new_size: usize) -> bool {
125        self.resize(new_size);
126        true
127    }
128}
129
130impl SharedMemory {
131    /// Creates a new memory instance that can be shared between calls.
132    ///
133    /// The default initial capacity is 4KiB.
134    #[inline]
135    pub fn new() -> Self {
136        Self::with_capacity(4 * 1024) // from evmone
137    }
138
139    /// Creates a new invalid memory instance.
140    #[inline]
141    pub fn invalid() -> Self {
142        Self {
143            buffer: None,
144            my_checkpoint: 0,
145            child_checkpoint: None,
146            #[cfg(feature = "memory_limit")]
147            memory_limit: 0,
148        }
149    }
150
151    /// Creates a new memory instance with a given shared buffer.
152    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
153        Self {
154            buffer: Some(buffer),
155            my_checkpoint: 0,
156            child_checkpoint: None,
157            #[cfg(feature = "memory_limit")]
158            memory_limit: u64::MAX,
159        }
160    }
161
162    /// Creates a new memory instance that can be shared between calls with the given `capacity`.
163    #[inline]
164    pub fn with_capacity(capacity: usize) -> Self {
165        Self {
166            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
167            my_checkpoint: 0,
168            child_checkpoint: None,
169            #[cfg(feature = "memory_limit")]
170            memory_limit: u64::MAX,
171        }
172    }
173
174    /// Creates a new memory instance that can be shared between calls,
175    /// with `memory_limit` as upper bound for allocation size.
176    ///
177    /// The default initial capacity is 4KiB.
178    #[cfg(feature = "memory_limit")]
179    #[inline]
180    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
181        Self {
182            memory_limit,
183            ..Self::new()
184        }
185    }
186
187    #[inline]
188    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
189        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
190        unsafe { self.buffer.as_ref().unwrap_unchecked() }
191    }
192
193    #[inline]
194    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
195        self.buffer().dbg_borrow()
196    }
197
198    #[inline]
199    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
200        self.buffer().dbg_borrow_mut()
201    }
202
203    /// Returns `true` if the `new_size` for the current context memory will
204    /// make the shared buffer length exceed the `memory_limit`.
205    #[cfg(feature = "memory_limit")]
206    #[inline]
207    pub fn limit_reached(&self, new_size: usize) -> bool {
208        self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
209    }
210
211    /// Prepares the shared memory for a new child context.
212    ///
213    /// # Panics
214    ///
215    /// Panics if this function was already called without freeing child context.
216    #[inline]
217    pub fn new_child_context(&mut self) -> SharedMemory {
218        if self.child_checkpoint.is_some() {
219            panic!("new_child_context was already called without freeing child context");
220        }
221        let new_checkpoint = self.full_len();
222        self.child_checkpoint = Some(new_checkpoint);
223        SharedMemory {
224            buffer: Some(self.buffer().clone()),
225            my_checkpoint: new_checkpoint,
226            // child_checkpoint is same as my_checkpoint
227            child_checkpoint: None,
228            #[cfg(feature = "memory_limit")]
229            memory_limit: self.memory_limit,
230        }
231    }
232
233    /// Prepares the shared memory for returning from child context. Do nothing if there is no child context.
234    #[inline]
235    pub fn free_child_context(&mut self) {
236        let Some(child_checkpoint) = self.child_checkpoint.take() else {
237            return;
238        };
239        unsafe {
240            self.buffer_ref_mut().set_len(child_checkpoint);
241        }
242    }
243
244    /// Returns the length of the current memory range.
245    #[inline]
246    pub fn len(&self) -> usize {
247        self.full_len() - self.my_checkpoint
248    }
249
250    fn full_len(&self) -> usize {
251        self.buffer_ref().len()
252    }
253
254    /// Returns `true` if the current memory range is empty.
255    #[inline]
256    pub fn is_empty(&self) -> bool {
257        self.len() == 0
258    }
259
260    /// Resizes the memory in-place so that `len` is equal to `new_len`.
261    #[inline]
262    pub fn resize(&mut self, new_size: usize) {
263        self.buffer()
264            .dbg_borrow_mut()
265            .resize(self.my_checkpoint + new_size, 0);
266    }
267
268    /// Returns a byte slice of the memory region at the given offset.
269    ///
270    /// # Panics
271    ///
272    /// Panics on out of bounds.
273    #[inline]
274    #[cfg_attr(debug_assertions, track_caller)]
275    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
276        self.slice_range(offset..offset + size)
277    }
278
279    /// Returns a byte slice of the memory region at the given offset.
280    ///
281    /// # Panics
282    ///
283    /// Panics on out of bounds access in debug builds only.
284    ///
285    /// # Safety
286    ///
287    /// In release builds, calling this method with an out-of-bounds range triggers undefined
288    /// behavior. Callers must ensure that the range is within the bounds of the memory (i.e.,
289    /// `range.end <= self.len()`).
290    #[inline]
291    #[cfg_attr(debug_assertions, track_caller)]
292    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
293        let buffer = self.buffer_ref();
294        Ref::map(buffer, |b| {
295            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
296                Some(slice) => slice,
297                None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
298            }
299        })
300    }
301
302    /// Returns a byte slice of the memory region at the given offset.
303    ///
304    /// # Panics
305    ///
306    /// Panics on out of bounds access in debug builds only.
307    ///
308    /// # Safety
309    ///
310    /// In release builds, calling this method with an out-of-bounds range triggers undefined
311    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
312    #[inline]
313    #[cfg_attr(debug_assertions, track_caller)]
314    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
315        let buffer = self.buffer_ref();
316        Ref::map(buffer, |b| match b.get(range) {
317            Some(slice) => slice,
318            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
319        })
320    }
321
322    /// Returns a byte slice of the memory region at the given offset.
323    ///
324    /// # Panics
325    ///
326    /// Panics on out of bounds access in debug builds only.
327    ///
328    /// # Safety
329    ///
330    /// In release builds, calling this method with out-of-bounds parameters triggers undefined
331    /// behavior. Callers must ensure that `offset + size` does not exceed the length of the
332    /// memory.
333    #[inline]
334    #[cfg_attr(debug_assertions, track_caller)]
335    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
336        let buffer = self.buffer_ref_mut();
337        RefMut::map(buffer, |b| {
338            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
339                Some(slice) => slice,
340                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
341            }
342        })
343    }
344
345    /// Returns the byte at the given offset.
346    ///
347    /// # Panics
348    ///
349    /// Panics on out of bounds.
350    #[inline]
351    pub fn get_byte(&self, offset: usize) -> u8 {
352        self.slice_len(offset, 1)[0]
353    }
354
355    /// Returns a 32-byte slice of the memory region at the given offset.
356    ///
357    /// # Panics
358    ///
359    /// Panics on out of bounds.
360    #[inline]
361    pub fn get_word(&self, offset: usize) -> B256 {
362        (*self.slice_len(offset, 32)).try_into().unwrap()
363    }
364
365    /// Returns a U256 of the memory region at the given offset.
366    ///
367    /// # Panics
368    ///
369    /// Panics on out of bounds.
370    #[inline]
371    pub fn get_u256(&self, offset: usize) -> U256 {
372        self.get_word(offset).into()
373    }
374
375    /// Sets the `byte` at the given `index`.
376    ///
377    /// # Panics
378    ///
379    /// Panics on out of bounds.
380    #[inline]
381    #[cfg_attr(debug_assertions, track_caller)]
382    pub fn set_byte(&mut self, offset: usize, byte: u8) {
383        self.set(offset, &[byte]);
384    }
385
386    /// Sets the given 32-byte `value` to the memory region at the given `offset`.
387    ///
388    /// # Panics
389    ///
390    /// Panics on out of bounds.
391    #[inline]
392    #[cfg_attr(debug_assertions, track_caller)]
393    pub fn set_word(&mut self, offset: usize, value: &B256) {
394        self.set(offset, &value[..]);
395    }
396
397    /// Sets the given U256 `value` to the memory region at the given `offset`.
398    ///
399    /// # Panics
400    ///
401    /// Panics on out of bounds.
402    #[inline]
403    #[cfg_attr(debug_assertions, track_caller)]
404    pub fn set_u256(&mut self, offset: usize, value: U256) {
405        self.set(offset, &value.to_be_bytes::<32>());
406    }
407
408    /// Set memory region at given `offset`.
409    ///
410    /// # Panics
411    ///
412    /// Panics on out of bounds.
413    #[inline]
414    #[cfg_attr(debug_assertions, track_caller)]
415    pub fn set(&mut self, offset: usize, value: &[u8]) {
416        if !value.is_empty() {
417            self.slice_mut(offset, value.len()).copy_from_slice(value);
418        }
419    }
420
421    /// Set memory from data. Our memory offset+len is expected to be correct but we
422    /// are doing bound checks on data/data_offeset/len and zeroing parts that is not copied.
423    ///
424    /// # Panics
425    ///
426    /// Panics if memory is out of bounds.
427    #[inline]
428    #[cfg_attr(debug_assertions, track_caller)]
429    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
430        let mut dst = self.context_memory_mut();
431        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
432    }
433
434    /// Set data from global memory to local memory. If global range is smaller than len, zeroes the rest.
435    #[inline]
436    #[cfg_attr(debug_assertions, track_caller)]
437    pub fn global_to_local_set_data(
438        &mut self,
439        memory_offset: usize,
440        data_offset: usize,
441        len: usize,
442        data_range: Range<usize>,
443    ) {
444        let mut buffer = self.buffer_ref_mut();
445        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
446        let src = if data_range.is_empty() {
447            &mut []
448        } else {
449            src.get_mut(data_range).unwrap()
450        };
451        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
452    }
453
454    /// Copies elements from one part of the memory to another part of itself.
455    ///
456    /// # Panics
457    ///
458    /// Panics on out of bounds.
459    #[inline]
460    #[cfg_attr(debug_assertions, track_caller)]
461    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
462        self.context_memory_mut().copy_within(src..src + len, dst);
463    }
464
465    /// Returns a reference to the memory of the current context, the active memory.
466    ///
467    /// # Panics
468    ///
469    /// Panics if the checkpoint is invalid in debug builds only.
470    ///
471    /// # Safety
472    ///
473    /// In release builds, calling this method with an invalid checkpoint triggers undefined
474    /// behavior. The checkpoint must be within the bounds of the buffer.
475    #[inline]
476    pub fn context_memory(&self) -> Ref<'_, [u8]> {
477        let buffer = self.buffer_ref();
478        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
479            Some(slice) => slice,
480            None => debug_unreachable!("Context memory should be always valid"),
481        })
482    }
483
484    /// Returns a mutable reference to the memory of the current context.
485    ///
486    /// # Panics
487    ///
488    /// Panics if the checkpoint is invalid in debug builds only.
489    ///
490    /// # Safety
491    ///
492    /// In release builds, calling this method with an invalid checkpoint triggers undefined
493    /// behavior. The checkpoint must be within the bounds of the buffer.
494    #[inline]
495    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
496        let buffer = self.buffer_ref_mut();
497        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
498            Some(slice) => slice,
499            None => debug_unreachable!("Context memory should be always valid"),
500        })
501    }
502}
503
/// Copies data from src to dst taking into account the offsets and len.
///
/// If src does not have enough data, it nullifies the rest of dst that is not copied.
///
/// # Safety
///
/// Assumes that dst has enough space to copy the data.
/// Assumes that src has enough data to copy.
/// Assumes that dst_offset and src_offset are in bounds.
/// Assumes that dst and src are valid.
/// Assumes that dst and src do not overlap.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    // Source starts past its end: nothing to copy, zero the whole window.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Clamp the copy to the bytes actually available in `src`.
    let copy_end = min(src_offset + len, src.len());
    let copy_len = copy_end - src_offset;
    debug_assert!(src_offset < src.len() && copy_end <= src.len());
    // SAFETY: `src_offset < src.len()` (checked above) and
    // `copy_end <= src.len()` by construction of `min`.
    let bytes = unsafe { src.get_unchecked(src_offset..copy_end) };
    // SAFETY: the caller guarantees `dst_offset + len <= dst.len()`, and
    // `copy_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + copy_len)
            .copy_from_slice(bytes)
    };
    // SAFETY: same caller-provided bound; zero the tail with no source bytes.
    unsafe {
        dst.get_unchecked_mut(dst_offset + copy_len..dst_offset + len)
            .fill(0)
    };
}
537
/// Returns the number of 32-byte EVM words required to hold `len` bytes,
/// i.e. `len` rounded up to the next word boundary, divided by 32.
///
/// Saturates instead of overflowing when `len` is within 31 of `usize::MAX`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    // `>> 5` is division by 32 for unsigned integers.
    len.saturating_add(31) >> 5
}
544
545/// Performs EVM memory resize.
546#[inline]
547#[must_use]
548pub fn resize_memory<Memory: MemoryTr>(
549    gas: &mut crate::Gas,
550    memory: &mut Memory,
551    offset: usize,
552    len: usize,
553) -> bool {
554    let new_num_words = num_words(offset.saturating_add(len));
555    if new_num_words > gas.memory().words_num {
556        resize_memory_cold(gas, memory, new_num_words)
557    } else {
558        true
559    }
560}
561
562#[cold]
563#[inline(never)]
564fn resize_memory_cold<Memory: MemoryTr>(
565    gas: &mut crate::Gas,
566    memory: &mut Memory,
567    new_num_words: usize,
568) -> bool {
569    let cost = unsafe {
570        gas.memory_mut()
571            .record_new_len(new_num_words)
572            .unwrap_unchecked()
573    };
574    if !gas.record_cost(cost) {
575        return false;
576    }
577    memory.resize(new_num_words * 32);
578    true
579}
580
#[cfg(test)]
mod tests {
    use super::*;

    // Checks word rounding, including the saturating behavior at usize::MAX.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    // Exercises checkpoint bookkeeping across nested child contexts.
    // `set_len` is used to fake growth without touching contents; the buffer
    // capacity (4KiB) always covers the lengths used here.
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts: each free truncates the buffer back to the
        // checkpoint recorded when the child was created.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    // Resizing a child context grows the shared buffer past the parent's
    // region and zero-fills the new bytes; freeing restores the parent view.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}