revm_interpreter/interpreter/shared_memory.rs

use super::MemoryTr;
use crate::{gas::params::GasParams, InstructionResult};
use core::{
    cell::{Ref, RefCell, RefMut},
    cmp::min,
    fmt,
    ops::Range,
};
use primitives::{hex, B256, U256};
use std::{rc::Rc, vec::Vec};

/// Extension trait for [`RefCell`] borrows that are checked in debug builds;
/// borrow failures are assumed unreachable in release builds.
trait RefcellExt<T> {
    fn dbg_borrow(&self) -> Ref<'_, T>;
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}

impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}

/// A sequential memory shared between calls, which uses
/// a `Vec` for its internal representation.
/// A [`SharedMemory`] instance should always be obtained using
/// the `new` static method to ensure memory safety.
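///
/// # Example
///
/// A minimal usage sketch, marked `ignore` since the exact public re-export
/// path of [`SharedMemory`] is not shown in this file:
///
/// ```ignore
/// let mut memory = SharedMemory::new();
/// memory.resize(64); // grow the current context to 64 zero-filled bytes
/// memory.set(0, b"hello");
/// assert_eq!(&memory.slice_len(0, 5)[..], &b"hello"[..]);
/// assert_eq!(memory.len(), 64);
/// ```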
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// This context's checkpoint into the shared buffer.
    /// Invariant: always in bounds of the buffer.
    my_checkpoint: usize,
    /// Child checkpoint that we need to free the context to.
    child_checkpoint: Option<usize>,
    /// Memory limit. See [`Cfg`](context_interface::Cfg).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}

impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SharedMemory")
            .field("current_len", &self.len())
            .field("context_memory", &hex::encode(&*self.context_memory()))
            .finish_non_exhaustive()
    }
}

impl Default for SharedMemory {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl MemoryTr for SharedMemory {
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.set_data(memory_offset, data_offset, len, data);
    }

    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    fn size(&self) -> usize {
        self.len()
    }

    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        self.slice_range(range)
    }

    fn local_memory_offset(&self) -> usize {
        self.my_checkpoint
    }

    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    /// Returns a byte slice of the memory region at the given global range.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an out-of-bounds range triggers undefined
    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    fn resize(&mut self, new_size: usize) -> bool {
        self.resize(new_size);
        true
    }

    /// Returns `true` if resizing the current context memory to `offset + len`
    /// would make the shared buffer length exceed the `memory_limit`.
    #[cfg(feature = "memory_limit")]
    #[inline]
    fn limit_reached(&self, offset: usize, len: usize) -> bool {
        self.my_checkpoint
            .saturating_add(offset)
            .saturating_add(len) as u64
            > self.memory_limit
    }
}

impl SharedMemory {
    /// Creates a new memory instance that can be shared between calls.
    ///
    /// The default initial capacity is 4KiB.
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024) // from evmone
    }

    /// Creates a new invalid memory instance with no backing buffer.
    #[inline]
    pub fn invalid() -> Self {
        Self {
            buffer: None,
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: 0,
        }
    }

    /// Creates a new memory instance with a given shared buffer.
    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
        Self {
            buffer: Some(buffer),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance that can be shared between calls with the given `capacity`.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance that can be shared between calls,
    /// with `memory_limit` as an upper bound for the allocation size.
    ///
    /// The default initial capacity is 4KiB.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

    /// Sets the memory limit in bytes.
    #[inline]
    pub fn set_memory_limit(&mut self, limit: u64) {
        #[cfg(feature = "memory_limit")]
        {
            self.memory_limit = limit;
        }
        // Silence the unused-variable lint when `memory_limit` is disabled.
        let _ = limit;
    }

    #[inline]
    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::invalid");
        // SAFETY: `buffer` is only `None` for instances created through
        // `Self::invalid`, which must not be used for memory operations.
        unsafe { self.buffer.as_ref().unwrap_unchecked() }
    }

    #[inline]
    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
        self.buffer().dbg_borrow()
    }

    #[inline]
    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
        self.buffer().dbg_borrow_mut()
    }

    /// Prepares the shared memory for a new child context.
    ///
    /// # Panics
    ///
    /// Panics if this function was already called without freeing the child context.
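    ///
    /// # Example
    ///
    /// A sketch of the intended pairing with [`Self::free_child_context`]
    /// (marked `ignore`; see the `new_free_child_context` test below for a
    /// compiled equivalent):
    ///
    /// ```ignore
    /// let mut parent = SharedMemory::new();
    /// parent.resize(32);
    /// let mut child = parent.new_child_context(); // child starts empty
    /// child.resize(32);                           // grows the shared buffer
    /// drop(child);
    /// parent.free_child_context();                // truncates back to 32 bytes
    /// assert_eq!(parent.len(), 32);
    /// ```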
    #[inline]
    pub fn new_child_context(&mut self) -> SharedMemory {
        if self.child_checkpoint.is_some() {
            panic!("new_child_context was already called without freeing child context");
        }
        let new_checkpoint = self.full_len();
        self.child_checkpoint = Some(new_checkpoint);
        SharedMemory {
            buffer: Some(self.buffer().clone()),
            my_checkpoint: new_checkpoint,
            // The fresh child has no child context of its own yet.
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: self.memory_limit,
        }
    }

    /// Prepares the shared memory for returning from a child context.
    /// Does nothing if there is no child context.
    #[inline]
    pub fn free_child_context(&mut self) {
        let Some(child_checkpoint) = self.child_checkpoint.take() else {
            return;
        };
        // SAFETY: `child_checkpoint` is a previous length of the buffer, so it is
        // in bounds, and shrinking a `Vec<u8>` cannot expose uninitialized bytes.
        unsafe {
            self.buffer_ref_mut().set_len(child_checkpoint);
        }
    }

    /// Returns the length of the current memory range.
    #[inline]
    pub fn len(&self) -> usize {
        self.full_len() - self.my_checkpoint
    }

    /// Returns the length of the entire shared buffer.
    fn full_len(&self) -> usize {
        self.buffer_ref().len()
    }

    /// Returns `true` if the current memory range is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Resizes the memory in-place so that `len` is equal to `new_size`.
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer()
            .dbg_borrow_mut()
            .resize(self.my_checkpoint + new_size, 0);
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only; see [`Self::slice_range`]
    /// for the release-build safety requirements.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
        self.slice_range(offset..offset + size)
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an out-of-bounds range triggers undefined
    /// behavior. Callers must ensure that the range is within the bounds of the memory (i.e.,
    /// `range.end <= self.len()`).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| {
            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
            }
        })
    }

    /// Returns a byte slice of the memory region at the given global range.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an out-of-bounds range triggers undefined
    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    /// Returns a mutable byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds access in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with out-of-bounds parameters triggers undefined
    /// behavior. Callers must ensure that `offset + size` does not exceed the length of the
    /// memory.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| {
            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
            }
        })
    }

    /// Returns the byte at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

    /// Returns the 32-byte word at the given offset, copied into a [`B256`].
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        (*self.slice_len(offset, 32)).try_into().unwrap()
    }

    /// Returns the 32-byte word at the given offset, interpreted as a big-endian [`U256`].
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

    /// Sets the `byte` at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

    /// Sets the given 32-byte `value` to the memory region at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

    /// Sets the given U256 `value` to the memory region at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }

    /// Sets the memory region at the given `offset` to `value`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

    /// Sets memory from `data`. The memory region `memory_offset..memory_offset + len` is
    /// expected to be valid, but `data`, `data_offset`, and `len` are bounds-checked, and
    /// any part of the destination not covered by `data` is zeroed.
    ///
    /// # Panics
    ///
    /// Panics if memory is out of bounds.
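    ///
    /// # Example
    ///
    /// A sketch of the zero-fill behavior when `data` is shorter than the
    /// requested `len` (marked `ignore` for the same re-export reasons as the
    /// type-level example):
    ///
    /// ```ignore
    /// let mut memory = SharedMemory::new();
    /// memory.resize(32);
    /// // Copies 2 bytes starting at `data_offset` 1, then zero-fills the rest.
    /// memory.set_data(0, 1, 32, &[0xaa, 0xbb, 0xcc]);
    /// assert_eq!(memory.get_byte(0), 0xbb);
    /// assert_eq!(memory.get_byte(1), 0xcc);
    /// assert_eq!(memory.get_byte(2), 0x00);
    /// ```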
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        let mut dst = self.context_memory_mut();
        // SAFETY: The caller guarantees that `memory_offset + len` is within the
        // current context memory, per this method's documented contract.
        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
    }

    /// Sets data from the global memory into the local context memory. If the global
    /// `data_range` supplies fewer than `len` bytes, the remainder is zeroed.
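    ///
    /// A sketch (marked `ignore`): `data_range` is addressed in *global* buffer
    /// coordinates and must lie before this context's checkpoint, while
    /// `memory_offset` is local to the current context:
    ///
    /// ```ignore
    /// let mut parent = SharedMemory::new();
    /// parent.resize(32);
    /// parent.set(0, &[0x11; 32]);
    /// let mut child = parent.new_child_context();
    /// child.resize(32);
    /// // Copy the parent's bytes 0..32 into the child's memory at offset 0.
    /// child.global_to_local_set_data(0, 0, 32, 0..32);
    /// assert_eq!(child.get_byte(0), 0x11);
    /// ```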
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_to_local_set_data(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        let mut buffer = self.buffer_ref_mut();
        // Split the buffer at our checkpoint: everything before it is global
        // (parent) memory, everything after it is this context's memory.
        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
        let src = if data_range.is_empty() {
            &mut []
        } else {
            src.get_mut(data_range).unwrap()
        };
        // SAFETY: `memory_offset + len` is expected to be within the current
        // context memory, as with `set_data`.
        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
    }

    /// Copies elements from one part of the current context memory to another.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

    /// Returns a reference to the memory of the current context, the active memory.
    ///
    /// # Panics
    ///
    /// Panics if the checkpoint is invalid in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an invalid checkpoint triggers undefined
    /// behavior. The checkpoint must be within the bounds of the buffer.
    #[inline]
    pub fn context_memory(&self) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should always be valid"),
        })
    }

    /// Returns a mutable reference to the memory of the current context.
    ///
    /// # Panics
    ///
    /// Panics if the checkpoint is invalid in debug builds only.
    ///
    /// # Safety
    ///
    /// In release builds, calling this method with an invalid checkpoint triggers undefined
    /// behavior. The checkpoint must be within the bounds of the buffer.
    #[inline]
    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should always be valid"),
        })
    }
}

/// Copies data from `src` to `dst`, taking the offsets and `len` into account.
///
/// If `src` does not provide enough data, the part of `dst[dst_offset..dst_offset + len]`
/// that is not copied over is zeroed.
///
/// # Safety
///
/// `dst_offset + len` must be in bounds of `dst`, and `dst` and `src` must not overlap.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    if src_offset >= src.len() {
        // Nothing to copy: zero the entire destination region.
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` and `src_end <= src.len()` per the checks above.
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: `dst_offset + src_len <= dst_offset + len`, which the caller guarantees
    // is in bounds of `dst`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero the tail of the destination region that was not copied over.
    // SAFETY: `dst_offset + len` is in bounds of `dst` per the caller's contract.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}

/// Returns the number of 32-byte words needed to hold the given number of bytes,
/// i.e. rounds `len` up to the next multiple of the word size.
#[inline]
pub const fn num_words(len: usize) -> usize {
    len.div_ceil(32)
}

/// Performs an EVM memory resize, charging gas for any expansion.
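///
/// Expansion is word-granular: the requested `offset + len` is rounded up with
/// [`num_words`], and gas is charged only when the word count grows. The cost
/// itself is supplied by [`GasParams::memory_cost`]; for mainnet schedules this
/// is conventionally the Yellow Paper's `3 * w + w * w / 512` (an assumption
/// about the configured table, not something this function enforces).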
#[inline]
pub fn resize_memory<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    offset: usize,
    len: usize,
) -> Result<(), InstructionResult> {
    #[cfg(feature = "memory_limit")]
    if memory.limit_reached(offset, len) {
        return Err(InstructionResult::MemoryLimitOOG);
    }

    let new_num_words = num_words(offset.saturating_add(len));
    if new_num_words > gas.memory().words_num {
        return resize_memory_cold(gas, memory, gas_table, new_num_words);
    }

    Ok(())
}

/// Cold path of [`resize_memory`]: charges the expansion gas and grows the memory.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    new_num_words: usize,
) -> Result<(), InstructionResult> {
    let cost = gas_table.memory_cost(new_num_words);
    // SAFETY: this path is only taken when `new_num_words` exceeds the currently
    // recorded word count, which is assumed to make `set_words_num` infallible.
    let cost = unsafe {
        gas.memory_mut()
            .set_words_num(new_num_words, cost)
            .unwrap_unchecked()
    };

    if !gas.record_cost(cost) {
        return Err(InstructionResult::MemoryOOG);
    }
    memory.resize(new_num_words * 32);
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX - 31), usize::MAX / 32);
        assert_eq!(num_words(usize::MAX - 30), (usize::MAX / 32) + 1);
        assert_eq!(num_words(usize::MAX), (usize::MAX / 32) + 1);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
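
    // The tests below are additional illustrative sketches of the read/write
    // API; they exercise only behavior documented on the methods above.

    // `set_byte`/`set_word` followed by `get_byte`/`get_word` roundtrips
    // within a single context.
    #[test]
    fn set_get_roundtrip() {
        let mut sm = SharedMemory::new();
        sm.resize(64);

        sm.set_byte(0, 0x42);
        assert_eq!(sm.get_byte(0), 0x42);

        let word = B256::from([0xab; 32]);
        sm.set_word(32, &word);
        assert_eq!(sm.get_word(32), word);
    }

    // `set_data` copies what it can from `data` and zero-fills the remainder,
    // as documented on the method.
    #[test]
    fn set_data_zero_fills() {
        let mut sm = SharedMemory::new();
        sm.resize(32);
        sm.set(0, &[0xff; 32]);

        // Only 2 bytes are available starting at `data_offset` 1; the
        // remaining 30 destination bytes must be zeroed.
        sm.set_data(0, 1, 32, &[0xaa, 0xbb, 0xcc]);
        assert_eq!(sm.get_byte(0), 0xbb);
        assert_eq!(sm.get_byte(1), 0xcc);
        assert_eq!(&sm.slice_len(2, 30)[..], &[0u8; 30][..]);
    }

    // `copy` moves bytes within the current context via `copy_within`.
    #[test]
    fn copy_within_context() {
        let mut sm = SharedMemory::new();
        sm.resize(64);
        sm.set(0, b"abcd");
        sm.copy(32, 0, 4);
        assert_eq!(&sm.slice_len(32, 4)[..], &b"abcd"[..]);
    }

    // Sanity-check the conventional EVM memory-cost schedule
    // (Yellow Paper: `3 * w + w * w / 512`) against a few known values.
    // This is a local sketch of the schedule `GasParams::memory_cost` is
    // assumed to implement for mainnet; it does not call into `GasParams`.
    #[test]
    fn yellow_paper_memory_cost_examples() {
        fn cost(words: u64) -> u64 {
            3 * words + words * words / 512
        }
        assert_eq!(cost(1), 3);
        assert_eq!(cost(32), 98);
        assert_eq!(cost(1024), 5120);
    }
}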
692}