// revm_interpreter/interpreter/shared_memory.rs

use core::{
    cell::{Ref, RefCell},
    cmp::min,
    fmt,
    ops::{Deref, Range},
};
use primitives::{hex, B256, U256};
use std::{rc::Rc, vec::Vec};

use super::MemoryTr;

/// A sequential memory shared between call contexts.
///
/// A single `Vec<u8>` backs the memory of every call frame; each frame records
/// a checkpoint into the buffer and only sees the region past its own
/// checkpoint.
#[derive(Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared by all call contexts.
    buffer: Vec<u8>,
    /// Buffer offsets at which each active context begins.
    checkpoints: Vec<usize>,
    /// Offset of the current (innermost) context, i.e. the most recent checkpoint.
    last_checkpoint: usize,
    /// Upper bound on total memory; checked by callers via `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}

/// An empty `SharedMemory` with no allocation, usable as a placeholder.
pub const EMPTY_SHARED_MEMORY: SharedMemory = SharedMemory {
    buffer: Vec::new(),
    checkpoints: Vec::new(),
    last_checkpoint: 0,
    #[cfg(feature = "memory_limit")]
    memory_limit: u64::MAX,
};

impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SharedMemory")
            .field("current_len", &self.len())
            .field("context_memory", &hex::encode(self.context_memory()))
            .finish_non_exhaustive()
    }
}

impl Default for SharedMemory {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

/// Gives access to a [`SharedMemory`] so that wrapper types can be used where
/// the interpreter expects memory.
pub trait MemoryGetter {
    /// Returns a mutable reference to the shared memory.
    fn memory_mut(&mut self) -> &mut SharedMemory;
    /// Returns a reference to the shared memory.
    fn memory(&self) -> &SharedMemory;
}

impl MemoryGetter for SharedMemory {
    #[inline]
    fn memory_mut(&mut self) -> &mut SharedMemory {
        self
    }

    #[inline]
    fn memory(&self) -> &SharedMemory {
        self
    }
}

impl<T: MemoryGetter> MemoryTr for Rc<RefCell<T>> {
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.borrow_mut()
            .memory_mut()
            .set_data(memory_offset, data_offset, len, data);
    }

    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.borrow_mut().memory_mut().set(memory_offset, data);
    }

    fn size(&self) -> usize {
        self.borrow().memory().len()
    }

    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.borrow_mut()
            .memory_mut()
            .copy(destination, source, len);
    }

    fn slice(&self, range: Range<usize>) -> impl Deref<Target = [u8]> + '_ {
        Ref::map(self.borrow(), |i| i.memory().slice_range(range))
    }

    fn resize(&mut self, new_size: usize) -> bool {
        self.borrow_mut().memory_mut().resize(new_size);
        true
    }
}

impl SharedMemory {
    /// Creates a new memory instance with a 4 KiB starting capacity.
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024)
    }

    /// Creates a new memory instance that can be shared between calls, with
    /// the given starting capacity for the backing buffer.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Vec::with_capacity(capacity),
            checkpoints: Vec::with_capacity(32),
            last_checkpoint: 0,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance with the given memory limit.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

    /// Returns `true` if resizing the current context to `new_size` would
    /// exceed the configured memory limit.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn limit_reached(&self, new_size: usize) -> bool {
        self.last_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
    }

    /// Prepares the shared memory for a new child context by recording a
    /// checkpoint at the current end of the buffer.
    #[inline]
    pub fn new_context(&mut self) {
        let new_checkpoint = self.buffer.len();
        self.checkpoints.push(new_checkpoint);
        self.last_checkpoint = new_checkpoint;
    }

    /// Prepares the shared memory for returning to the parent context by
    /// popping the last checkpoint and truncating the buffer to it.
    #[inline]
    pub fn free_context(&mut self) {
        if let Some(old_checkpoint) = self.checkpoints.pop() {
            self.last_checkpoint = self.checkpoints.last().cloned().unwrap_or_default();
            // SAFETY: `old_checkpoint` is a previous buffer length, so it never
            // exceeds the current length or capacity, and `u8` needs no drop.
            unsafe { self.buffer.set_len(old_checkpoint) };
        }
    }

    /// Returns the length of the current context's memory.
    #[inline]
    pub fn len(&self) -> usize {
        self.buffer.len() - self.last_checkpoint
    }

    /// Returns `true` if the current context's memory is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Resizes the current context's memory to `new_size`, zero-filling any
    /// newly added bytes.
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer.resize(self.last_checkpoint + new_size, 0);
    }

    /// Returns a byte slice of the current context's memory at
    /// `offset..offset + size`. The range must be in bounds; violations hit
    /// `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> &[u8] {
        self.slice_range(offset..offset + size)
    }

    /// Returns a byte slice of the current context's memory for the given
    /// range. The range must be in bounds; violations hit `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range @ Range { start, end }: Range<usize>) -> &[u8] {
        match self.context_memory().get(range) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {start}..{end}; len: {}", self.len()),
        }
    }

    /// Returns a mutable byte slice of the current context's memory at
    /// `offset..offset + size`. The range must be in bounds; violations hit
    /// `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> &mut [u8] {
        let end = offset + size;
        match self.context_memory_mut().get_mut(offset..end) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {offset}..{end}"),
        }
    }

    /// Returns the byte at `offset`.
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

    /// Returns the 32-byte word starting at `offset`.
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        self.slice_len(offset, 32).try_into().unwrap()
    }

    /// Returns the 32-byte word starting at `offset` as a big-endian `U256`.
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

    /// Sets the byte at `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

    /// Writes a 32-byte word at `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

    /// Writes a `U256` at `offset` in big-endian form.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }

    /// Copies `value` into the current context's memory starting at `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

    /// Copies `len` bytes of `data`, starting at `data_offset`, into memory at
    /// `memory_offset`. Any part of the requested range that lies past the end
    /// of `data` is zero-filled.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        if data_offset >= data.len() {
            // Nothing to copy: zero-fill the whole destination range.
            self.slice_mut(memory_offset, len).fill(0);
            return;
        }
        let data_end = min(data_offset + len, data.len());
        let data_len = data_end - data_offset;
        debug_assert!(data_offset < data.len() && data_end <= data.len());
        // SAFETY: `data_offset..data_end` is checked to be in bounds above.
        let data = unsafe { data.get_unchecked(data_offset..data_end) };
        self.slice_mut(memory_offset, data_len)
            .copy_from_slice(data);

        // Zero-fill the tail of the destination that had no source data;
        // `data_len` is at most `len`, so this cannot underflow.
        self.slice_mut(memory_offset + data_len, len - data_len)
            .fill(0);
    }

    /// Copies `len` bytes within the current context's memory, from `src` to
    /// `dst`; the regions may overlap.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

    /// Returns a reference to the current context's memory.
    #[inline]
    pub fn context_memory(&self) -> &[u8] {
        // SAFETY: `last_checkpoint` is always <= `buffer.len()`.
        unsafe {
            self.buffer
                .get_unchecked(self.last_checkpoint..self.buffer.len())
        }
    }

    /// Returns a mutable reference to the current context's memory.
    #[inline]
    pub fn context_memory_mut(&mut self) -> &mut [u8] {
        let buf_len = self.buffer.len();
        // SAFETY: `last_checkpoint` is always <= `buffer.len()`.
        unsafe { self.buffer.get_unchecked_mut(self.last_checkpoint..buf_len) }
    }
}

/// Returns the number of 32-byte words required to hold `len` bytes, i.e.
/// `len / 32` rounded up, saturating near `usize::MAX`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    len.saturating_add(31) / 32
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

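    // Added illustrative test (a sketch, not part of the original suite):
    // exercises the zero-padding behavior of `set_data` for partially and
    // fully out-of-bounds source ranges.
    #[test]
    fn set_data_zero_pads() {
        let mut shared_memory = SharedMemory::new();
        shared_memory.new_context();
        shared_memory.resize(32);

        // Only 3 bytes are available past `data_offset = 1`; the remaining
        // 5 requested bytes must be zero-filled.
        shared_memory.set(0, &[0xFF; 8]);
        shared_memory.set_data(0, 1, 8, &[1, 2, 3, 4]);
        assert_eq!(
            shared_memory.slice_len(0, 8),
            &[2_u8, 3, 4, 0, 0, 0, 0, 0] as &[u8]
        );

        // A data offset past the end of `data` zero-fills the whole region.
        shared_memory.set(8, &[0xFF; 8]);
        shared_memory.set_data(8, 10, 8, &[1, 2, 3, 4]);
        assert_eq!(shared_memory.slice_len(8, 8), &[0_u8; 8] as &[u8]);
    }
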
    #[test]
    fn new_free_context() {
        let mut shared_memory = SharedMemory::new();
        shared_memory.new_context();

        assert_eq!(shared_memory.buffer.len(), 0);
        assert_eq!(shared_memory.checkpoints.len(), 1);
        assert_eq!(shared_memory.last_checkpoint, 0);

        unsafe { shared_memory.buffer.set_len(32) };
        assert_eq!(shared_memory.len(), 32);
        shared_memory.new_context();

        assert_eq!(shared_memory.buffer.len(), 32);
        assert_eq!(shared_memory.checkpoints.len(), 2);
        assert_eq!(shared_memory.last_checkpoint, 32);
        assert_eq!(shared_memory.len(), 0);

        unsafe { shared_memory.buffer.set_len(96) };
        assert_eq!(shared_memory.len(), 64);
        shared_memory.new_context();

        assert_eq!(shared_memory.buffer.len(), 96);
        assert_eq!(shared_memory.checkpoints.len(), 3);
        assert_eq!(shared_memory.last_checkpoint, 96);
        assert_eq!(shared_memory.len(), 0);

        shared_memory.free_context();
        assert_eq!(shared_memory.buffer.len(), 96);
        assert_eq!(shared_memory.checkpoints.len(), 2);
        assert_eq!(shared_memory.last_checkpoint, 32);
        assert_eq!(shared_memory.len(), 64);

        shared_memory.free_context();
        assert_eq!(shared_memory.buffer.len(), 32);
        assert_eq!(shared_memory.checkpoints.len(), 1);
        assert_eq!(shared_memory.last_checkpoint, 0);
        assert_eq!(shared_memory.len(), 32);

        shared_memory.free_context();
        assert_eq!(shared_memory.buffer.len(), 0);
        assert_eq!(shared_memory.checkpoints.len(), 0);
        assert_eq!(shared_memory.last_checkpoint, 0);
        assert_eq!(shared_memory.len(), 0);
    }

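    // Added illustrative test (a sketch, not part of the original suite):
    // round-trips the byte- and word-level accessors defined above. Assumes
    // the usual `B256::repeat_byte` and `U256::from(u64)` constructors from
    // the primitives crate.
    #[test]
    fn word_and_byte_accessors() {
        let mut shared_memory = SharedMemory::new();
        shared_memory.new_context();
        shared_memory.resize(64);

        let value = U256::from(0xDEAD_BEEF_u64);
        shared_memory.set_u256(0, value);
        assert_eq!(shared_memory.get_u256(0), value);

        let word = B256::repeat_byte(0xAB);
        shared_memory.set_word(32, &word);
        assert_eq!(shared_memory.get_word(32), word);

        shared_memory.set_byte(1, 0x7F);
        assert_eq!(shared_memory.get_byte(1), 0x7F);
    }
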
    #[test]
    fn resize() {
        let mut shared_memory = SharedMemory::new();
        shared_memory.new_context();

        shared_memory.resize(32);
        assert_eq!(shared_memory.buffer.len(), 32);
        assert_eq!(shared_memory.len(), 32);
        assert_eq!(shared_memory.buffer.get(0..32), Some(&[0_u8; 32] as &[u8]));

        shared_memory.new_context();
        shared_memory.resize(96);
        assert_eq!(shared_memory.buffer.len(), 128);
        assert_eq!(shared_memory.len(), 96);
        assert_eq!(
            shared_memory.buffer.get(32..128),
            Some(&[0_u8; 96] as &[u8])
        );

        shared_memory.free_context();
        shared_memory.resize(64);
        assert_eq!(shared_memory.buffer.len(), 64);
        assert_eq!(shared_memory.len(), 64);
        assert_eq!(shared_memory.buffer.get(0..64), Some(&[0_u8; 64] as &[u8]));
    }

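    // Added illustrative test (a sketch, not part of the original suite):
    // drives a `SharedMemory` through the blanket `MemoryTr` impl for
    // `Rc<RefCell<T: MemoryGetter>>` defined in this file.
    #[test]
    fn rc_refcell_memory_tr() {
        let mut memory = Rc::new(RefCell::new(SharedMemory::new()));
        memory.borrow_mut().new_context();

        assert!(MemoryTr::resize(&mut memory, 32));
        assert_eq!(MemoryTr::size(&memory), 32);

        MemoryTr::set(&mut memory, 0, &[1, 2, 3, 4]);
        MemoryTr::copy(&mut memory, 4, 0, 4);
        assert_eq!(
            &*MemoryTr::slice(&memory, 0..8),
            &[1_u8, 2, 3, 4, 1, 2, 3, 4] as &[u8]
        );
    }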
}