1use super::MemoryTr;
2use core::{
3 cell::{Ref, RefCell, RefMut},
4 cmp::min,
5 fmt,
6 ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// A sequential memory buffer shared between calls.
///
/// Each call context views only its own tail of the shared buffer, starting
/// at its checkpoint; child contexts append past the parent's data.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared between contexts via `Rc<RefCell<_>>`.
    buffer: Rc<RefCell<Vec<u8>>>,
    /// Offset into `buffer` where this context's memory begins.
    my_checkpoint: usize,
    /// Checkpoint recorded when a child context was created; `None` when no
    /// child context is active.
    child_checkpoint: Option<usize>,
    /// Upper bound on the total buffer size checked by `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
29
30impl fmt::Debug for SharedMemory {
31 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32 f.debug_struct("SharedMemory")
33 .field("current_len", &self.len())
34 .field("context_memory", &hex::encode(&*self.context_memory()))
35 .finish_non_exhaustive()
36 }
37}
38
39impl Default for SharedMemory {
40 #[inline]
41 fn default() -> Self {
42 Self::new()
43 }
44}
45
impl MemoryTr for SharedMemory {
    /// Copies `len` bytes of `data` starting at `data_offset` into local
    /// memory at `memory_offset`; bytes past the end of `data` are zero-filled.
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        // Delegates to the inherent method of the same name.
        self.set_data(memory_offset, data_offset, len, data);
    }

    /// Writes `data` into local memory starting at `memory_offset`.
    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    /// Returns the length of the current memory context in bytes.
    fn size(&self) -> usize {
        self.len()
    }

    /// Copies `len` bytes within local memory from `source` to `destination`.
    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    /// Returns an immutable view of local memory over `range`
    /// (relative to this context's checkpoint).
    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        self.slice_range(range)
    }

    /// Returns the offset of this context's memory within the shared buffer.
    fn local_memory_offset(&self) -> usize {
        self.my_checkpoint
    }

    /// Copies bytes addressed by a *global* buffer range into local memory at
    /// `memory_offset`, zero-filling bytes the source range cannot provide.
    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    /// Returns an immutable view over `range` addressed in *global* buffer
    /// coordinates (not offset by this context's checkpoint).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        // Panics if the buffer is already mutably borrowed.
        let buffer = self.buffer.borrow();
        Ref::map(buffer, |b| match b.get(range) {
            // Out-of-bounds here is a caller bug; `debug_unreachable!` is a
            // project macro — presumably panics in debug builds only.
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
        })
    }

    /// Resizes local memory to `new_size` bytes (growth is zero-filled).
    /// This implementation always reports success.
    fn resize(&mut self, new_size: usize) -> bool {
        self.resize(new_size);
        true
    }
}
101
102impl SharedMemory {
103 #[inline]
107 pub fn new() -> Self {
108 Self::with_capacity(4 * 1024) }
110
111 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
113 Self {
114 buffer,
115 my_checkpoint: 0,
116 child_checkpoint: None,
117 #[cfg(feature = "memory_limit")]
118 memory_limit: u64::MAX,
119 }
120 }
121
122 #[inline]
124 pub fn with_capacity(capacity: usize) -> Self {
125 Self {
126 buffer: Rc::new(RefCell::new(Vec::with_capacity(capacity))),
127 my_checkpoint: 0,
128 child_checkpoint: None,
129 #[cfg(feature = "memory_limit")]
130 memory_limit: u64::MAX,
131 }
132 }
133
134 #[cfg(feature = "memory_limit")]
139 #[inline]
140 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
141 Self {
142 memory_limit,
143 ..Self::new()
144 }
145 }
146
147 #[cfg(feature = "memory_limit")]
150 #[inline]
151 pub fn limit_reached(&self, new_size: usize) -> bool {
152 self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
153 }
154
155 #[inline]
161 pub fn new_child_context(&mut self) -> SharedMemory {
162 if self.child_checkpoint.is_some() {
163 panic!("new_child_context was already called without freeing child context");
164 }
165 let new_checkpoint = self.buffer.borrow().len();
166 self.child_checkpoint = Some(new_checkpoint);
167 SharedMemory {
168 buffer: self.buffer.clone(),
169 my_checkpoint: new_checkpoint,
170 child_checkpoint: None,
172 #[cfg(feature = "memory_limit")]
173 memory_limit: self.memory_limit,
174 }
175 }
176
177 #[inline]
179 pub fn free_child_context(&mut self) {
180 let Some(child_checkpoint) = self.child_checkpoint.take() else {
181 return;
182 };
183 unsafe {
184 self.buffer.borrow_mut().set_len(child_checkpoint);
185 }
186 }
187
188 #[inline]
190 pub fn len(&self) -> usize {
191 self.buffer.borrow().len() - self.my_checkpoint
192 }
193
194 #[inline]
196 pub fn is_empty(&self) -> bool {
197 self.len() == 0
198 }
199
200 #[inline]
202 pub fn resize(&mut self, new_size: usize) {
203 self.buffer
204 .borrow_mut()
205 .resize(self.my_checkpoint + new_size, 0);
206 }
207
208 #[inline]
214 #[cfg_attr(debug_assertions, track_caller)]
215 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
216 self.slice_range(offset..offset + size)
217 }
218
219 #[inline]
225 #[cfg_attr(debug_assertions, track_caller)]
226 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
227 let buffer = self.buffer.borrow(); Ref::map(buffer, |b| {
229 match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
230 Some(slice) => slice,
231 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
232 }
233 })
234 }
235
236 #[inline]
242 #[cfg_attr(debug_assertions, track_caller)]
243 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
244 let buffer = self.buffer.borrow(); Ref::map(buffer, |b| match b.get(range) {
246 Some(slice) => slice,
247 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
248 })
249 }
250
251 #[inline]
257 #[cfg_attr(debug_assertions, track_caller)]
258 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
259 let buffer = self.buffer.borrow_mut(); RefMut::map(buffer, |b| {
261 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
262 Some(slice) => slice,
263 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
264 }
265 })
266 }
267
268 #[inline]
274 pub fn get_byte(&self, offset: usize) -> u8 {
275 self.slice_len(offset, 1)[0]
276 }
277
278 #[inline]
284 pub fn get_word(&self, offset: usize) -> B256 {
285 (*self.slice_len(offset, 32)).try_into().unwrap()
286 }
287
288 #[inline]
294 pub fn get_u256(&self, offset: usize) -> U256 {
295 self.get_word(offset).into()
296 }
297
298 #[inline]
304 #[cfg_attr(debug_assertions, track_caller)]
305 pub fn set_byte(&mut self, offset: usize, byte: u8) {
306 self.set(offset, &[byte]);
307 }
308
309 #[inline]
315 #[cfg_attr(debug_assertions, track_caller)]
316 pub fn set_word(&mut self, offset: usize, value: &B256) {
317 self.set(offset, &value[..]);
318 }
319
320 #[inline]
326 #[cfg_attr(debug_assertions, track_caller)]
327 pub fn set_u256(&mut self, offset: usize, value: U256) {
328 self.set(offset, &value.to_be_bytes::<32>());
329 }
330
331 #[inline]
337 #[cfg_attr(debug_assertions, track_caller)]
338 pub fn set(&mut self, offset: usize, value: &[u8]) {
339 if !value.is_empty() {
340 self.slice_mut(offset, value.len()).copy_from_slice(value);
341 }
342 }
343
344 #[inline]
351 #[cfg_attr(debug_assertions, track_caller)]
352 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
353 let mut dst = self.context_memory_mut();
354 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
355 }
356
357 #[inline]
359 #[cfg_attr(debug_assertions, track_caller)]
360 pub fn global_to_local_set_data(
361 &mut self,
362 memory_offset: usize,
363 data_offset: usize,
364 len: usize,
365 data_range: Range<usize>,
366 ) {
367 let mut buffer = self.buffer.borrow_mut(); let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
369 let src = if data_range.is_empty() {
370 &mut []
371 } else {
372 src.get_mut(data_range).unwrap()
373 };
374 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
375 }
376
377 #[inline]
383 #[cfg_attr(debug_assertions, track_caller)]
384 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
385 self.context_memory_mut().copy_within(src..src + len, dst);
386 }
387
388 #[inline]
390 pub fn context_memory(&self) -> Ref<'_, [u8]> {
391 let buffer = self.buffer.borrow();
392 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
393 Some(slice) => slice,
394 None => debug_unreachable!("Context memory should be always valid"),
395 })
396 }
397
398 #[inline]
400 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
401 let buffer = self.buffer.borrow_mut(); RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
403 Some(slice) => slice,
404 None => debug_unreachable!("Context memory should be always valid"),
405 })
406 }
407}
408
/// Copies up to `len` bytes from `src[src_offset..]` into
/// `dst[dst_offset..dst_offset + len]`, zero-filling any tail bytes that
/// `src` cannot provide.
///
/// # Safety
///
/// The caller must guarantee `dst_offset + len <= dst.len()`; the unchecked
/// destination slicing below is undefined behavior otherwise. No bounds
/// requirement is placed on `src`: an out-of-range source is handled by
/// zero-filling the destination.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if src_offset >= src.len() {
        // Nothing to copy: zero the entire destination span. Note this path
        // still bounds-checks `dst` (panics via `unwrap` on violation).
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Clamp the copy to the bytes `src` actually has.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` was checked above and
    // `src_end <= src.len()` by construction of `min`.
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()` and
    // `src_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero-fill the remainder the source could not cover.
    // SAFETY: same caller guarantee as above.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
442
/// Returns the number of 32-byte words needed to hold `len` bytes, rounding
/// up. Uses a saturating add so lengths near `usize::MAX` cannot overflow
/// (yielding `usize::MAX / 32` rather than wrapping).
#[inline]
pub const fn num_words(len: usize) -> usize {
    let rounded_up = len.saturating_add(31);
    rounded_up / 32
}
449
#[cfg(test)]
mod tests {
    use super::*;

    /// `num_words` rounds byte lengths up to 32-byte words and saturates
    /// (rather than overflowing) near `usize::MAX`.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    /// Child contexts share the buffer, start at the parent's end, and
    /// freeing them truncates the buffer back to their checkpoint.
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer.borrow().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // Test-only shortcut: grow the buffer without writing data.
        // Sound here because capacity is at least 4 KiB from `new()`.
        unsafe { sm1.buffer.borrow_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer.borrow().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer.borrow_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer.borrow().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer.borrow_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer.borrow().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts in reverse creation order; each free shrinks the
        // shared buffer back to the corresponding checkpoint.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer.borrow().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer.borrow().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer.borrow().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    /// `resize` zero-fills new bytes and only affects the local context;
    /// freeing a child restores the parent's view unchanged.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer.borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer.borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer.borrow().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer.borrow().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer.borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer.borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}