1use super::MemoryTr;
2use core::{
3 cell::{Ref, RefCell, RefMut},
4 cmp::min,
5 fmt,
6 ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// Extension trait over [`RefCell`] whose borrow helpers treat a failed borrow
/// as an unreachable condition (routed through `debug_unreachable!` in the
/// implementation below) instead of panicking with the default borrow error.
trait RefcellExt<T> {
    /// Immutable borrow; a borrow conflict is considered a logic bug.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutable borrow; a borrow conflict is considered a logic bug.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
15
16impl<T> RefcellExt<T> for RefCell<T> {
17 #[inline]
18 fn dbg_borrow(&self) -> Ref<'_, T> {
19 match self.try_borrow() {
20 Ok(b) => b,
21 Err(e) => debug_unreachable!("{e}"),
22 }
23 }
24
25 #[inline]
26 fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
27 match self.try_borrow_mut() {
28 Ok(b) => b,
29 Err(e) => debug_unreachable!("{e}"),
30 }
31 }
32}
33
/// Sequential memory buffer shared between call contexts: parent and child
/// contexts reference the same underlying buffer, each owning the region that
/// starts at its own checkpoint.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// Shared backing buffer; `None` only for instances created by
    /// `SharedMemory::invalid`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Offset into `buffer` where this context's memory region begins.
    my_checkpoint: usize,
    /// Checkpoint of the currently active child context, set by
    /// `new_child_context` and cleared by `free_child_context`.
    child_checkpoint: Option<usize>,
    /// Upper bound on total buffer size, consulted by `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
52
53impl fmt::Debug for SharedMemory {
54 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
55 f.debug_struct("SharedMemory")
56 .field("current_len", &self.len())
57 .field("context_memory", &hex::encode(&*self.context_memory()))
58 .finish_non_exhaustive()
59 }
60}
61
62impl Default for SharedMemory {
63 #[inline]
64 fn default() -> Self {
65 Self::new()
66 }
67}
68
69impl MemoryTr for SharedMemory {
70 fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
71 self.set_data(memory_offset, data_offset, len, data);
72 }
73
74 fn set(&mut self, memory_offset: usize, data: &[u8]) {
75 self.set(memory_offset, data);
76 }
77
78 fn size(&self) -> usize {
79 self.len()
80 }
81
82 fn copy(&mut self, destination: usize, source: usize, len: usize) {
83 self.copy(destination, source, len);
84 }
85
86 fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
87 self.slice_range(range)
88 }
89
90 fn local_memory_offset(&self) -> usize {
91 self.my_checkpoint
92 }
93
94 fn set_data_from_global(
95 &mut self,
96 memory_offset: usize,
97 data_offset: usize,
98 len: usize,
99 data_range: Range<usize>,
100 ) {
101 self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
102 }
103
104 #[inline]
115 #[cfg_attr(debug_assertions, track_caller)]
116 fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
117 let buffer = self.buffer_ref();
118 Ref::map(buffer, |b| match b.get(range) {
119 Some(slice) => slice,
120 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
121 })
122 }
123
124 fn resize(&mut self, new_size: usize) -> bool {
125 self.resize(new_size);
126 true
127 }
128}
129
impl SharedMemory {
    /// Creates a new shared memory with a default 4 KiB backing-buffer capacity.
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024)
    }

    /// Creates an invalid placeholder with no backing buffer.
    ///
    /// Any memory operation on such an instance trips the debug assertion in
    /// `buffer()`; it exists only to be replaced before use.
    #[inline]
    pub fn invalid() -> Self {
        Self {
            buffer: None,
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: 0,
        }
    }

    /// Creates a new memory instance over a caller-provided shared buffer,
    /// with no memory limit.
    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
        Self {
            buffer: Some(buffer),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance whose buffer reserves at least `capacity`
    /// bytes up front (the length starts at zero).
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance with the given total-size limit
    /// (see [`Self::limit_reached`]).
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

    /// Returns the shared backing buffer.
    ///
    /// Must not be called on an instance created with [`Self::invalid`].
    #[inline]
    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
        // SAFETY: relies on callers never operating on an `invalid()` instance;
        // the assertion above enforces this in debug builds only.
        unsafe { self.buffer.as_ref().unwrap_unchecked() }
    }

    /// Immutably borrows the backing buffer.
    #[inline]
    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
        self.buffer().dbg_borrow()
    }

    /// Mutably borrows the backing buffer.
    #[inline]
    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
        self.buffer().dbg_borrow_mut()
    }

    /// Returns `true` if growing this context's region to `new_size` bytes
    /// would push the total buffer size past the configured memory limit.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn limit_reached(&self, new_size: usize) -> bool {
        self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
    }

    /// Creates a child context view over the same buffer, starting at the
    /// buffer's current end, and records that checkpoint on `self`.
    ///
    /// # Panics
    ///
    /// Panics if a child context is already active (not yet freed).
    #[inline]
    pub fn new_child_context(&mut self) -> SharedMemory {
        if self.child_checkpoint.is_some() {
            panic!("new_child_context was already called without freeing child context");
        }
        let new_checkpoint = self.full_len();
        self.child_checkpoint = Some(new_checkpoint);
        SharedMemory {
            buffer: Some(self.buffer().clone()),
            my_checkpoint: new_checkpoint,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: self.memory_limit,
        }
    }

    /// Frees the active child context by truncating the buffer back to the
    /// checkpoint recorded when the child was created. No-op without a child.
    #[inline]
    pub fn free_child_context(&mut self) {
        let Some(child_checkpoint) = self.child_checkpoint.take() else {
            return;
        };
        // SAFETY: `child_checkpoint` was the buffer length when the child was
        // created, so this only shrinks (or keeps) the length; all retained
        // bytes were already initialized and capacity is untouched.
        unsafe {
            self.buffer_ref_mut().set_len(child_checkpoint);
        }
    }

    /// Returns the length of this context's memory region in bytes.
    #[inline]
    pub fn len(&self) -> usize {
        self.full_len() - self.my_checkpoint
    }

    /// Total length of the shared buffer (all contexts combined).
    fn full_len(&self) -> usize {
        self.buffer_ref().len()
    }

    /// Returns `true` if this context's memory region is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Resizes this context's region to `new_size` bytes, zero-filling any
    /// newly added bytes. Never shrinks other contexts' regions.
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer()
            .dbg_borrow_mut()
            .resize(self.my_checkpoint + new_size, 0);
    }

    /// Returns a slice of `size` bytes of this context's memory starting at
    /// context-relative `offset`. OOB access hits `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
        self.slice_range(offset..offset + size)
    }

    /// Returns a slice of this context's memory for the context-relative
    /// `range`. OOB access hits `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| {
            // Translate the context-relative range into buffer coordinates.
            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
            }
        })
    }

    /// Returns a slice of the shared buffer for an absolute (global) `range`.
    /// OOB access hits `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
        })
    }

    /// Returns a mutable slice of `size` bytes of this context's memory
    /// starting at context-relative `offset`. OOB access hits
    /// `debug_unreachable!`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| {
            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
            }
        })
    }

    /// Returns the byte at context-relative `offset`.
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

    /// Returns the 32-byte word at context-relative `offset`.
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        (*self.slice_len(offset, 32)).try_into().unwrap()
    }

    /// Returns the `U256` at context-relative `offset` (via the 32-byte word).
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

    /// Writes a single byte at context-relative `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

    /// Writes a 32-byte word at context-relative `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

    /// Writes `value` as 32 big-endian bytes at context-relative `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }

    /// Copies `value` into this context's memory at `offset`; no-op when
    /// `value` is empty.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

    /// Copies `len` bytes from `data[data_offset..]` into this context's memory
    /// at `memory_offset`, zero-filling whatever part `data` cannot cover.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        let mut dst = self.context_memory_mut();
        // SAFETY: assumes callers resized this context's memory beforehand so
        // `memory_offset + len` is in bounds — TODO(review): confirm call sites.
        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
    }

    /// Like [`Self::set_data`], but the source bytes come from the shared
    /// buffer itself, addressed by the absolute (global) `data_range` lying
    /// before this context's checkpoint.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_to_local_set_data(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        let mut buffer = self.buffer_ref_mut();
        // Split the buffer at this context's checkpoint: `src` is the parents'
        // region, `dst` is this context's region — no aliasing between them.
        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
        let src = if data_range.is_empty() {
            &mut []
        } else {
            // Panics if `data_range` reaches past the parents' region.
            src.get_mut(data_range).unwrap()
        };
        // SAFETY: assumes callers resized this context's memory beforehand so
        // `memory_offset + len` is in bounds — TODO(review): confirm call sites.
        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
    }

    /// Copies `len` bytes within this context's memory from `src` to `dst`;
    /// the ranges may overlap (`copy_within` semantics).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

    /// Returns this context's entire memory region as an immutable slice.
    #[inline]
    pub fn context_memory(&self) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should be always valid"),
        })
    }

    /// Returns this context's entire memory region as a mutable slice.
    #[inline]
    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should be always valid"),
        })
    }
}
503
/// Copies `len` bytes from `src[src_offset..]` into `dst[dst_offset..]`,
/// zero-filling whatever part of the destination window `src` cannot cover.
///
/// # Safety
///
/// `dst_offset + len` must not overflow and `dst[dst_offset..dst_offset + len]`
/// must be in bounds. Only the all-zero path below uses a checked access; the
/// copy and tail-fill paths use unchecked slicing on the caller's guarantee.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    // Source exhausted: the entire destination window becomes zeros.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Clamp the copy to the bytes actually available in `src`.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` (checked above) and `src_end` is
    // clamped to `src.len()`.
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees `dst_offset..dst_offset + len` is in bounds,
    // and `src_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // SAFETY: same caller-provided bound; zero the tail not covered by `src`.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
537
/// Number of 32-byte words needed to hold `len` bytes, rounding up.
///
/// Saturates near the top of the range: for any `len` where `len + 31` would
/// overflow (including `usize::MAX`), the result is `usize::MAX / 32`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    match len.checked_add(31) {
        Some(padded) => padded / 32,
        None => usize::MAX / 32,
    }
}
544
545#[inline]
547#[must_use]
548pub fn resize_memory<Memory: MemoryTr>(
549 gas: &mut crate::Gas,
550 memory: &mut Memory,
551 offset: usize,
552 len: usize,
553) -> bool {
554 let new_num_words = num_words(offset.saturating_add(len));
555 if new_num_words > gas.memory().words_num {
556 resize_memory_cold(gas, memory, new_num_words)
557 } else {
558 true
559 }
560}
561
/// Cold path of `resize_memory`: records the new word count, charges the
/// incremental expansion cost, and grows the memory to `new_num_words * 32`
/// bytes. Returns `false` when gas is insufficient (memory left untouched).
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY: presumably `record_new_len` only fails when the length does not
    // grow, and the caller only enters this path with `new_num_words` larger
    // than the recorded count — TODO(review): confirm `record_new_len`'s contract.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    // Out of gas: report failure without resizing.
    if !gas.record_cost(cost) {
        return false;
    }
    memory.resize(new_num_words * 32);
    true
}
580
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        // Rounds byte lengths up to 32-byte words and saturates at the top:
        // `num_words(usize::MAX)` stays at `usize::MAX / 32`.
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // SAFETY: `new()` reserves 4 KiB of capacity, so every `set_len` below
        // stays within capacity; the (uninitialized) contents are never read —
        // the test only checks lengths and checkpoints.
        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        // Child starts at the parent's end: checkpoint 32, local length 0.
        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Freeing each child truncates the shared buffer back to the
        // checkpoint recorded when that child was created.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        // Child resize grows the shared buffer relative to its own checkpoint
        // and zero-fills the new bytes.
        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        // Freeing the child restores the parent's view untouched.
        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}