1use super::MemoryTr;
2use core::{
3 cell::{Ref, RefCell, RefMut},
4 cmp::min,
5 fmt,
6 ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// Debug-checked borrowing helpers for [`RefCell`].
///
/// These behave like `borrow`/`borrow_mut`, but route the failure path through
/// `debug_unreachable!` — presumably a panic in debug builds and an
/// unreachable hint in release; confirm against the macro's definition.
trait RefcellExt<T> {
    /// Immutably borrows the cell; a failed borrow is treated as unreachable.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell; a failed borrow is treated as unreachable.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
15
impl<T> RefcellExt<T> for RefCell<T> {
    // A borrow failure can only happen if a conflicting `Ref`/`RefMut` is
    // already live; this module treats that as a programming error rather
    // than a recoverable state, hence `debug_unreachable!`.
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
33
/// A sequential memory buffer shared between call contexts.
///
/// Each instance is a view into a single `Rc<RefCell<Vec<u8>>>` buffer,
/// starting at `my_checkpoint`; child contexts created via
/// `new_child_context` append to the same underlying buffer.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying shared buffer. `None` only for the sentinel returned by
    /// `invalid()`; every other constructor sets `Some`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Offset into `buffer` where this context's memory begins.
    my_checkpoint: usize,
    /// Buffer length at the moment the currently active child context was
    /// created; `None` when no child context is outstanding.
    child_checkpoint: Option<usize>,
    /// Upper bound (in bytes) on the buffer size, checked by `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
52
53impl fmt::Debug for SharedMemory {
54 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
55 f.debug_struct("SharedMemory")
56 .field("current_len", &self.len())
57 .field("context_memory", &hex::encode(&*self.context_memory()))
58 .finish_non_exhaustive()
59 }
60}
61
62impl Default for SharedMemory {
63 #[inline]
64 fn default() -> Self {
65 Self::new()
66 }
67}
68
69impl MemoryTr for SharedMemory {
70 fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
71 self.set_data(memory_offset, data_offset, len, data);
72 }
73
74 fn set(&mut self, memory_offset: usize, data: &[u8]) {
75 self.set(memory_offset, data);
76 }
77
78 fn size(&self) -> usize {
79 self.len()
80 }
81
82 fn copy(&mut self, destination: usize, source: usize, len: usize) {
83 self.copy(destination, source, len);
84 }
85
86 fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
87 self.slice_range(range)
88 }
89
90 fn local_memory_offset(&self) -> usize {
91 self.my_checkpoint
92 }
93
94 fn set_data_from_global(
95 &mut self,
96 memory_offset: usize,
97 data_offset: usize,
98 len: usize,
99 data_range: Range<usize>,
100 ) {
101 self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
102 }
103
104 #[inline]
115 #[cfg_attr(debug_assertions, track_caller)]
116 fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
117 let buffer = self.buffer_ref();
118 Ref::map(buffer, |b| match b.get(range) {
119 Some(slice) => slice,
120 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
121 })
122 }
123
124 fn resize(&mut self, new_size: usize) -> bool {
125 self.resize(new_size);
126 true
127 }
128}
129
130impl SharedMemory {
131 #[inline]
135 pub fn new() -> Self {
136 Self::with_capacity(4 * 1024) }
138
139 #[inline]
141 pub fn invalid() -> Self {
142 Self {
143 buffer: None,
144 my_checkpoint: 0,
145 child_checkpoint: None,
146 #[cfg(feature = "memory_limit")]
147 memory_limit: 0,
148 }
149 }
150
151 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
153 Self {
154 buffer: Some(buffer),
155 my_checkpoint: 0,
156 child_checkpoint: None,
157 #[cfg(feature = "memory_limit")]
158 memory_limit: u64::MAX,
159 }
160 }
161
162 #[inline]
164 pub fn with_capacity(capacity: usize) -> Self {
165 Self {
166 buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
167 my_checkpoint: 0,
168 child_checkpoint: None,
169 #[cfg(feature = "memory_limit")]
170 memory_limit: u64::MAX,
171 }
172 }
173
174 #[cfg(feature = "memory_limit")]
179 #[inline]
180 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
181 Self {
182 memory_limit,
183 ..Self::new()
184 }
185 }
186
187 #[inline]
188 fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
189 debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
190 unsafe { self.buffer.as_ref().unwrap_unchecked() }
191 }
192
193 #[inline]
194 fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
195 self.buffer().dbg_borrow()
196 }
197
198 #[inline]
199 fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
200 self.buffer().dbg_borrow_mut()
201 }
202
203 #[cfg(feature = "memory_limit")]
206 #[inline]
207 pub fn limit_reached(&self, new_size: usize) -> bool {
208 self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
209 }
210
211 #[inline]
217 pub fn new_child_context(&mut self) -> SharedMemory {
218 if self.child_checkpoint.is_some() {
219 panic!("new_child_context was already called without freeing child context");
220 }
221 let new_checkpoint = self.full_len();
222 self.child_checkpoint = Some(new_checkpoint);
223 SharedMemory {
224 buffer: Some(self.buffer().clone()),
225 my_checkpoint: new_checkpoint,
226 child_checkpoint: None,
228 #[cfg(feature = "memory_limit")]
229 memory_limit: self.memory_limit,
230 }
231 }
232
233 #[inline]
235 pub fn free_child_context(&mut self) {
236 let Some(child_checkpoint) = self.child_checkpoint.take() else {
237 return;
238 };
239 unsafe {
240 self.buffer_ref_mut().set_len(child_checkpoint);
241 }
242 }
243
244 #[inline]
246 pub fn len(&self) -> usize {
247 self.full_len() - self.my_checkpoint
248 }
249
250 fn full_len(&self) -> usize {
251 self.buffer_ref().len()
252 }
253
254 #[inline]
256 pub fn is_empty(&self) -> bool {
257 self.len() == 0
258 }
259
260 #[inline]
262 pub fn resize(&mut self, new_size: usize) {
263 self.buffer()
264 .dbg_borrow_mut()
265 .resize(self.my_checkpoint + new_size, 0);
266 }
267
268 #[inline]
274 #[cfg_attr(debug_assertions, track_caller)]
275 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
276 self.slice_range(offset..offset + size)
277 }
278
279 #[inline]
291 #[cfg_attr(debug_assertions, track_caller)]
292 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
293 let buffer = self.buffer_ref();
294 Ref::map(buffer, |b| {
295 match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
296 Some(slice) => slice,
297 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
298 }
299 })
300 }
301
302 #[inline]
313 #[cfg_attr(debug_assertions, track_caller)]
314 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
315 let buffer = self.buffer_ref();
316 Ref::map(buffer, |b| match b.get(range) {
317 Some(slice) => slice,
318 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
319 })
320 }
321
322 #[inline]
334 #[cfg_attr(debug_assertions, track_caller)]
335 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
336 let buffer = self.buffer_ref_mut();
337 RefMut::map(buffer, |b| {
338 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
339 Some(slice) => slice,
340 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
341 }
342 })
343 }
344
345 #[inline]
351 pub fn get_byte(&self, offset: usize) -> u8 {
352 self.slice_len(offset, 1)[0]
353 }
354
355 #[inline]
361 pub fn get_word(&self, offset: usize) -> B256 {
362 (*self.slice_len(offset, 32)).try_into().unwrap()
363 }
364
365 #[inline]
371 pub fn get_u256(&self, offset: usize) -> U256 {
372 self.get_word(offset).into()
373 }
374
375 #[inline]
381 #[cfg_attr(debug_assertions, track_caller)]
382 pub fn set_byte(&mut self, offset: usize, byte: u8) {
383 self.set(offset, &[byte]);
384 }
385
386 #[inline]
392 #[cfg_attr(debug_assertions, track_caller)]
393 pub fn set_word(&mut self, offset: usize, value: &B256) {
394 self.set(offset, &value[..]);
395 }
396
397 #[inline]
403 #[cfg_attr(debug_assertions, track_caller)]
404 pub fn set_u256(&mut self, offset: usize, value: U256) {
405 self.set(offset, &value.to_be_bytes::<32>());
406 }
407
408 #[inline]
414 #[cfg_attr(debug_assertions, track_caller)]
415 pub fn set(&mut self, offset: usize, value: &[u8]) {
416 if !value.is_empty() {
417 self.slice_mut(offset, value.len()).copy_from_slice(value);
418 }
419 }
420
421 #[inline]
428 #[cfg_attr(debug_assertions, track_caller)]
429 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
430 let mut dst = self.context_memory_mut();
431 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
432 }
433
434 #[inline]
436 #[cfg_attr(debug_assertions, track_caller)]
437 pub fn global_to_local_set_data(
438 &mut self,
439 memory_offset: usize,
440 data_offset: usize,
441 len: usize,
442 data_range: Range<usize>,
443 ) {
444 let mut buffer = self.buffer_ref_mut();
445 let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
446 let src = if data_range.is_empty() {
447 &mut []
448 } else {
449 src.get_mut(data_range).unwrap()
450 };
451 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
452 }
453
454 #[inline]
460 #[cfg_attr(debug_assertions, track_caller)]
461 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
462 self.context_memory_mut().copy_within(src..src + len, dst);
463 }
464
465 #[inline]
476 pub fn context_memory(&self) -> Ref<'_, [u8]> {
477 let buffer = self.buffer_ref();
478 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
479 Some(slice) => slice,
480 None => debug_unreachable!("Context memory should be always valid"),
481 })
482 }
483
484 #[inline]
495 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
496 let buffer = self.buffer_ref_mut();
497 RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
498 Some(slice) => slice,
499 None => debug_unreachable!("Context memory should be always valid"),
500 })
501 }
502}
503
/// Copies up to `len` bytes from `src[src_offset..]` into
/// `dst[dst_offset..dst_offset + len]`, zero-filling whatever part of the
/// destination the source cannot cover.
///
/// # Safety
///
/// The caller must ensure `dst_offset + len` does not overflow and is within
/// `dst.len()`: the copy and tail-fill paths index `dst` without bounds checks.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    // Source entirely out of range: the whole destination span becomes zeros.
    // (This path still bounds-checks `dst` via `get_mut().unwrap()`.)
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Number of source bytes actually available, capped at `len`.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` was checked above and `src_end <= src.len()` by `min`.
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()` and `src_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero-fill the remainder when fewer than `len` source bytes were available.
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()`.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
540
/// Number of 32-byte words needed to hold `len` bytes, rounding up.
///
/// Saturates near `usize::MAX` instead of overflowing, so
/// `num_words(usize::MAX) == usize::MAX / 32`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    // Equivalent to `len.saturating_add(31) / 32`, with the overflow case
    // spelled out explicitly.
    match len.checked_add(31) {
        Some(padded) => padded / 32,
        None => usize::MAX / 32,
    }
}
547
548#[inline]
550#[must_use]
551pub fn resize_memory<Memory: MemoryTr>(
552 gas: &mut crate::Gas,
553 memory: &mut Memory,
554 offset: usize,
555 len: usize,
556) -> bool {
557 let new_num_words = num_words(offset.saturating_add(len));
558 if new_num_words > gas.memory().words_num {
559 resize_memory_cold(gas, memory, new_num_words)
560 } else {
561 true
562 }
563}
564
/// Cold path of [`resize_memory`]: records the new word count against `gas`
/// and grows `memory` on success. Returns `false` when out of gas.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY(review): relies on `record_new_len` never failing when the length
    // grows (the caller only reaches here with a larger word count) — confirm
    // that invariant in the `Gas` implementation.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    // Charge the expansion cost; bail out without resizing when out of gas.
    if !gas.record_cost(cost) {
        return false;
    }
    memory.resize(new_num_words * 32);
    true
}
583
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        // The saturating add caps the result at `usize::MAX / 32` rather than
        // overflowing.
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // Fake a 32-byte context by bumping the length only; the capacity of
        // 4 KiB from `new()` covers it (contents are not read by this test).
        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        // The child shares the buffer and starts where the parent ended.
        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Freeing a child truncates the shared buffer back to the checkpoint
        // recorded when that child was created.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        // `resize` zero-fills newly added bytes.
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        // The child's 96 bytes sit after the parent's 32 in the shared buffer.
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}