1use super::MemoryTr;
2use crate::InstructionResult;
3use context_interface::cfg::GasParams;
4use core::{
5 cell::{Ref, RefCell, RefMut},
6 cmp::min,
7 fmt,
8 ops::Range,
9};
10use primitives::{hex, B256, U256};
11use std::{rc::Rc, vec::Vec};
12
/// Extension trait adding borrow helpers to [`RefCell`] that route borrow
/// failures through `debug_unreachable!` instead of `borrow()`'s panic path.
///
/// The borrow discipline of `SharedMemory` is expected to make conflicts
/// impossible; these helpers keep that assumption cheap on the happy path.
trait RefcellExt<T> {
    /// Immutably borrows the cell; a borrow conflict is treated as unreachable.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell; a borrow conflict is treated as unreachable.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
17
impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        // A failed borrow indicates a bug in SharedMemory's borrow discipline;
        // `debug_unreachable!` reports the `BorrowError` (behavior in release
        // builds depends on that macro's definition elsewhere in the crate).
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        // Same contract as `dbg_borrow`, for the mutable borrow.
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
35
/// Sequential memory shared between call frames.
///
/// A single growable byte buffer is shared (via `Rc<RefCell<..>>`) by a chain
/// of `SharedMemory` values; each call frame owns the tail of the buffer
/// starting at its own checkpoint.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The shared backing buffer; `None` only for the `invalid()` placeholder.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Global offset into `buffer` where this context's memory begins.
    my_checkpoint: usize,
    /// Buffer length recorded when a child context was created; `None` while
    /// no child is active. Used to truncate the buffer when the child is freed.
    child_checkpoint: Option<usize>,
    /// Upper bound, as a global byte offset, on memory growth.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
54
55impl fmt::Debug for SharedMemory {
56 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
57 f.debug_struct("SharedMemory")
58 .field("current_len", &self.len())
59 .field("context_memory", &hex::encode(&*self.context_memory()))
60 .finish_non_exhaustive()
61 }
62}
63
64impl Default for SharedMemory {
65 #[inline]
66 fn default() -> Self {
67 Self::new()
68 }
69}
70
impl MemoryTr for SharedMemory {
    // Thin forwarding layer: each trait method delegates to the inherent
    // method of the same (or closely related) name defined below.

    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.set_data(memory_offset, data_offset, len, data);
    }

    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    fn size(&self) -> usize {
        // Length of this context's memory only (excludes ancestor frames).
        self.len()
    }

    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        // `range` uses local (context-relative) offsets.
        self.slice_range(range)
    }

    fn local_memory_offset(&self) -> usize {
        // Global buffer offset where this context's memory starts.
        self.my_checkpoint
    }

    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        // `range` is absolute within the whole shared buffer.
        self.global_slice_range(range)
    }

    fn resize(&mut self, new_size: usize) -> bool {
        // The inherent resize is infallible, so this always reports success.
        self.resize(new_size);
        true
    }

    #[cfg(feature = "memory_limit")]
    #[inline]
    fn limit_reached(&self, offset: usize, len: usize) -> bool {
        // Convert the local end offset to a global one before comparing with
        // the limit; saturating adds prevent overflow wraparound.
        self.my_checkpoint
            .saturating_add(offset)
            .saturating_add(len) as u64
            > self.memory_limit
    }
}
138
139impl SharedMemory {
140 #[inline]
144 pub fn new() -> Self {
145 Self::with_capacity(4 * 1024) }
147
148 #[inline]
150 pub fn invalid() -> Self {
151 Self {
152 buffer: None,
153 my_checkpoint: 0,
154 child_checkpoint: None,
155 #[cfg(feature = "memory_limit")]
156 memory_limit: 0,
157 }
158 }
159
160 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
162 Self {
163 buffer: Some(buffer),
164 my_checkpoint: 0,
165 child_checkpoint: None,
166 #[cfg(feature = "memory_limit")]
167 memory_limit: u64::MAX,
168 }
169 }
170
171 #[inline]
173 pub fn with_capacity(capacity: usize) -> Self {
174 Self {
175 buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
176 my_checkpoint: 0,
177 child_checkpoint: None,
178 #[cfg(feature = "memory_limit")]
179 memory_limit: u64::MAX,
180 }
181 }
182
183 #[cfg(feature = "memory_limit")]
188 #[inline]
189 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
190 Self {
191 memory_limit,
192 ..Self::new()
193 }
194 }
195
196 #[inline]
198 pub fn set_memory_limit(&mut self, limit: u64) {
199 #[cfg(feature = "memory_limit")]
200 {
201 self.memory_limit = limit;
202 }
203 let _ = limit;
205 }
206
207 #[inline]
208 fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
209 debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
210 unsafe { self.buffer.as_ref().unwrap_unchecked() }
211 }
212
213 #[inline]
214 fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
215 self.buffer().dbg_borrow()
216 }
217
218 #[inline]
219 fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
220 self.buffer().dbg_borrow_mut()
221 }
222
223 #[inline]
225 #[cfg_attr(debug_assertions, track_caller)]
226 fn slice_range_with_base(&self, range: Range<usize>, base: usize) -> Ref<'_, [u8]> {
227 let buffer = self.buffer_ref();
228 Ref::map(buffer, |b| {
229 let range = range.start + base..range.end + base;
230 match b.get(range.clone()) {
231 Some(slice) => slice,
232 None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
233 }
234 })
235 }
236
237 #[inline]
243 pub fn new_child_context(&mut self) -> SharedMemory {
244 if self.child_checkpoint.is_some() {
245 panic!("new_child_context was already called without freeing child context");
246 }
247 let new_checkpoint = self.full_len();
248 self.child_checkpoint = Some(new_checkpoint);
249 SharedMemory {
250 buffer: Some(self.buffer().clone()),
251 my_checkpoint: new_checkpoint,
252 child_checkpoint: None,
254 #[cfg(feature = "memory_limit")]
255 memory_limit: self.memory_limit,
256 }
257 }
258
259 #[inline]
261 pub fn free_child_context(&mut self) {
262 let Some(child_checkpoint) = self.child_checkpoint.take() else {
263 return;
264 };
265 unsafe {
266 self.buffer_ref_mut().set_len(child_checkpoint);
267 }
268 }
269
270 #[inline]
272 pub fn len(&self) -> usize {
273 self.full_len() - self.my_checkpoint
274 }
275
276 fn full_len(&self) -> usize {
277 self.buffer_ref().len()
278 }
279
280 #[inline]
282 pub fn is_empty(&self) -> bool {
283 self.len() == 0
284 }
285
286 #[inline]
288 pub fn resize(&mut self, new_size: usize) {
289 self.buffer()
290 .dbg_borrow_mut()
291 .resize(self.my_checkpoint + new_size, 0);
292 }
293
294 #[inline]
300 #[cfg_attr(debug_assertions, track_caller)]
301 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
302 self.slice_range(offset..offset + size)
303 }
304
305 #[inline]
317 #[cfg_attr(debug_assertions, track_caller)]
318 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
319 self.slice_range_with_base(range, self.my_checkpoint)
320 }
321
322 #[inline]
333 #[cfg_attr(debug_assertions, track_caller)]
334 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
335 self.slice_range_with_base(range, 0)
336 }
337
338 #[inline]
350 #[cfg_attr(debug_assertions, track_caller)]
351 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
352 let buffer = self.buffer_ref_mut();
353 RefMut::map(buffer, |b| {
354 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
355 Some(slice) => slice,
356 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
357 }
358 })
359 }
360
361 #[inline]
367 pub fn get_byte(&self, offset: usize) -> u8 {
368 self.slice_len(offset, 1)[0]
369 }
370
371 #[inline]
377 pub fn get_word(&self, offset: usize) -> B256 {
378 (*self.slice_len(offset, 32)).try_into().unwrap()
379 }
380
381 #[inline]
387 pub fn get_u256(&self, offset: usize) -> U256 {
388 self.get_word(offset).into()
389 }
390
391 #[inline]
397 #[cfg_attr(debug_assertions, track_caller)]
398 pub fn set_byte(&mut self, offset: usize, byte: u8) {
399 self.set(offset, &[byte]);
400 }
401
402 #[inline]
408 #[cfg_attr(debug_assertions, track_caller)]
409 pub fn set_word(&mut self, offset: usize, value: &B256) {
410 self.set(offset, &value[..]);
411 }
412
413 #[inline]
419 #[cfg_attr(debug_assertions, track_caller)]
420 pub fn set_u256(&mut self, offset: usize, value: U256) {
421 self.set(offset, &value.to_be_bytes::<32>());
422 }
423
424 #[inline]
430 #[cfg_attr(debug_assertions, track_caller)]
431 pub fn set(&mut self, offset: usize, value: &[u8]) {
432 if !value.is_empty() {
433 self.slice_mut(offset, value.len()).copy_from_slice(value);
434 }
435 }
436
437 #[inline]
444 #[cfg_attr(debug_assertions, track_caller)]
445 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
446 let mut dst = self.context_memory_mut();
447 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
448 }
449
450 #[inline]
452 #[cfg_attr(debug_assertions, track_caller)]
453 pub fn global_to_local_set_data(
454 &mut self,
455 memory_offset: usize,
456 data_offset: usize,
457 len: usize,
458 data_range: Range<usize>,
459 ) {
460 let mut buffer = self.buffer_ref_mut();
461 let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
462 let src = if data_range.is_empty() {
463 &mut []
464 } else {
465 src.get_mut(data_range).unwrap()
466 };
467 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
468 }
469
470 #[inline]
476 #[cfg_attr(debug_assertions, track_caller)]
477 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
478 self.context_memory_mut().copy_within(src..src + len, dst);
479 }
480
481 #[inline]
492 pub fn context_memory(&self) -> Ref<'_, [u8]> {
493 let buffer = self.buffer_ref();
494 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
495 Some(slice) => slice,
496 None => debug_unreachable!("Context memory should be always valid"),
497 })
498 }
499
500 #[inline]
511 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
512 let buffer = self.buffer_ref_mut();
513 RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
514 Some(slice) => slice,
515 None => debug_unreachable!("Context memory should be always valid"),
516 })
517 }
518}
519
/// Copies up to `len` bytes from `src[src_offset..]` into
/// `dst[dst_offset..dst_offset + len]`, zero-filling any tail that `src`
/// cannot cover.
///
/// # Safety
///
/// The range `dst_offset..dst_offset + len` must be in bounds of `dst` (and
/// must not overflow), since the copy and the zero-fill use unchecked slicing.
/// `src` bounds are checked internally.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    // Source entirely out of bounds: the whole destination range is zeroed.
    // This path bounds-checks `dst` via `unwrap`, unlike the paths below.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Clamp the copy to what `src` actually provides.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` (checked above) and
    // `src_end <= src.len()` (clamped by `min`).
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: `src_len <= len`, and the caller guarantees
    // `dst_offset..dst_offset + len` is in bounds of `dst`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero the tail not covered by `src`.
    // SAFETY: sub-range of the caller-guaranteed `dst_offset..dst_offset + len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
556
/// Returns the number of 32-byte words needed to hold `len` bytes, rounding
/// up. `len == 0` yields 0; cannot overflow, even for `usize::MAX`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    // Round up without an intermediate overflow: a partial trailing word
    // contributes exactly one extra word.
    len / 32 + (len % 32 != 0) as usize
}
563
/// Grows `memory` so that `offset + len` bytes fit (rounded up to whole
/// 32-byte words), charging gas for the expansion.
///
/// Fast path: if the required word count does not exceed the already-paid-for
/// `words_num`, nothing is charged and the memory is untouched. Growth is
/// delegated to the `#[cold]` out-of-line helper below.
///
/// # Errors
///
/// Returns `MemoryLimitOOG` if the configured limit is exceeded (only with
/// the `memory_limit` feature) and `MemoryOOG` if the expansion gas cannot
/// be paid.
#[inline]
pub fn resize_memory<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    offset: usize,
    len: usize,
) -> Result<(), InstructionResult> {
    #[cfg(feature = "memory_limit")]
    if memory.limit_reached(offset, len) {
        return Err(InstructionResult::MemoryLimitOOG);
    }

    // Saturating add: an absurd `offset + len` fails gas accounting instead
    // of wrapping around.
    let new_num_words = num_words(offset.saturating_add(len));
    if new_num_words > gas.memory().words_num {
        return resize_memory_cold(gas, memory, gas_table, new_num_words);
    }

    Ok(())
}
585
/// Out-of-line slow path of [`resize_memory`]: charges the expansion cost and
/// grows the memory to `new_num_words` 32-byte words.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    new_num_words: usize,
) -> Result<(), InstructionResult> {
    let cost = gas_table.memory_cost(new_num_words);
    // SAFETY(review): assumes `set_words_num` only fails in cases ruled out by
    // the caller's `new_num_words > words_num` check — TODO confirm against
    // `set_words_num`'s contract before trusting `unwrap_unchecked` here.
    let cost = unsafe {
        gas.memory_mut()
            .set_words_num(new_num_words, cost)
            .unwrap_unchecked()
    };

    if !gas.record_cost(cost) {
        return Err(InstructionResult::MemoryOOG);
    }
    memory.resize(new_num_words * 32);
    Ok(())
}
607
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        // Near-overflow inputs: rounding up must not wrap around.
        assert_eq!(num_words(usize::MAX - 31), usize::MAX / 32);
        assert_eq!(num_words(usize::MAX - 30), (usize::MAX / 32) + 1);
        assert_eq!(num_words(usize::MAX), (usize::MAX / 32) + 1);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // `set_len` grows the buffer without zero-initialization; contents are
        // never read here, and 32 <= the 4 KiB capacity reserved by `new()`.
        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        // Child starts at the current end of the buffer.
        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Freeing truncates the shared buffer back to each recorded
        // checkpoint, innermost first.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        // Growth is zero-filled.
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        // A child's resize grows the shared buffer past the parent's region.
        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        // Freeing the child restores the parent's view.
        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
693}