1use super::MemoryTr;
2use crate::InstructionResult;
3use context_interface::cfg::GasParams;
4use core::{
5 cell::{Ref, RefCell, RefMut},
6 cmp::min,
7 fmt,
8 ops::Range,
9};
10use primitives::{hex, B256, U256};
11use std::{rc::Rc, vec::Vec};
12
/// Extension trait adding debug-checked borrow helpers to [`RefCell`].
///
/// A failed borrow is routed through `debug_unreachable!` — presumably a panic
/// with the `BorrowError` message in debug builds and an unreachable hint in
/// release builds; confirm against the macro's definition elsewhere in the crate.
trait RefcellExt<T> {
    /// Immutably borrows the cell, treating a borrow conflict as unreachable.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell, treating a borrow conflict as unreachable.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}

impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            // A failing borrow indicates a bug in how the shared buffer is
            // used (overlapping borrows), not a recoverable runtime condition.
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            // Same invariant as `dbg_borrow`: borrows must never overlap.
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
35
/// A sequential memory buffer shared between call contexts, backed by a single
/// reference-counted `Vec<u8>`.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared (via `Rc<RefCell<..>>`) with child
    /// contexts. `None` only for placeholder instances (see `invalid`).
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// This context's checkpoint into the global buffer: the context's own
    /// memory is `buffer[my_checkpoint..]`.
    my_checkpoint: usize,
    /// Buffer length recorded when a child context was created; used by
    /// `free_child_context` to truncate the buffer back. `None` when no
    /// child context is active.
    child_checkpoint: Option<usize>,
    /// Upper bound, in bytes of global buffer length, that memory may grow to
    /// (checked by `limit_reached`).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
54
55impl fmt::Debug for SharedMemory {
56 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
57 f.debug_struct("SharedMemory")
58 .field("current_len", &self.len())
59 .field("context_memory", &hex::encode(&*self.context_memory()))
60 .finish_non_exhaustive()
61 }
62}
63
64impl Default for SharedMemory {
65 #[inline]
66 fn default() -> Self {
67 Self::new()
68 }
69}
70
71impl MemoryTr for SharedMemory {
72 fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
73 self.set_data(memory_offset, data_offset, len, data);
74 }
75
76 fn set(&mut self, memory_offset: usize, data: &[u8]) {
77 self.set(memory_offset, data);
78 }
79
80 fn size(&self) -> usize {
81 self.len()
82 }
83
84 fn copy(&mut self, destination: usize, source: usize, len: usize) {
85 self.copy(destination, source, len);
86 }
87
88 fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
89 self.slice_range(range)
90 }
91
92 fn local_memory_offset(&self) -> usize {
93 self.my_checkpoint
94 }
95
96 fn set_data_from_global(
97 &mut self,
98 memory_offset: usize,
99 data_offset: usize,
100 len: usize,
101 data_range: Range<usize>,
102 ) {
103 self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
104 }
105
106 #[inline]
117 #[cfg_attr(debug_assertions, track_caller)]
118 fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
119 let buffer = self.buffer_ref();
120 Ref::map(buffer, |b| match b.get(range) {
121 Some(slice) => slice,
122 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
123 })
124 }
125
126 fn resize(&mut self, new_size: usize) -> bool {
127 self.resize(new_size);
128 true
129 }
130
131 #[cfg(feature = "memory_limit")]
134 #[inline]
135 fn limit_reached(&self, offset: usize, len: usize) -> bool {
136 self.my_checkpoint
137 .saturating_add(offset)
138 .saturating_add(len) as u64
139 > self.memory_limit
140 }
141}
142
143impl SharedMemory {
144 #[inline]
148 pub fn new() -> Self {
149 Self::with_capacity(4 * 1024) }
151
152 #[inline]
154 pub fn invalid() -> Self {
155 Self {
156 buffer: None,
157 my_checkpoint: 0,
158 child_checkpoint: None,
159 #[cfg(feature = "memory_limit")]
160 memory_limit: 0,
161 }
162 }
163
164 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
166 Self {
167 buffer: Some(buffer),
168 my_checkpoint: 0,
169 child_checkpoint: None,
170 #[cfg(feature = "memory_limit")]
171 memory_limit: u64::MAX,
172 }
173 }
174
175 #[inline]
177 pub fn with_capacity(capacity: usize) -> Self {
178 Self {
179 buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
180 my_checkpoint: 0,
181 child_checkpoint: None,
182 #[cfg(feature = "memory_limit")]
183 memory_limit: u64::MAX,
184 }
185 }
186
187 #[cfg(feature = "memory_limit")]
192 #[inline]
193 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
194 Self {
195 memory_limit,
196 ..Self::new()
197 }
198 }
199
200 #[inline]
202 pub fn set_memory_limit(&mut self, limit: u64) {
203 #[cfg(feature = "memory_limit")]
204 {
205 self.memory_limit = limit;
206 }
207 let _ = limit;
209 }
210
211 #[inline]
212 fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
213 debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
214 unsafe { self.buffer.as_ref().unwrap_unchecked() }
215 }
216
217 #[inline]
218 fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
219 self.buffer().dbg_borrow()
220 }
221
222 #[inline]
223 fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
224 self.buffer().dbg_borrow_mut()
225 }
226
227 #[inline]
233 pub fn new_child_context(&mut self) -> SharedMemory {
234 if self.child_checkpoint.is_some() {
235 panic!("new_child_context was already called without freeing child context");
236 }
237 let new_checkpoint = self.full_len();
238 self.child_checkpoint = Some(new_checkpoint);
239 SharedMemory {
240 buffer: Some(self.buffer().clone()),
241 my_checkpoint: new_checkpoint,
242 child_checkpoint: None,
244 #[cfg(feature = "memory_limit")]
245 memory_limit: self.memory_limit,
246 }
247 }
248
249 #[inline]
251 pub fn free_child_context(&mut self) {
252 let Some(child_checkpoint) = self.child_checkpoint.take() else {
253 return;
254 };
255 unsafe {
256 self.buffer_ref_mut().set_len(child_checkpoint);
257 }
258 }
259
260 #[inline]
262 pub fn len(&self) -> usize {
263 self.full_len() - self.my_checkpoint
264 }
265
266 fn full_len(&self) -> usize {
267 self.buffer_ref().len()
268 }
269
270 #[inline]
272 pub fn is_empty(&self) -> bool {
273 self.len() == 0
274 }
275
276 #[inline]
278 pub fn resize(&mut self, new_size: usize) {
279 self.buffer()
280 .dbg_borrow_mut()
281 .resize(self.my_checkpoint + new_size, 0);
282 }
283
284 #[inline]
290 #[cfg_attr(debug_assertions, track_caller)]
291 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
292 self.slice_range(offset..offset + size)
293 }
294
295 #[inline]
307 #[cfg_attr(debug_assertions, track_caller)]
308 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
309 let buffer = self.buffer_ref();
310 Ref::map(buffer, |b| {
311 match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
312 Some(slice) => slice,
313 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
314 }
315 })
316 }
317
318 #[inline]
329 #[cfg_attr(debug_assertions, track_caller)]
330 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
331 let buffer = self.buffer_ref();
332 Ref::map(buffer, |b| match b.get(range) {
333 Some(slice) => slice,
334 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
335 })
336 }
337
338 #[inline]
350 #[cfg_attr(debug_assertions, track_caller)]
351 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
352 let buffer = self.buffer_ref_mut();
353 RefMut::map(buffer, |b| {
354 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
355 Some(slice) => slice,
356 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
357 }
358 })
359 }
360
361 #[inline]
367 pub fn get_byte(&self, offset: usize) -> u8 {
368 self.slice_len(offset, 1)[0]
369 }
370
371 #[inline]
377 pub fn get_word(&self, offset: usize) -> B256 {
378 (*self.slice_len(offset, 32)).try_into().unwrap()
379 }
380
381 #[inline]
387 pub fn get_u256(&self, offset: usize) -> U256 {
388 self.get_word(offset).into()
389 }
390
391 #[inline]
397 #[cfg_attr(debug_assertions, track_caller)]
398 pub fn set_byte(&mut self, offset: usize, byte: u8) {
399 self.set(offset, &[byte]);
400 }
401
402 #[inline]
408 #[cfg_attr(debug_assertions, track_caller)]
409 pub fn set_word(&mut self, offset: usize, value: &B256) {
410 self.set(offset, &value[..]);
411 }
412
413 #[inline]
419 #[cfg_attr(debug_assertions, track_caller)]
420 pub fn set_u256(&mut self, offset: usize, value: U256) {
421 self.set(offset, &value.to_be_bytes::<32>());
422 }
423
424 #[inline]
430 #[cfg_attr(debug_assertions, track_caller)]
431 pub fn set(&mut self, offset: usize, value: &[u8]) {
432 if !value.is_empty() {
433 self.slice_mut(offset, value.len()).copy_from_slice(value);
434 }
435 }
436
437 #[inline]
444 #[cfg_attr(debug_assertions, track_caller)]
445 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
446 let mut dst = self.context_memory_mut();
447 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
448 }
449
450 #[inline]
452 #[cfg_attr(debug_assertions, track_caller)]
453 pub fn global_to_local_set_data(
454 &mut self,
455 memory_offset: usize,
456 data_offset: usize,
457 len: usize,
458 data_range: Range<usize>,
459 ) {
460 let mut buffer = self.buffer_ref_mut();
461 let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
462 let src = if data_range.is_empty() {
463 &mut []
464 } else {
465 src.get_mut(data_range).unwrap()
466 };
467 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
468 }
469
470 #[inline]
476 #[cfg_attr(debug_assertions, track_caller)]
477 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
478 self.context_memory_mut().copy_within(src..src + len, dst);
479 }
480
481 #[inline]
492 pub fn context_memory(&self) -> Ref<'_, [u8]> {
493 let buffer = self.buffer_ref();
494 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
495 Some(slice) => slice,
496 None => debug_unreachable!("Context memory should be always valid"),
497 })
498 }
499
500 #[inline]
511 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
512 let buffer = self.buffer_ref_mut();
513 RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
514 Some(slice) => slice,
515 None => debug_unreachable!("Context memory should be always valid"),
516 })
517 }
518}
519
/// Copies `len` bytes from `src[src_offset..]` into `dst[dst_offset..]`.
///
/// Destination bytes that the source cannot cover (source exhausted, or
/// `src_offset` past its end) are zero-filled, so exactly `len` destination
/// bytes are always written. The source is fully bounds-checked.
///
/// # Safety
///
/// The caller must guarantee that `dst_offset + len <= dst.len()`; the
/// destination window is accessed without bounds checks on the copy path.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    // Number of source bytes actually available at `src_offset` (0 if OOB).
    let available = src.len().saturating_sub(src_offset);
    if available == 0 {
        // Nothing to copy: the whole destination window becomes zeros.
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    let copy_len = min(len, available);
    // SAFETY: `src_offset + copy_len <= src.len()` by construction of
    // `available`; destination ranges are in-bounds per the caller contract.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + copy_len)
            .copy_from_slice(src.get_unchecked(src_offset..src_offset + copy_len));
        dst.get_unchecked_mut(dst_offset + copy_len..dst_offset + len)
            .fill(0);
    }
}
556
/// Returns the number of 32-byte EVM words needed to hold `len` bytes,
/// rounding up. Overflow-safe for all `usize` inputs.
#[inline]
pub const fn num_words(len: usize) -> usize {
    // Manual ceiling division: add one extra word for any partial remainder.
    len / 32 + (len % 32 != 0) as usize
}
563
564#[inline]
566pub fn resize_memory<Memory: MemoryTr>(
567 gas: &mut crate::Gas,
568 memory: &mut Memory,
569 gas_table: &GasParams,
570 offset: usize,
571 len: usize,
572) -> Result<(), InstructionResult> {
573 #[cfg(feature = "memory_limit")]
574 if memory.limit_reached(offset, len) {
575 return Err(InstructionResult::MemoryLimitOOG);
576 }
577
578 let new_num_words = num_words(offset.saturating_add(len));
579 if new_num_words > gas.memory().words_num {
580 return resize_memory_cold(gas, memory, gas_table, new_num_words);
581 }
582
583 Ok(())
584}
585
/// Slow path of [`resize_memory`]: charges the memory-expansion gas cost and
/// grows the memory to `new_num_words` 32-byte words.
///
/// Kept out-of-line (`#[cold]`, `#[inline(never)]`) so the common no-growth
/// path in `resize_memory` stays small.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    new_num_words: usize,
) -> Result<(), InstructionResult> {
    let cost = gas_table.memory_cost(new_num_words);
    // SAFETY(review): assumes `set_words_num` cannot fail here — `resize_memory`
    // only calls this path when `new_num_words` exceeds the recorded word
    // count. Confirm against `set_words_num`'s contract before relying on it.
    let cost = unsafe {
        gas.memory_mut()
            .set_words_num(new_num_words, cost)
            .unwrap_unchecked()
    };

    if !gas.record_cost(cost) {
        return Err(InstructionResult::MemoryOOG);
    }
    memory.resize(new_num_words * 32);
    Ok(())
}
607
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises word rounding, including near-overflow inputs that a naive
    // `(len + 31) / 32` would wrap on.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX - 31), usize::MAX / 32);
        assert_eq!(num_words(usize::MAX - 30), (usize::MAX / 32) + 1);
        assert_eq!(num_words(usize::MAX), (usize::MAX / 32) + 1);
    }

    // Checks checkpoint bookkeeping across nested child contexts.
    // `set_len` grows the buffer without initializing bytes; the test only
    // inspects lengths and checkpoints, never the contents.
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Dropping a child does not shrink the buffer; only the parent's
        // `free_child_context` truncates back to the recorded checkpoint.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    // Checks that `resize` zero-initializes and that freeing a child
    // restores the parent's view.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}