use super::MemoryTr;
use core::{
    cell::{Ref, RefCell, RefMut},
    cmp::min,
    fmt,
    ops::Range,
};
use primitives::{hex, B256, U256};
use std::{rc::Rc, vec::Vec};

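/// Convenience extension for borrowing a [`RefCell`] whose borrows are expected
/// to never conflict: a failed borrow is treated as a bug and routed to
/// `debug_unreachable!`.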
trait RefcellExt<T> {
    fn dbg_borrow(&self) -> Ref<'_, T>;
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}

impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}

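/// Sequential memory shared between call contexts, backed by a single `Vec<u8>`.
///
/// Each context sees only the part of the buffer that starts at its own
/// checkpoint; a child context's memory is appended after its parent's.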
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
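    /// The underlying shared buffer. `None` only for [`SharedMemory::invalid`].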
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
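    /// Offset in the shared buffer where this context's memory begins.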
    my_checkpoint: usize,
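    /// Checkpoint of the currently active child context, if any.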
    child_checkpoint: Option<usize>,
    #[cfg(feature = "memory_limit")]
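    /// Maximum allowed total memory size, in bytes.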
    memory_limit: u64,
}

impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SharedMemory")
            .field("current_len", &self.len())
            .field("context_memory", &hex::encode(&*self.context_memory()))
            .finish_non_exhaustive()
    }
}

impl Default for SharedMemory {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl MemoryTr for SharedMemory {
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.set_data(memory_offset, data_offset, len, data);
    }

    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    fn size(&self) -> usize {
        self.len()
    }

    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        self.slice_range(range)
    }

    fn local_memory_offset(&self) -> usize {
        self.my_checkpoint
    }

    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    fn resize(&mut self, new_size: usize) -> bool {
        self.resize(new_size);
        true
    }

    #[cfg(feature = "memory_limit")]
    #[inline]
    fn limit_reached(&self, offset: usize, len: usize) -> bool {
        self.my_checkpoint
            .saturating_add(offset)
            .saturating_add(len) as u64
            > self.memory_limit
    }
}

impl SharedMemory {
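    /// Creates a new memory instance with a 4 KiB buffer preallocated.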
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024)
    }

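    /// Creates an invalid memory instance with no backing buffer.
    ///
    /// Intended only as a placeholder value; any buffer access through it will
    /// panic in debug builds.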
    #[inline]
    pub fn invalid() -> Self {
        Self {
            buffer: None,
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: 0,
        }
    }

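    /// Creates a new memory instance on top of an existing shared buffer.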
    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
        Self {
            buffer: Some(buffer),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

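    /// Creates a new memory instance with a buffer of the given initial capacity.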
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

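    /// Creates a new memory instance with a 4 KiB buffer preallocated and a
    /// hard memory limit.
    ///
    /// Only available with the `memory_limit` feature.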
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

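    /// Sets the memory limit. A no-op unless the `memory_limit` feature is enabled.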
    #[inline]
    pub fn set_memory_limit(&mut self, limit: u64) {
        #[cfg(feature = "memory_limit")]
        {
            self.memory_limit = limit;
        }
    }

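    /// Returns a reference to the shared buffer.
    ///
    /// Panics in debug builds if this instance was created with
    /// [`SharedMemory::invalid`].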
    #[inline]
    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::invalid");
        unsafe { self.buffer.as_ref().unwrap_unchecked() }
    }

    #[inline]
    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
        self.buffer().dbg_borrow()
    }

    #[inline]
    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
        self.buffer().dbg_borrow_mut()
    }

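    /// Prepares the shared memory for a new child context and returns it.
    ///
    /// # Panics
    ///
    /// Panics if a child context is already active, i.e. `new_child_context`
    /// was called again before `free_child_context`.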
    #[inline]
    pub fn new_child_context(&mut self) -> SharedMemory {
        if self.child_checkpoint.is_some() {
            panic!("new_child_context was already called without freeing child context");
        }
        let new_checkpoint = self.full_len();
        self.child_checkpoint = Some(new_checkpoint);
        SharedMemory {
            buffer: Some(self.buffer().clone()),
            my_checkpoint: new_checkpoint,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: self.memory_limit,
        }
    }

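    /// Frees the memory of the child context by truncating the shared buffer
    /// back to the child's checkpoint. Does nothing if no child context is active.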
    #[inline]
    pub fn free_child_context(&mut self) {
        let Some(child_checkpoint) = self.child_checkpoint.take() else {
            return;
        };
        // SAFETY: `child_checkpoint` was the buffer length when the child was
        // created, so it cannot exceed the current length, and `u8` has no
        // destructor, so shrinking via `set_len` is sound.
        unsafe {
            self.buffer_ref_mut().set_len(child_checkpoint);
        }
    }

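    /// Returns the length of this context's memory.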
    #[inline]
    pub fn len(&self) -> usize {
        self.full_len() - self.my_checkpoint
    }

    /// Returns the length of the entire shared buffer, including the memory of
    /// all parent contexts.
    fn full_len(&self) -> usize {
        self.buffer_ref().len()
    }

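    /// Returns `true` if this context's memory is empty.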
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

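    /// Resizes this context's memory to `new_size`, zero-filling any newly
    /// added bytes.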
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer()
            .dbg_borrow_mut()
            .resize(self.my_checkpoint + new_size, 0);
    }

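    /// Returns a byte slice of this context's memory starting at `offset`, with
    /// length `size`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.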
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
        self.slice_range(offset..offset + size)
    }

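    /// Returns a byte slice of this context's memory for the given local range.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.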
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| {
            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
            }
        })
    }

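    /// Returns a byte slice of the entire shared buffer for the given global
    /// range, ignoring this context's checkpoint.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.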
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

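    /// Returns a mutable byte slice of this context's memory starting at
    /// `offset`, with length `size`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.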
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| {
            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
            }
        })
    }

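    /// Returns the byte at `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.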
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

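    /// Returns the 32-byte word starting at `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.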
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        (*self.slice_len(offset, 32)).try_into().unwrap()
    }

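    /// Returns the 32-byte word starting at `offset` as a big-endian `U256`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.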
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

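    /// Sets the byte at `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.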
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

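    /// Writes a 32-byte word at `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.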
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

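    /// Writes the given `U256` at `offset` as 32 big-endian bytes.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.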
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }

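    /// Copies `value` into this context's memory starting at `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out-of-bounds access in debug builds.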
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

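    /// Copies `len` bytes of `data`, starting at `data_offset`, into this
    /// context's memory at `memory_offset`, zero-filling when the source runs
    /// out of bytes.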
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        let mut dst = self.context_memory_mut();
        // SAFETY: relies on callers having resized memory beforehand so that
        // `memory_offset..memory_offset + len` is in bounds of `dst`.
        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
    }

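    /// Copies data from a global (whole-buffer) range into this context's
    /// memory, zero-filling when the source runs out of bytes.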
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_to_local_set_data(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        let mut buffer = self.buffer_ref_mut();
        // Split at our checkpoint: everything before it is parent memory (the
        // global source), everything after it is this context's memory.
        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
        let src = if data_range.is_empty() {
            &mut []
        } else {
            src.get_mut(data_range).unwrap()
        };
        // SAFETY: relies on callers having resized memory beforehand so that
        // the destination range is in bounds of `dst`.
        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
    }

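    /// Copies `len` bytes within this context's memory from `src` to `dst`;
    /// the ranges may overlap.
    ///
    /// # Panics
    ///
    /// Panics if the ranges are out of bounds.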
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

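    /// Returns a reference to this context's memory, i.e. the part of the
    /// shared buffer past this context's checkpoint.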
    #[inline]
    pub fn context_memory(&self) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("context memory should always be valid"),
        })
    }

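    /// Returns a mutable reference to this context's memory.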
    #[inline]
    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("context memory should always be valid"),
        })
    }
}

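/// Copies `len` bytes from `src`, starting at `src_offset`, into `dst` at
/// `dst_offset`. If the source range runs past the end of `src`, the remaining
/// destination bytes are zero-filled, matching EVM semantics for reads past the
/// end of calldata or return data.
///
/// # Safety
///
/// `dst_offset..dst_offset + len` must be within the bounds of `dst`.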
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    if src_offset >= src.len() {
        // Nothing to copy: the whole destination range is zero-filled.
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero-fill the destination tail that has no corresponding source bytes.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}

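/// Returns the number of 32-byte words needed to store `len` bytes, rounding up.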
#[inline]
pub const fn num_words(len: usize) -> usize {
    len.saturating_add(31) / 32
}

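/// Resizes the memory so that `offset + len` fits, charging gas for any
/// expansion. Returns `false` if the gas cost could not be paid.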
#[inline]
#[must_use]
pub fn resize_memory<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    offset: usize,
    len: usize,
) -> bool {
    let new_num_words = num_words(offset.saturating_add(len));
    if new_num_words > gas.memory().words_num {
        resize_memory_cold(gas, memory, new_num_words)
    } else {
        true
    }
}

#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY: the caller (`resize_memory`) only takes this path when
    // `new_num_words` is greater than the currently recorded word count.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    if !gas.record_cost(cost) {
        return false;
    }
    memory.resize(new_num_words * 32);
    true
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

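    // Round-trips the byte/word/U256 setters and getters within a single
    // context, using only APIs defined in this file; `resize` zero-fills, so
    // untouched bytes read back as zero.
    #[test]
    fn set_get_roundtrip() {
        let mut sm = SharedMemory::new();
        sm.resize(64);

        sm.set_byte(0, 0xab);
        assert_eq!(sm.get_byte(0), 0xab);
        assert_eq!(sm.get_byte(1), 0);

        let word = B256::from([0x11; 32]);
        sm.set_word(32, &word);
        assert_eq!(sm.get_word(32), word);

        sm.set_u256(32, U256::from(42));
        assert_eq!(sm.get_u256(32), U256::from(42));
    }
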
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
679}