1use super::MemoryTr;
2use crate::{gas::params::GasParams, InstructionResult};
3use core::{
4 cell::{Ref, RefCell, RefMut},
5 cmp::min,
6 fmt,
7 ops::Range,
8};
9use primitives::{hex, B256, U256};
10use std::{rc::Rc, vec::Vec};
11
/// Extension trait for [`RefCell`] that reports borrow conflicts through
/// `debug_unreachable!` instead of the default `BorrowError` panic.
trait RefcellExt<T> {
    /// Immutably borrows the cell; see [`RefCell::try_borrow`].
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell; see [`RefCell::try_borrow_mut`].
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
16
impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            // A borrow conflict here is a logic bug in this module, not a
            // recoverable condition; report it via `debug_unreachable!`.
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
34
/// Memory buffer shared between call contexts.
///
/// A single `Vec<u8>` is shared via `Rc<RefCell<..>>`; each context views the
/// region of the buffer starting at its own checkpoint.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The shared backing buffer. `None` only for instances created by
    /// `SharedMemory::invalid`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Offset into `buffer` where this context's memory begins.
    my_checkpoint: usize,
    /// Offset where the active child context's memory begins, if any.
    child_checkpoint: Option<usize>,
    /// Upper bound (in bytes) on total buffer growth, checked by
    /// `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
53
impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only this context's memory is rendered (hex-encoded), not the
        // whole shared buffer.
        f.debug_struct("SharedMemory")
            .field("current_len", &self.len())
            .field("context_memory", &hex::encode(&*self.context_memory()))
            .finish_non_exhaustive()
    }
}
62
impl Default for SharedMemory {
    /// Equivalent to [`SharedMemory::new`].
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
69
70impl MemoryTr for SharedMemory {
71 fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
72 self.set_data(memory_offset, data_offset, len, data);
73 }
74
75 fn set(&mut self, memory_offset: usize, data: &[u8]) {
76 self.set(memory_offset, data);
77 }
78
79 fn size(&self) -> usize {
80 self.len()
81 }
82
83 fn copy(&mut self, destination: usize, source: usize, len: usize) {
84 self.copy(destination, source, len);
85 }
86
87 fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
88 self.slice_range(range)
89 }
90
91 fn local_memory_offset(&self) -> usize {
92 self.my_checkpoint
93 }
94
95 fn set_data_from_global(
96 &mut self,
97 memory_offset: usize,
98 data_offset: usize,
99 len: usize,
100 data_range: Range<usize>,
101 ) {
102 self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
103 }
104
105 #[inline]
116 #[cfg_attr(debug_assertions, track_caller)]
117 fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
118 let buffer = self.buffer_ref();
119 Ref::map(buffer, |b| match b.get(range) {
120 Some(slice) => slice,
121 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
122 })
123 }
124
125 fn resize(&mut self, new_size: usize) -> bool {
126 self.resize(new_size);
127 true
128 }
129
130 #[cfg(feature = "memory_limit")]
133 #[inline]
134 fn limit_reached(&self, offset: usize, len: usize) -> bool {
135 self.my_checkpoint
136 .saturating_add(offset)
137 .saturating_add(len) as u64
138 > self.memory_limit
139 }
140}
141
142impl SharedMemory {
143 #[inline]
147 pub fn new() -> Self {
148 Self::with_capacity(4 * 1024) }
150
151 #[inline]
153 pub fn invalid() -> Self {
154 Self {
155 buffer: None,
156 my_checkpoint: 0,
157 child_checkpoint: None,
158 #[cfg(feature = "memory_limit")]
159 memory_limit: 0,
160 }
161 }
162
163 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
165 Self {
166 buffer: Some(buffer),
167 my_checkpoint: 0,
168 child_checkpoint: None,
169 #[cfg(feature = "memory_limit")]
170 memory_limit: u64::MAX,
171 }
172 }
173
174 #[inline]
176 pub fn with_capacity(capacity: usize) -> Self {
177 Self {
178 buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
179 my_checkpoint: 0,
180 child_checkpoint: None,
181 #[cfg(feature = "memory_limit")]
182 memory_limit: u64::MAX,
183 }
184 }
185
186 #[cfg(feature = "memory_limit")]
191 #[inline]
192 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
193 Self {
194 memory_limit,
195 ..Self::new()
196 }
197 }
198
199 #[inline]
201 pub fn set_memory_limit(&mut self, limit: u64) {
202 #[cfg(feature = "memory_limit")]
203 {
204 self.memory_limit = limit;
205 }
206 let _ = limit;
208 }
209
210 #[inline]
211 fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
212 debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
213 unsafe { self.buffer.as_ref().unwrap_unchecked() }
214 }
215
216 #[inline]
217 fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
218 self.buffer().dbg_borrow()
219 }
220
221 #[inline]
222 fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
223 self.buffer().dbg_borrow_mut()
224 }
225
226 #[inline]
232 pub fn new_child_context(&mut self) -> SharedMemory {
233 if self.child_checkpoint.is_some() {
234 panic!("new_child_context was already called without freeing child context");
235 }
236 let new_checkpoint = self.full_len();
237 self.child_checkpoint = Some(new_checkpoint);
238 SharedMemory {
239 buffer: Some(self.buffer().clone()),
240 my_checkpoint: new_checkpoint,
241 child_checkpoint: None,
243 #[cfg(feature = "memory_limit")]
244 memory_limit: self.memory_limit,
245 }
246 }
247
248 #[inline]
250 pub fn free_child_context(&mut self) {
251 let Some(child_checkpoint) = self.child_checkpoint.take() else {
252 return;
253 };
254 unsafe {
255 self.buffer_ref_mut().set_len(child_checkpoint);
256 }
257 }
258
259 #[inline]
261 pub fn len(&self) -> usize {
262 self.full_len() - self.my_checkpoint
263 }
264
265 fn full_len(&self) -> usize {
266 self.buffer_ref().len()
267 }
268
269 #[inline]
271 pub fn is_empty(&self) -> bool {
272 self.len() == 0
273 }
274
275 #[inline]
277 pub fn resize(&mut self, new_size: usize) {
278 self.buffer()
279 .dbg_borrow_mut()
280 .resize(self.my_checkpoint + new_size, 0);
281 }
282
283 #[inline]
289 #[cfg_attr(debug_assertions, track_caller)]
290 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
291 self.slice_range(offset..offset + size)
292 }
293
294 #[inline]
306 #[cfg_attr(debug_assertions, track_caller)]
307 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
308 let buffer = self.buffer_ref();
309 Ref::map(buffer, |b| {
310 match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
311 Some(slice) => slice,
312 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
313 }
314 })
315 }
316
317 #[inline]
328 #[cfg_attr(debug_assertions, track_caller)]
329 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
330 let buffer = self.buffer_ref();
331 Ref::map(buffer, |b| match b.get(range) {
332 Some(slice) => slice,
333 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
334 })
335 }
336
337 #[inline]
349 #[cfg_attr(debug_assertions, track_caller)]
350 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
351 let buffer = self.buffer_ref_mut();
352 RefMut::map(buffer, |b| {
353 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
354 Some(slice) => slice,
355 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
356 }
357 })
358 }
359
360 #[inline]
366 pub fn get_byte(&self, offset: usize) -> u8 {
367 self.slice_len(offset, 1)[0]
368 }
369
370 #[inline]
376 pub fn get_word(&self, offset: usize) -> B256 {
377 (*self.slice_len(offset, 32)).try_into().unwrap()
378 }
379
380 #[inline]
386 pub fn get_u256(&self, offset: usize) -> U256 {
387 self.get_word(offset).into()
388 }
389
390 #[inline]
396 #[cfg_attr(debug_assertions, track_caller)]
397 pub fn set_byte(&mut self, offset: usize, byte: u8) {
398 self.set(offset, &[byte]);
399 }
400
401 #[inline]
407 #[cfg_attr(debug_assertions, track_caller)]
408 pub fn set_word(&mut self, offset: usize, value: &B256) {
409 self.set(offset, &value[..]);
410 }
411
412 #[inline]
418 #[cfg_attr(debug_assertions, track_caller)]
419 pub fn set_u256(&mut self, offset: usize, value: U256) {
420 self.set(offset, &value.to_be_bytes::<32>());
421 }
422
423 #[inline]
429 #[cfg_attr(debug_assertions, track_caller)]
430 pub fn set(&mut self, offset: usize, value: &[u8]) {
431 if !value.is_empty() {
432 self.slice_mut(offset, value.len()).copy_from_slice(value);
433 }
434 }
435
436 #[inline]
443 #[cfg_attr(debug_assertions, track_caller)]
444 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
445 let mut dst = self.context_memory_mut();
446 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
447 }
448
449 #[inline]
451 #[cfg_attr(debug_assertions, track_caller)]
452 pub fn global_to_local_set_data(
453 &mut self,
454 memory_offset: usize,
455 data_offset: usize,
456 len: usize,
457 data_range: Range<usize>,
458 ) {
459 let mut buffer = self.buffer_ref_mut();
460 let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
461 let src = if data_range.is_empty() {
462 &mut []
463 } else {
464 src.get_mut(data_range).unwrap()
465 };
466 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
467 }
468
469 #[inline]
475 #[cfg_attr(debug_assertions, track_caller)]
476 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
477 self.context_memory_mut().copy_within(src..src + len, dst);
478 }
479
480 #[inline]
491 pub fn context_memory(&self) -> Ref<'_, [u8]> {
492 let buffer = self.buffer_ref();
493 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
494 Some(slice) => slice,
495 None => debug_unreachable!("Context memory should be always valid"),
496 })
497 }
498
499 #[inline]
510 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
511 let buffer = self.buffer_ref_mut();
512 RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
513 Some(slice) => slice,
514 None => debug_unreachable!("Context memory should be always valid"),
515 })
516 }
517}
518
/// Copies up to `len` bytes from `src[src_offset..]` into
/// `dst[dst_offset..dst_offset + len]`, zero-filling every destination byte
/// that has no corresponding source byte.
///
/// If `src_offset` is at or past the end of `src`, the whole destination
/// window is zeroed.
///
/// # Safety
///
/// `dst_offset + len` must not overflow and must be within `dst.len()`:
/// only the all-zero branch performs a checked access (`get_mut().unwrap()`);
/// the copy and tail-fill paths use `get_unchecked_mut`.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    // Source exhausted: zero-fill the entire destination window.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Number of bytes actually available from the source.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: bounds established by the early return and `min` above.
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees the destination window is in bounds.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero-fill the tail of the window not covered by the source.
    // SAFETY: same caller guarantee as above; `src_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
555
/// Returns the number of 32-byte words required to hold `len` bytes,
/// rounding up.
#[inline]
pub const fn num_words(len: usize) -> usize {
    // Equivalent to `len.div_ceil(32)`: round up without overflow even for
    // `len == usize::MAX`.
    let full = len / 32;
    if len % 32 == 0 {
        full
    } else {
        full + 1
    }
}
562
/// Ensures `memory` can hold `offset + len` bytes, charging gas for any
/// word-granular expansion.
///
/// Returns `MemoryLimitOOG` when the configured memory limit would be
/// exceeded (`memory_limit` feature only) and `MemoryOOG` when the expansion
/// cost cannot be paid. Memory that is already large enough is untouched.
#[inline]
pub fn resize_memory<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    offset: usize,
    len: usize,
) -> Result<(), InstructionResult> {
    #[cfg(feature = "memory_limit")]
    if memory.limit_reached(offset, len) {
        return Err(InstructionResult::MemoryLimitOOG);
    }

    // Saturating add: an overflowing end offset maps to the maximum word
    // count rather than wrapping.
    let new_num_words = num_words(offset.saturating_add(len));
    if new_num_words > gas.memory().words_num {
        // Growth is the uncommon case; keep it out of line.
        return resize_memory_cold(gas, memory, gas_table, new_num_words);
    }

    Ok(())
}
584
/// Cold path of [`resize_memory`]: charges the expansion cost and grows the
/// memory to `new_num_words * 32` bytes.
///
/// `#[cold]` + `#[inline(never)]` keep the common no-growth caller small.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    gas_table: &GasParams,
    new_num_words: usize,
) -> Result<(), InstructionResult> {
    let cost = gas_table.memory_cost(new_num_words);
    // SAFETY: NOTE(review): assumes `set_words_num` cannot fail when
    // `new_num_words` exceeds the current word count (the only way this
    // function is reached) — confirm against its implementation.
    let cost = unsafe {
        gas.memory_mut()
            .set_words_num(new_num_words, cost)
            .unwrap_unchecked()
    };

    if !gas.record_cost(cost) {
        return Err(InstructionResult::MemoryOOG);
    }
    memory.resize(new_num_words * 32);
    Ok(())
}
606
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        // Boundary cases around `usize::MAX` exercise the overflow-free
        // rounding.
        assert_eq!(num_words(usize::MAX - 31), usize::MAX / 32);
        assert_eq!(num_words(usize::MAX - 30), (usize::MAX / 32) + 1);
        assert_eq!(num_words(usize::MAX), (usize::MAX / 32) + 1);
    }

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // `set_len` fakes buffer growth without initializing contents; the
        // test only inspects lengths and checkpoints.
        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Freeing each child truncates the shared buffer back to that
        // child's checkpoint.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        // Child resize grows the shared buffer past the parent's region.
        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
692}