// slvm/vm/storage.rs

//! Vm code to access storage, heap, stack, globals, etc.
2
3use crate::heap::Error;
4use crate::{CallFrame, Chunk, Continuation, Handle, Heap, Interned, MutState, VMResult, Value};
5use std::sync::Arc;
6
7use crate::GVm;
8use crate::io::HeapIo;
9use crate::vm_hashmap::VMHashMap;
10
/// Iterator over the call frames on a VM's stack, starting at the VM's
/// `stack_top` and following each frame's own `stack_top` downward.
pub struct CallStackIter<'vm, ENV> {
    // The VM whose call stack is being walked.
    vm: &'vm GVm<ENV>,
    // Stack index of the slot to inspect next for a call frame.
    current: usize,
    // Stack index used on the previous step; once it reaches 0 the bottom
    // frame has already been yielded and iteration stops (see `next`).
    last_current: usize,
}
16
17impl<'vm, ENV> CallStackIter<'vm, ENV> {
18    pub fn new(vm: &'vm GVm<ENV>) -> Self {
19        CallStackIter {
20            vm,
21            current: vm.stack_top,
22            last_current: 1,
23        }
24    }
25}
26
impl<'vm, ENV> Iterator for CallStackIter<'vm, ENV> {
    type Item = &'vm CallFrame;

    /// Yield the call frame stored at stack index `current` (if that slot
    /// holds one), then step `current` down to that frame's own `stack_top`.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(frame) = self.vm.call_frame_idx(self.current) {
            if self.last_current == 0 {
                // NOTE(review): last_current only becomes 0 after yielding a
                // frame found at index 0 — presumably this guard stops the
                // bottom frame from being yielded repeatedly; confirm.
                None
            } else {
                // Remember where this frame was found, then descend.
                self.last_current = self.current;
                self.current = frame.stack_top;
                Some(frame)
            }
        } else {
            // No call frame at this slot: end of the call stack.
            None
        }
    }
}
44
45impl<ENV> GVm<ENV> {
46    pub fn clear_err_frame(&mut self) {
47        self.err_frame = None;
48    }
49
50    pub fn err_frame(&self) -> &Option<CallFrame> {
51        &self.err_frame
52    }
53
54    pub fn get_registers(&self, start: usize, end: usize) -> &[Value] {
55        &self.stack_slice()[start..end]
56    }
57
58    pub fn get_current_registers(&self) -> &[Value] {
59        let start = self.stack_top;
60        let end = self.stack_max;
61        &self.stack_slice()[start..=end]
62    }
63
64    pub fn get_stack(&self, idx: usize) -> Value {
65        self.stack(idx)
66    }
67
68    pub fn stack_max(&self) -> usize {
69        self.stack_max
70    }
71
72    pub fn get_interned(&self, i: Interned) -> &'static str {
73        self.interner.get_string(i).expect("Invalid interned value")
74    }
75
76    pub fn intern_static(&mut self, string: &'static str) -> Interned {
77        self.interner.intern_static(string)
78    }
79
80    pub fn intern(&mut self, string: &str) -> Interned {
81        self.interner.intern(string)
82    }
83
84    pub fn get_if_interned(&self, string: &str) -> Option<Interned> {
85        self.interner.get_if_interned(string)
86    }
87
88    pub fn set_global(&mut self, slot: u32, value: Value) {
89        self.globals.set(slot, value);
90    }
91
92    pub fn reserve_global(&mut self) -> u32 {
93        self.globals.reserve()
94    }
95
96    pub fn get_call_stack(&self) -> CallStackIter<ENV> {
97        CallStackIter::new(self)
98    }
99
100    pub fn sizeof_heap_object() -> usize {
101        Heap::sizeof_object()
102    }
103
104    pub fn alloc_pair(&mut self, car: Value, cdr: Value) -> Value {
105        let mut heap = self.heap.take().expect("VM must have a Heap!");
106        let res = heap.alloc_pair(car, cdr, MutState::Mutable, |heap| self.mark_roots(heap));
107        self.heap = Some(heap);
108        res
109    }
110
111    pub fn alloc_pair_ro(&mut self, car: Value, cdr: Value) -> Value {
112        let mut heap = self.heap.take().expect("VM must have a Heap!");
113        let res = heap.alloc_pair(car, cdr, MutState::Immutable, |heap| self.mark_roots(heap));
114        self.heap = Some(heap);
115        res
116    }
117
118    pub fn alloc_string(&mut self, s: String) -> Value {
119        let mut heap = self.heap.take().expect("VM must have a Heap!");
120        let res = heap.alloc_string(s, MutState::Mutable, |heap| self.mark_roots(heap));
121        self.heap = Some(heap);
122        res
123    }
124
125    pub fn alloc_string_ro(&mut self, s: String) -> Value {
126        let mut heap = self.heap.take().expect("VM must have a Heap!");
127        let res = heap.alloc_string(s, MutState::Immutable, |heap| self.mark_roots(heap));
128        self.heap = Some(heap);
129        res
130    }
131
132    pub fn alloc_char(&mut self, ch: &str) -> Value {
133        if ch.len() < 7 {
134            let mut v: [u8; 6] = [0; 6];
135            for (i, c) in ch.bytes().enumerate() {
136                v[i] = c;
137            }
138            Value::CharCluster(ch.len() as u8, v)
139        } else if let Value::String(handle) = self.alloc_string_ro(ch.to_string()) {
140            Value::CharClusterLong(handle)
141        } else {
142            panic!("Invalid alloc_string!");
143        }
144    }
145
146    pub fn alloc_vector(&mut self, v: Vec<Value>) -> Value {
147        let mut heap = self.heap.take().expect("VM must have a Heap!");
148        let res = heap.alloc_vector(v, MutState::Mutable, |heap| self.mark_roots(heap));
149        self.heap = Some(heap);
150        res
151    }
152
153    pub fn alloc_vector_ro(&mut self, v: Vec<Value>) -> Value {
154        let mut heap = self.heap.take().expect("VM must have a Heap!");
155        let res = heap.alloc_vector(v, MutState::Immutable, |heap| self.mark_roots(heap));
156        self.heap = Some(heap);
157        res
158    }
159
160    pub fn alloc_map(&mut self, map: VMHashMap) -> Value {
161        let mut heap = self.heap.take().expect("VM must have a Heap!");
162        let res = heap.alloc_map(map, MutState::Mutable, |heap| self.mark_roots(heap));
163        self.heap = Some(heap);
164        res
165    }
166
167    pub fn alloc_map_ro(&mut self, map: VMHashMap) -> Value {
168        let mut heap = self.heap.take().expect("VM must have a Heap!");
169        let res = heap.alloc_map(map, MutState::Immutable, |heap| self.mark_roots(heap));
170        self.heap = Some(heap);
171        res
172    }
173
174    pub fn alloc_list_ro(&mut self, v: Vec<Value>) -> Value {
175        let mut heap = self.heap.take().expect("VM must have a Heap!");
176        let res = Value::List(
177            heap.alloc_vector(v, MutState::Immutable, |heap| self.mark_roots(heap))
178                .get_handle()
179                .expect("Allocated vector not a vector?"),
180            0,
181        );
182        self.heap = Some(heap);
183        res
184    }
185
186    pub fn alloc_bytes(&mut self, v: Vec<u8>) -> Value {
187        let mut heap = self.heap.take().expect("VM must have a Heap!");
188        // alloc must not save mark_roots (it does not) since we broke heap away from self.
189        let res = heap.alloc_bytes(v, MutState::Mutable, |heap| self.mark_roots(heap));
190        self.heap = Some(heap);
191        res
192    }
193
194    pub fn alloc_lambda(&mut self, l: Arc<Chunk>) -> Value {
195        let mut heap = self.heap.take().expect("VM must have a Heap!");
196        // alloc must not save mark_roots (it does not) since we broke heap away from self.
197        let res = heap.alloc_lambda(l, |heap| self.mark_roots(heap));
198        self.heap = Some(heap);
199        res
200    }
201
202    pub fn alloc_closure(&mut self, l: Arc<Chunk>, v: Vec<Handle>) -> Value {
203        let mut heap = self.heap.take().expect("VM must have a Heap!");
204        // alloc must not save mark_roots (it does not) since we broke heap away from self.
205        let res = heap.alloc_closure(l, v, |heap| self.mark_roots(heap));
206        self.heap = Some(heap);
207        res
208    }
209
210    pub fn alloc_continuation(&mut self, k: Continuation) -> Value {
211        let mut heap = self.heap.take().expect("VM must have a Heap!");
212        // alloc must not save mark_roots (it does not) since we broke heap away from self.
213        let res = heap.alloc_continuation(k, |heap| self.mark_roots(heap));
214        self.heap = Some(heap);
215        res
216    }
217
218    pub fn alloc_callframe(&mut self, frame: CallFrame) -> Value {
219        let mut heap = self.heap.take().expect("VM must have a Heap!");
220        // alloc must not save mark_roots (it does not) since we broke heap away from self.
221        let res = heap.alloc_callframe(frame, |heap| self.mark_roots(heap));
222        self.heap = Some(heap);
223        res
224    }
225
226    /// Allocate a Value on the heap.  Moving a value to the heap is useful for captured variable
227    /// for instance.
228    pub fn alloc_value(&mut self, val: Value) -> Value {
229        let mut heap = self.heap.take().expect("VM must have a Heap!");
230        let res = heap.alloc_value(val, MutState::Mutable, |heap| self.mark_roots(heap));
231        self.heap = Some(heap);
232        res
233    }
234
235    /// Allocate an Error on the heap.
236    pub fn alloc_error(&mut self, err: Error) -> Value {
237        let mut heap = self.heap.take().expect("VM must have a Heap!");
238        let res = heap.alloc_error(err, MutState::Mutable, |heap| self.mark_roots(heap));
239        self.heap = Some(heap);
240        res
241    }
242
243    /// Allocate a Value on the heap.  Moving a value to the heap is useful for captured variable
244    /// for instance.
245    pub fn alloc_io(&mut self, io: HeapIo) -> Value {
246        let mut heap = self.heap.take().expect("VM must have a Heap!");
247        let res = heap.alloc_io(io, MutState::Mutable, |heap| self.mark_roots(heap));
248        self.heap = Some(heap);
249        res
250    }
251
252    pub fn heap_immutable(&mut self, val: Value) {
253        self.heap_mut().immutable(val);
254    }
255
256    pub fn heap_sticky(&mut self, val: Value) {
257        self.heap_mut().sticky(val);
258    }
259
260    pub fn heap_unsticky(&mut self, val: Value) {
261        self.heap_mut().unsticky(val);
262    }
263
264    /// Pause garbage collection.
265    /// Each pause_gc must have an unpause_gc before GC resumes (it is a counter that must be 0).
266    pub fn pause_gc(&mut self) {
267        self.heap_mut().pause_gc();
268    }
269
270    /// UnPause garbage collection.
271    /// Each pause_gc must have an unpause_gc before GC resumes (it is a counter that must be 0).
272    pub fn unpause_gc(&mut self) {
273        self.heap_mut().unpause_gc();
274    }
275
276    pub fn get_heap_property(&self, key_val: Value, prop: &str) -> Option<Value> {
277        if let Some(interned) = self.get_if_interned(prop) {
278            self.heap().get_property(key_val, interned)
279        } else {
280            None
281        }
282    }
283
284    pub fn set_heap_property(&mut self, key_val: Value, prop: &str, value: Value) {
285        let str_ref = self.intern(prop);
286        self.heap_mut().set_property(key_val, str_ref, value)
287    }
288
289    pub fn get_heap_property_interned(&self, key_val: Value, prop: Interned) -> Option<Value> {
290        self.heap().get_property(key_val, prop)
291    }
292
293    pub fn set_heap_property_interned(&mut self, key_val: Value, prop: Interned, value: Value) {
294        self.heap_mut().set_property(key_val, prop, value)
295    }
296
297    pub fn get_global_property(&self, global: u32, prop: Interned) -> Option<Value> {
298        self.globals.get_property(global, prop)
299    }
300
301    pub fn set_global_property(&mut self, global: u32, prop: Interned, value: Value) {
302        self.globals.set_property(global, prop, value)
303    }
304
305    pub fn get_global(&self, idx: u32) -> Value {
306        self.globals.get(idx)
307    }
308
309    pub fn get_string(&self, handle: Handle) -> &str {
310        self.heap().get_string(handle)
311    }
312
313    pub fn get_string_mut(&mut self, handle: Handle) -> VMResult<&mut String> {
314        self.heap_mut().get_string_mut(handle)
315    }
316
317    pub fn get_vector(&self, handle: Handle) -> &[Value] {
318        self.heap().get_vector(handle)
319    }
320
321    pub fn get_vector_mut(&mut self, handle: Handle) -> VMResult<&mut Vec<Value>> {
322        self.heap_mut().get_vector_mut(handle)
323    }
324
325    pub fn get_map(&self, handle: Handle) -> &VMHashMap {
326        self.heap().get_map(handle)
327    }
328
329    pub fn get_map_mut(&mut self, handle: Handle) -> VMResult<&mut VMHashMap> {
330        self.heap_mut().get_map_mut(handle)
331    }
332
333    pub fn get_bytes(&self, handle: Handle) -> &[u8] {
334        self.heap().get_bytes(handle)
335    }
336
337    pub fn get_pair(&self, handle: Handle) -> (Value, Value) {
338        self.heap().get_pair(handle)
339    }
340
341    pub fn get_pair_mut(&mut self, handle: Handle) -> VMResult<(&mut Value, &mut Value)> {
342        self.heap_mut().get_pair_mut(handle)
343    }
344
345    pub fn get_pair_mut_override(&mut self, handle: Handle) -> (&mut Value, &mut Value) {
346        self.heap_mut().get_pair_mut_override(handle)
347    }
348
349    pub fn get_lambda(&self, handle: Handle) -> Arc<Chunk> {
350        self.heap().get_lambda(handle)
351    }
352
353    pub fn get_closure(&self, handle: Handle) -> (Arc<Chunk>, &[Handle]) {
354        self.heap().get_closure(handle)
355    }
356
357    pub fn get_continuation(&self, handle: Handle) -> &Continuation {
358        self.heap().get_continuation(handle)
359    }
360
361    pub fn get_callframe(&self, handle: Handle) -> &CallFrame {
362        self.heap().get_callframe(handle)
363    }
364
365    pub fn get_value(&self, handle: Handle) -> Value {
366        self.heap().get_value(handle)
367    }
368
369    pub fn get_value_mut(&mut self, handle: Handle) -> &mut Value {
370        self.heap_mut().get_value_mut(handle)
371    }
372
373    pub fn get_error(&self, handle: Handle) -> Error {
374        self.heap().get_error(handle)
375    }
376
377    pub fn get_io(&self, handle: Handle) -> &HeapIo {
378        self.heap().get_io(handle)
379    }
380
381    pub fn new_upval(&mut self, val: Value) -> Value {
382        self.alloc_value(val)
383    }
384
385    pub fn call_frame(&self) -> Option<&CallFrame> {
386        self.call_frame_idx(self.stack_top)
387    }
388
389    pub fn make_err(&mut self, key: &'static str, data: Value) -> Value {
390        let keyword = self.intern_static(key);
391        let err = Error { keyword, data };
392        self.alloc_error(err)
393    }
394
395    pub(super) fn call_frame_idx(&self, idx: usize) -> Option<&CallFrame> {
396        match self.stack(idx) {
397            Value::CallFrame(handle) => Some(self.get_callframe(handle)),
398            _ => None,
399        }
400    }
401
402    pub(super) fn copy_frame_defers(&mut self) {
403        if let Some(frame) = self.call_frame() {
404            // Need to break the call frame lifetime from self to avoid extra work (allocations).
405            // This should safe because the stack and heap are not touched so the reference is
406            // stable.  The unwrap() is OK because the frame can not be NULL.
407            let frame: &CallFrame = unsafe { (frame as *const CallFrame).as_ref().unwrap() };
408            self.defers.resize(frame.defers.len(), Value::Undefined);
409            // Generally self.defers will be empty but if not don't loose them!
410            self.defers.copy_from_slice(&frame.defers[..]);
411        }
412    }
413
414    fn mark_roots(&mut self, heap: &mut Heap) -> VMResult<()> {
415        self.globals.mark(heap);
416        // TODO- add a bound to ENV so we can call a mark_roots?  I think we need this for the
417        // temporarily held doc_string for instance but also generally useful?
418        for i in 0..self.stack_max {
419            heap.mark(self.stack(i));
420        }
421        if let Some(this_fn) = self.this_fn {
422            heap.mark(this_fn);
423        }
424        if let Some(on_error) = self.on_error {
425            heap.mark(on_error);
426        }
427        // TODO: XXX do we need this?  Probably but maybe not.
428        if let Some(err_frame) = &self.err_frame {
429            heap.mark_call_frame(err_frame);
430        }
431        for defer in &self.defers {
432            heap.mark(*defer);
433        }
434        Ok(())
435    }
436}