slvm/heap/
storage.rs

1use crate::bits::{
2    FLAG_MARK, FLAG_MUT, FLAG_STICKY, FLAG_TRACED, is_live, is_marked, is_mutable, is_traced,
3};
4use crate::{clear_bit, is_bit_set, set_bit};
5
/// Backing store for heap objects: a vector of values paralleled by a
/// per-object flag byte holding the GC bits (mark/traced/mutable/sticky).
#[derive(Debug)]
pub(super) struct Storage<T: Clone> {
    // One GC flag byte per object, parallel to `vals`.
    flags: Vec<u8>,
    // Object values; allocated with one spare slot (see `with_capacity`).
    vals: Vec<T>,
    // Logical capacity in objects; grows when allocation demands it.
    capacity: usize,
    // Count of objects currently marked live (includes sticky objects).
    live_objects: usize,
    // Count of objects pinned via the sticky bit.
    sticky_objects: usize,
    // Multiplier applied to the live count when the arena must grow.
    grow_factor: f64,
}
15
16impl<T: Clone> Storage<T> {
17    pub fn with_capacity(capacity: usize) -> Self {
18        Self {
19            flags: Vec::with_capacity(capacity),
20            // Keep one extra slot to do sway on replace.
21            vals: Vec::with_capacity(capacity + 1),
22            capacity,
23            live_objects: 0,
24            sticky_objects: 0,
25            grow_factor: 2.0,
26        }
27    }
28
29    pub fn get(&self, idx: usize) -> Option<&T> {
30        self.vals.get(idx)
31    }
32
33    pub fn get_mut(&mut self, idx: usize) -> Option<&mut T> {
34        self.vals.get_mut(idx)
35    }
36
37    pub fn set_grow_factor(&mut self, grow_factor: f64) {
38        self.grow_factor = grow_factor;
39    }
40
41    pub fn capacity(&self) -> usize {
42        self.capacity
43    }
44
45    pub fn live_objects(&self) -> usize {
46        self.live_objects
47    }
48
49    pub fn alloc(&mut self, obj: T, flags: u8) -> u32 {
50        if self.live_objects >= self.capacity {
51            let new_min = (self.live_objects as f64 * self.grow_factor) as usize;
52            if new_min > self.capacity {
53                self.capacity = new_min;
54                self.flags.reserve(new_min - self.flags.len());
55                self.vals.reserve((new_min - self.vals.len()) + 1);
56            }
57        }
58        if self.vals.len() < self.capacity {
59            let idx = self.vals.len();
60            self.vals.push(obj);
61            self.flags.push(flags | FLAG_MARK);
62            self.live_objects += 1;
63            idx as u32
64        } else {
65            for (idx, flag) in self.flags.iter_mut().enumerate() {
66                if !is_live(*flag) {
67                    self.live_objects += 1;
68                    *flag = flags | FLAG_MARK;
69                    self.vals.push(obj);
70                    self.vals.swap_remove(idx);
71                    return idx as u32;
72                }
73            }
74            panic!("Failed to allocate to heap- no free objects and no capacity!");
75        }
76    }
77
78    pub fn clear_marks(&mut self) {
79        self.live_objects = 0;
80        self.live_objects = 0;
81        for flag in self.flags.iter_mut() {
82            clear_bit!(*flag, FLAG_MARK);
83            clear_bit!(*flag, FLAG_TRACED);
84            // if it is sticky mark it
85            if is_bit_set!(*flag, FLAG_STICKY) {
86                self.live_objects += 1;
87                set_bit!(*flag, FLAG_MARK);
88            }
89        }
90    }
91
92    /// Is the object at index still alive after GC.
93    pub fn is_live(&self, idx: usize) -> bool {
94        if let Some(flag) = self.flags.get(idx) {
95            is_live(*flag)
96        } else {
97            false
98        }
99    }
100
101    /// Is the object at index mutable.
102    pub fn is_mutable(&self, idx: usize) -> bool {
103        if let Some(flag) = self.flags.get(idx) {
104            is_mutable(*flag)
105        } else {
106            false
107        }
108    }
109
110    /// Mark the object at index immutable.
111    pub fn immutable(&mut self, idx: usize) {
112        if let Some(flag) = self.flags.get_mut(idx) {
113            clear_bit!(*flag, FLAG_MUT);
114        } else {
115            panic!("Invalid object handle in immutable!")
116        }
117    }
118
119    pub fn mark(&mut self, idx: usize) {
120        if let Some(flag) = self.flags.get_mut(idx) {
121            if !is_marked(*flag) {
122                self.live_objects += 1;
123                set_bit!(*flag, FLAG_MARK);
124            }
125        } else {
126            panic!("Invalid object handle in mark!")
127        }
128    }
129
130    pub fn is_traced_and_set(&mut self, idx: usize) -> bool {
131        if let Some(flag) = self.flags.get_mut(idx) {
132            let ret = is_traced(*flag);
133            set_bit!(*flag, FLAG_TRACED);
134            ret
135        } else {
136            panic!("Invalid object handle in traced!")
137        }
138    }
139
140    pub fn sticky(&mut self, idx: usize) {
141        if let Some(flag) = self.flags.get_mut(idx) {
142            if !is_bit_set!(*flag, FLAG_STICKY) {
143                self.sticky_objects += 1;
144                set_bit!(*flag, FLAG_STICKY);
145            }
146        } else {
147            panic!("Invalid object handle in sticky!")
148        }
149    }
150
151    pub fn unsticky(&mut self, idx: usize) {
152        if let Some(flag) = self.flags.get_mut(idx) {
153            if is_bit_set!(*flag, FLAG_STICKY) {
154                self.sticky_objects -= 1;
155                clear_bit!(*flag, FLAG_STICKY);
156            }
157        } else {
158            panic!("Invalid object handle in unsticky!")
159        }
160    }
161
162    /// For any dead, live bit not set, objects in heap set them to val.
163    pub fn set_all_dead(&mut self, val: T) {
164        for (cur, flag) in self.flags.iter().enumerate() {
165            if !is_live(*flag) {
166                self.vals.push(val.clone());
167                self.vals.swap_remove(cur);
168            }
169        }
170    }
171
172    pub fn trace_all_live<FN: FnMut(&T)>(&mut self, mut trace: FN) {
173        for (flag, value) in self.flags.iter_mut().zip(self.vals.iter()) {
174            if is_live(*flag) {
175                set_bit!(*flag, FLAG_TRACED);
176                trace(value);
177            }
178        }
179    }
180}
181
182impl<T: Clone> Default for Storage<T> {
183    fn default() -> Self {
184        Self::with_capacity(512)
185    }
186}