1use std::sync::Arc;
4
5use crate::{CallFrame, Chunk, Continuation, GVm, VMError, VMResult, Value, mov_register};
6
7impl<ENV> GVm<ENV> {
8 pub(crate) fn setup_rest(
10 &mut self,
11 chunk: &Arc<Chunk>,
12 first_reg: u16,
13 num_args: u16,
14 ) -> (usize, Value) {
15 let rest_reg = first_reg + chunk.args + chunk.opt_args;
16 let v = if num_args < (chunk.args + chunk.opt_args) {
17 Value::Nil
18 } else {
19 let rest_len = (num_args - (chunk.args + chunk.opt_args)) as usize + 1;
20 let mut r = vec![Value::Undefined; rest_len];
21 r.copy_from_slice(
22 &self.register_slice()[rest_reg as usize..(rest_reg as usize + rest_len)],
23 );
24 self.alloc_list_ro(r)
25 };
26 (rest_reg.into(), v)
27 }
28
    /// Inspect the live call frame at `stack_top` and decide whether it is
    /// shared with continuation `k`'s saved stack.
    ///
    /// Returns `Some((parent_stack_top, defers))` for a frame that is NOT part
    /// of `k`'s stack (its defers must still be honored before jumping through
    /// the continuation), or `None` when the frame is shared with `k` (same
    /// frame id) or no call frame is found at `stack_top`.
    ///
    /// NOTE(review): "shared" here is judged purely by `CallFrame::id`
    /// equality — presumes ids are unique per live frame; confirm against the
    /// frame-allocation path (`make_call_frame` increments `callframe_id`).
    fn k_unshared_stack(&self, stack_top: usize, k: &Continuation) -> Option<(usize, &Vec<Value>)> {
        if !k.stack.is_empty() {
            if k.frame.stack_top >= stack_top {
                // stack_top falls within the continuation's saved stack:
                // compare the live frame to the one the continuation captured
                // at the same slot.
                if let Value::CallFrame(h) = self.stack(stack_top) {
                    let frame = self.heap().get_callframe(h);
                    if let Value::CallFrame(k_h) = k.stack[stack_top] {
                        let k_frame = self.heap().get_callframe(k_h);
                        if frame.id != k_frame.id {
                            // Different frame at the same slot: not shared.
                            return Some((frame.stack_top, &frame.defers));
                        }
                    } else {
                        // The continuation has no frame in this slot at all.
                        return Some((frame.stack_top, &frame.defers));
                    }
                }
            } else if let Value::CallFrame(h) = self.stack(stack_top) {
                // stack_top is above (outside) the continuation's saved stack;
                // only the continuation's own frame counts as shared here.
                let frame = self.heap().get_callframe(h);
                return if k.frame.id == frame.id {
                    None
                } else {
                    Some((frame.stack_top, &frame.defers))
                };
            }
        }
        None
    }
56
57 fn k_defers(&self, k: &Continuation) -> Option<usize> {
58 if !self.defers.is_empty() {
59 return None;
60 }
61 let mut stack_top = if let Some(k_stack_top) = self.k_stack_top {
62 k_stack_top
63 } else {
64 self.stack_top
65 };
66 while let Some((next_stack_top, defers)) = self.k_unshared_stack(stack_top, k) {
67 if stack_top == next_stack_top {
68 break;
69 }
70 if !defers.is_empty() {
71 return Some(stack_top);
72 }
73 stack_top = next_stack_top;
74 }
75 None
76 }
77
78 pub(crate) fn make_call_frame(
80 &mut self,
81 chunk: Arc<Chunk>,
82 called: Value,
83 with_defers: bool,
84 ) -> CallFrame {
85 let defers = if with_defers {
86 std::mem::take(&mut self.defers)
87 } else {
88 Vec::new()
89 };
90 let frame = CallFrame {
91 id: self.callframe_id,
92 chunk,
93 ip: self.ip_ptr,
94 current_ip: self.current_ip_ptr,
95 stack_top: self.stack_top,
96 this_fn: self.this_fn,
97 defers,
98 on_error: self.on_error,
99 called,
100 };
101 self.callframe_id += 1;
102 frame
103 }
104
105 fn finish_special_call(
106 &mut self,
107 chunk: Arc<Chunk>,
108 tail_call: bool,
109 first_reg: u16,
110 res: Value,
111 ) -> Arc<Chunk> {
112 let res_reg = self.stack_top + first_reg as usize;
113 if tail_call {
114 if let Some(frame) = self.call_frame() {
116 let stack_top = frame.stack_top;
117 let ip_ptr = frame.ip;
118 let current_ip = frame.current_ip;
119 let this_fn = frame.this_fn;
120 let on_error = frame.on_error;
121 let new_chunk = frame.chunk.clone();
122 self.copy_frame_defers(); self.stack_top = stack_top;
124 self.stack_max = self.stack_top + new_chunk.input_regs + new_chunk.extra_regs;
125 self.ip_ptr = ip_ptr;
126 self.current_ip_ptr = current_ip;
127 self.this_fn = this_fn;
128 self.on_error = on_error;
129 *self.stack_mut(res_reg) = res;
130 new_chunk
131 } else {
132 *self.stack_mut(res_reg) = res;
133 chunk
134 }
135 } else {
136 *self.stack_mut(res_reg) = res;
137 chunk
138 }
139 }
    /// Dispatch a call on `lambda` and set up the VM to execute it.
    ///
    /// Handles every callable shape: builtins (invoked immediately), lambdas
    /// and closures (frame pushed unless `tail_call`), continuations (defers
    /// run first, then the saved stack is restored), and map/vector/list
    /// "indexing calls".  `Value::Value` indirections recurse.
    ///
    /// Returns the chunk to continue executing, or the error paired with a
    /// chunk so the caller can still unwind/report.
    ///
    /// Arguments live in registers `first_reg+1 ..= first_reg+num_args`
    /// relative to the current stack top; `first_reg` itself receives the
    /// saved `CallFrame` on a non-tail call.
    pub fn make_call(
        &mut self,
        lambda: Value,
        chunk: Arc<Chunk>,
        first_reg: u16,
        num_args: u16,
        tail_call: bool,
    ) -> Result<Arc<Chunk>, (VMError, Arc<Chunk>)> {
        // Set when a continuation should actually be jumped through after all
        // of its pending defers have run (see the Continuation arm).
        let mut do_cont = false;
        let result = match lambda {
            Value::Builtin(f_idx) => {
                // One past the last argument register.
                let last_reg = (first_reg + num_args + 1) as usize;
                let f = &self.builtins[f_idx as usize];
                let regs = self.register_slice();

                let res =
                    (f.func)(self, &regs[(first_reg + 1) as usize..last_reg]).map_err(|e| {
                        if self.err_frame().is_some() {
                            // An error handler exists: push a frame so the
                            // handler can unwind through this builtin call.
                            let frame = self.make_call_frame(chunk.clone(), lambda, false);
                            // GC must not run while the frame value is not yet
                            // rooted in a register.
                            self.pause_gc();
                            let call_frame = self.alloc_callframe(frame);
                            self.unpause_gc();
                            mov_register!(self, first_reg as usize, call_frame);
                            self.stack_top += first_reg as usize;
                        }
                        (e, chunk.clone())
                    })?;
                Ok(self.finish_special_call(chunk, tail_call, first_reg, res))
            }
            Value::Lambda(handle) => {
                let l = self.heap().get_lambda(handle);
                check_num_args(&l, num_args).map_err(|e| (e, chunk.clone()))?;
                if l.rest {
                    // Gather surplus args into the rest list register.
                    let (rest_reg, h) = self.setup_rest(&l, first_reg, num_args);
                    *self.stack_mut(self.stack_top + rest_reg) = h;
                }
                if !tail_call {
                    // Save the caller (taking its defers) and shift the
                    // register window up to first_reg.
                    let frame = self.make_call_frame(chunk, lambda, true);
                    self.pause_gc();
                    let aframe = self.alloc_callframe(frame);
                    self.unpause_gc();
                    mov_register!(self, first_reg as usize, aframe);
                    self.stack_top += first_reg as usize;
                }
                self.stack_max = self.stack_top + l.input_regs + l.extra_regs;
                self.this_fn = Some(lambda);
                self.ip_ptr = get_code!(l);
                // Unsupplied optional/extra registers must not hold stale values.
                self.clear_opts(&l, first_reg, num_args);
                Ok(l)
            }
            Value::Closure(handle) => {
                let stack_top = self.stack_top;
                let (l, _) = self.heap().get_closure(handle);
                check_num_args(&l, num_args).map_err(|e| (e, chunk.clone()))?;
                // First register for captured values: just past the rest list
                // (if any) or past the declared params.
                let cap_first = if l.rest {
                    let (rest_reg, h) = self.setup_rest(&l, first_reg, num_args);
                    *self.stack_mut(self.stack_top + rest_reg) = h;
                    rest_reg + 1
                } else {
                    (first_reg + l.args + l.opt_args + 1) as usize
                };

                // Temporarily take the heap so captures can be read while the
                // stack is mutated (avoids a simultaneous borrow of self).
                let heap = self.heap.take().expect("VM must have a Heap!");
                let caps = heap.get_closure_captures(handle);
                for (i, c) in caps.iter().enumerate() {
                    *self.stack_mut(self.stack_top + cap_first + i) = Value::Value(*c);
                }
                self.heap = Some(heap);

                let frame = if !tail_call {
                    let frame = self.make_call_frame(chunk, lambda, true);
                    self.stack_top += first_reg as usize;
                    Some(frame)
                } else {
                    // A tail call must reuse the current window from its base.
                    assert_eq!(first_reg, 0);
                    None
                };
                self.stack_max = self.stack_top + l.input_regs + l.extra_regs;
                self.this_fn = Some(lambda);
                self.ip_ptr = get_code!(l);
                if let Some(frame) = frame {
                    self.pause_gc();
                    let aframe = self.alloc_callframe(frame);
                    self.unpause_gc();
                    // Root the frame at the ORIGINAL stack top (saved above,
                    // before the window shifted).
                    *self.stack_mut(stack_top + first_reg as usize) = aframe;
                }
                self.clear_opts(&l, first_reg, num_args);
                Ok(l)
            }
            Value::Continuation(handle) => {
                let k = self.heap().get_continuation(handle);
                if num_args != 1 {
                    return Err((VMError::new_vm("Continuation takes one argument."), chunk));
                }
                // Before jumping, any defers on frames not shared with the
                // continuation's stack must run.
                let from = self.k_defers(k);
                if let Some(from) = from {
                    let frame = self.call_frame_idx(from).expect("Invalid frame index!");
                    // SAFETY(review): raw-pointer round trip detaches the
                    // frame borrow from self so self.defers can be mutated
                    // below; presumes resize/copy_from_slice cannot move or
                    // free the frame's heap storage — confirm against the
                    // heap implementation.
                    let frame: &CallFrame =
                        unsafe { (frame as *const CallFrame).as_ref().unwrap() };
                    self.defers.resize(frame.defers.len(), Value::Undefined);
                    self.defers.copy_from_slice(&frame.defers[..]);
                    self.k_stack_top = Some(frame.stack_top);
                }
                if let Some(defer) = self.defers.pop() {
                    // Run the next pending defer; the continuation call will
                    // be re-attempted afterwards (ip rewound to current_ip).
                    let first_reg = (chunk.input_regs + chunk.extra_regs + 1) as u16;
                    self.ip_ptr = self.current_ip_ptr;
                    self.make_call(defer, chunk, first_reg, 0, false)
                } else {
                    // All defers done: actually jump through the continuation
                    // (handled after this match so the heap borrow is free).
                    self.k_stack_top = None;
                    do_cont = true;
                    Ok(chunk)
                }
            }
            Value::Map(handle) => {
                // Calling a map indexes it: (map key) style access.
                let res = self
                    .call_map(handle, first_reg, num_args)
                    .map_err(|e| (e, chunk.clone()))?;
                Ok(self.finish_special_call(chunk, tail_call, first_reg, res))
            }
            Value::Vector(handle) => {
                let res = self
                    .call_vector(handle, first_reg, num_args)
                    .map_err(|e| (e, chunk.clone()))?;
                Ok(self.finish_special_call(chunk, tail_call, first_reg, res))
            }
            Value::Pair(_) | Value::List(_, _) => {
                let res = self
                    .call_list(lambda, first_reg, num_args)
                    .map_err(|e| (e, chunk.clone()))?;
                Ok(self.finish_special_call(chunk, tail_call, first_reg, res))
            }
            Value::Value(handle) => {
                // Indirect value (e.g. a closed-over binding): unwrap and retry.
                self.make_call(
                    self.get_value(handle),
                    chunk,
                    first_reg,
                    num_args,
                    tail_call,
                )
            }
            _ => Err((
                VMError::new_vm(format!("CALL: Not a callable {lambda:?}.")),
                chunk,
            )),
        };
        if do_cont {
            // Jump through the continuation: restore its saved stack, frame
            // state and defers, deliver the single argument to its arg_reg.
            match lambda {
                Value::Continuation(h) => {
                    let heap = self.heap.take().expect("VM must have a Heap!");
                    let k = heap.get_continuation(h);
                    // Read the argument BEFORE the stack is overwritten below.
                    let arg = self.register(first_reg as usize + 1);

                    self.defers.resize(k.frame.defers.len(), Value::Undefined);
                    self.defers.copy_from_slice(&k.frame.defers[..]);

                    self.stack_slice_mut()[..k.stack.len()].copy_from_slice(&k.stack[..]);
                    *self.stack_mut(k.arg_reg) = arg;
                    self.stack_top = k.frame.stack_top;
                    self.stack_max =
                        self.stack_top + k.frame.chunk.input_regs + k.frame.chunk.extra_regs;
                    self.ip_ptr = k.frame.ip;
                    self.current_ip_ptr = k.frame.current_ip;
                    self.this_fn = k.frame.this_fn;
                    self.on_error = k.frame.on_error;
                    let chunk = k.frame.chunk.clone();
                    self.heap = Some(heap);
                    Ok(chunk)
                }
                // do_cont is only ever set in the Continuation arm above.
                _ => panic!("Must be a continuation!"),
            }
        } else {
            result
        }
    }
327
328 fn clear_opts(&mut self, l: &Chunk, first_reg: u16, num_args: u16) {
331 let num_args = if l.rest && num_args == 0 {
333 1
335 } else {
336 num_args
337 };
338 let end_arg = if l.rest {
339 l.args + l.opt_args - 1
341 } else {
342 l.args + l.opt_args
343 };
344 if num_args < end_arg {
345 for r in num_args..end_arg {
346 mov_register!(
347 self,
348 first_reg as usize + (r + 1) as usize,
349 Value::Undefined
350 );
351 }
352 }
353 if l.extra_regs > 0 {
355 for r in l.input_regs..=l.input_regs + l.extra_regs {
356 mov_register!(self, first_reg as usize + r, Value::Undefined);
357 }
358 }
359 }
360}
361
362fn check_num_args(l: &Chunk, num_args: u16) -> VMResult<()> {
364 if l.rest {
365 if num_args < (l.args - 1) {
366 return Err(VMError::new_vm(format!(
367 "To few arguments, expected at least {} got {}.",
368 l.args - 1,
369 num_args
370 )));
371 }
372 } else {
373 if num_args < l.args {
374 return Err(VMError::new_vm(format!(
375 "To few arguments, expected at least {} got {}.",
376 l.args, num_args
377 )));
378 }
379 if num_args > (l.args + l.opt_args) {
380 return Err(VMError::new_vm(format!(
381 "To many arguments, expected no more than {} got {}.",
382 (l.args + l.opt_args),
383 num_args
384 )));
385 }
386 }
387 Ok(())
388}