snix_eval/
observer.rs

1//! Implements traits for things that wish to observe internal state
2//! changes of snix-eval.
3//!
4//! This can be used to gain insights from compilation, to trace the
5//! runtime, and so on.
6//!
7//! All methods are optional, that is, observers can implement only
//! what they are interested in observing.
9use std::io::Write;
10use std::rc::Rc;
11use std::time::Instant;
12use tabwriter::TabWriter;
13
14use crate::SourceCode;
15use crate::Value;
16use crate::chunk::Chunk;
17use crate::generators::VMRequest;
18use crate::opcode::{CodeIdx, Op};
19use crate::value::Lambda;
20
/// Implemented by types that wish to observe internal happenings of
/// the Snix compiler.
///
/// Every method has an empty default body, so implementors only need
/// to override the events they care about.
pub trait CompilerObserver {
    /// Called when the compiler finishes compilation of the top-level
    /// of an expression (usually the root Nix expression of a file).
    fn observe_compiled_toplevel(&mut self, _: &Rc<Lambda>) {}

    /// Called when the compiler finishes compilation of a
    /// user-defined function.
    ///
    /// Note that in Nix there are only single argument functions, so
    /// in an expression like `a: b: c: ...` this method will be
    /// called three times.
    fn observe_compiled_lambda(&mut self, _: &Rc<Lambda>) {}

    /// Called when the compiler finishes compilation of a thunk.
    fn observe_compiled_thunk(&mut self, _: &Rc<Lambda>) {}
}
39
/// Implemented by types that wish to observe internal happenings of
/// the Snix virtual machine at runtime.
///
/// Every method has an empty default body, so implementors only need
/// to override the events they care about.
pub trait RuntimeObserver {
    /// Called when the runtime enters a new call frame.
    fn observe_enter_call_frame(&mut self, _arg_count: usize, _: &Rc<Lambda>, _call_depth: usize) {}

    /// Called when the runtime exits a call frame.
    fn observe_exit_call_frame(&mut self, _frame_at: usize, _stack: &[Value]) {}

    /// Called when the runtime suspends a call frame.
    fn observe_suspend_call_frame(&mut self, _frame_at: usize, _stack: &[Value]) {}

    /// Called when the runtime enters a generator frame.
    fn observe_enter_generator(&mut self, _frame_at: usize, _name: &str, _stack: &[Value]) {}

    /// Called when the runtime exits a generator frame.
    fn observe_exit_generator(&mut self, _frame_at: usize, _name: &str, _stack: &[Value]) {}

    /// Called when the runtime suspends a generator frame.
    fn observe_suspend_generator(&mut self, _frame_at: usize, _name: &str, _stack: &[Value]) {}

    /// Called when a generator requests an action from the VM.
    fn observe_generator_request(&mut self, _name: &str, _msg: &VMRequest) {}

    /// Called when the runtime replaces the current call frame for a
    /// tail call.
    fn observe_tail_call(&mut self, _frame_at: usize, _: &Rc<Lambda>) {}

    /// Called when the runtime enters a builtin.
    fn observe_enter_builtin(&mut self, _name: &'static str) {}

    /// Called when the runtime exits a builtin.
    fn observe_exit_builtin(&mut self, _name: &'static str, _stack: &[Value]) {}

    /// Called when the runtime *begins* executing an instruction. The
    /// provided stack is the state at the beginning of the operation.
    fn observe_execute_op(&mut self, _ip: CodeIdx, _: &Op, _: &[Value]) {}
}
78
/// Observer that ignores every event.
///
/// Useful where an observer value is required by an API but no
/// observation is desired.
#[derive(Default)]
pub struct NoOpObserver {}

// The empty default implementations of both traits already do
// nothing, so no methods need to be overridden.
impl CompilerObserver for NoOpObserver {}
impl RuntimeObserver for NoOpObserver {}
84
85/// Compiler observer that is optimised for the case where no observer is being used.
86///
87/// The trait RuntimeObserver is implemented on the Optional<dyn
88/// RuntimeObserver>. This removes the dynamic dispatch overhead when
89/// no observer is being used.
90#[derive(Default)]
91pub struct OptionalCompilerObserver<'o>(pub Option<&'o mut dyn CompilerObserver>);
92impl<'o> CompilerObserver for OptionalCompilerObserver<'o> {
93    fn observe_compiled_toplevel(&mut self, lambda: &Rc<Lambda>) {
94        if let Some(ref mut obs) = self.0 {
95            obs.observe_compiled_toplevel(lambda);
96        }
97    }
98
99    fn observe_compiled_lambda(&mut self, lambda: &Rc<Lambda>) {
100        if let Some(ref mut obs) = self.0 {
101            obs.observe_compiled_lambda(lambda);
102        }
103    }
104
105    fn observe_compiled_thunk(&mut self, lambda: &Rc<Lambda>) {
106        if let Some(ref mut obs) = self.0 {
107            obs.observe_compiled_thunk(lambda)
108        }
109    }
110}
111
112impl<'o> From<&'o mut dyn CompilerObserver> for OptionalCompilerObserver<'o> {
113    fn from(val: &'o mut dyn CompilerObserver) -> Self {
114        OptionalCompilerObserver(Some(val))
115    }
116}
117
118impl<'o> From<Option<&'o mut dyn CompilerObserver>> for OptionalCompilerObserver<'o> {
119    fn from(val: Option<&'o mut dyn CompilerObserver>) -> Self {
120        Self(val)
121    }
122}
123
124/// Runtime observer that is optimised for the case where no observer is being used.
125///
126/// The trait RuntimeObserver is implemented on the Optional<dyn
127/// RuntimeObserver>. This removes the dynamic dispatch overhead when
128/// no observer is being used.
129pub struct OptionalRuntimeObserver<'o>(pub Option<&'o mut dyn RuntimeObserver>);
130
131impl<'o> From<&'o mut dyn RuntimeObserver> for OptionalRuntimeObserver<'o> {
132    fn from(val: &'o mut dyn RuntimeObserver) -> Self {
133        OptionalRuntimeObserver(Some(val))
134    }
135}
136
137impl<'o> RuntimeObserver for OptionalRuntimeObserver<'o> {
138    #[inline(always)]
139    fn observe_enter_call_frame(
140        &mut self,
141        arg_count: usize,
142        lambda: &Rc<Lambda>,
143        call_depth: usize,
144    ) {
145        if let Some(ref mut obs) = self.0 {
146            obs.observe_enter_call_frame(arg_count, lambda, call_depth);
147        }
148    }
149
150    #[inline(always)]
151    fn observe_exit_call_frame(&mut self, frame_at: usize, stack: &[Value]) {
152        if let Some(ref mut obs) = self.0 {
153            obs.observe_exit_call_frame(frame_at, stack);
154        }
155    }
156
157    #[inline(always)]
158    fn observe_suspend_call_frame(&mut self, frame_at: usize, stack: &[Value]) {
159        if let Some(ref mut obs) = self.0 {
160            obs.observe_suspend_call_frame(frame_at, stack);
161        }
162    }
163
164    #[inline(always)]
165    fn observe_enter_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) {
166        if let Some(ref mut obs) = self.0 {
167            obs.observe_enter_generator(frame_at, name, stack);
168        }
169    }
170
171    #[inline(always)]
172    fn observe_exit_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) {
173        if let Some(ref mut obs) = self.0 {
174            obs.observe_exit_generator(frame_at, name, stack);
175        }
176    }
177
178    #[inline(always)]
179    fn observe_suspend_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) {
180        if let Some(ref mut obs) = self.0 {
181            obs.observe_suspend_generator(frame_at, name, stack);
182        }
183    }
184
185    #[inline(always)]
186    fn observe_generator_request(&mut self, name: &str, msg: &VMRequest) {
187        if let Some(ref mut obs) = self.0 {
188            obs.observe_generator_request(name, msg);
189        }
190    }
191
192    #[inline(always)]
193    fn observe_tail_call(&mut self, frame_at: usize, lambda: &Rc<Lambda>) {
194        if let Some(ref mut obs) = self.0 {
195            obs.observe_tail_call(frame_at, lambda);
196        }
197    }
198
199    #[inline(always)]
200    fn observe_enter_builtin(&mut self, name: &'static str) {
201        if let Some(ref mut obs) = self.0 {
202            obs.observe_enter_builtin(name);
203        }
204    }
205
206    #[inline(always)]
207    fn observe_exit_builtin(&mut self, name: &'static str, stack: &[Value]) {
208        if let Some(ref mut obs) = self.0 {
209            obs.observe_exit_builtin(name, stack);
210        }
211    }
212
213    #[inline(always)]
214    fn observe_execute_op(&mut self, ip: CodeIdx, op: &Op, stack: &[Value]) {
215        if let Some(ref mut obs) = self.0 {
216            obs.observe_execute_op(ip, op, stack);
217        }
218    }
219}
220
/// An observer that prints disassembled chunk information to its
/// internal writer whenever the compiler emits a toplevel function,
/// closure or thunk.
pub struct DisassemblingObserver<W: Write> {
    // Source table handed to `Chunk::disassemble_op` when printing.
    source: SourceCode,
    // Output goes through a `TabWriter` so the tab-separated columns
    // of the disassembly line up.
    writer: TabWriter<W>,
}
228
229impl<W: Write> DisassemblingObserver<W> {
230    pub fn new(source: SourceCode, writer: W) -> Self {
231        Self {
232            source,
233            writer: TabWriter::new(writer),
234        }
235    }
236
237    fn lambda_header(&mut self, kind: &str, lambda: &Rc<Lambda>) {
238        let _ = writeln!(
239            &mut self.writer,
240            "=== compiled {} @ {:p} ({} ops, {} length) ===",
241            kind,
242            *lambda,
243            lambda.chunk.op_count(),
244            lambda.chunk.code.len(),
245        );
246    }
247
248    fn disassemble_chunk(&mut self, chunk: &Chunk) {
249        // calculate width of the widest address in the chunk
250        let width = format!("{:#x}", chunk.code.len() - 1).len();
251
252        let mut idx = 0;
253        while idx < chunk.code.len() {
254            let size = chunk
255                .disassemble_op(&mut self.writer, &self.source, width, CodeIdx(idx))
256                .expect("writing debug output should work");
257            idx += size;
258        }
259    }
260}
261
262impl<W: Write> CompilerObserver for DisassemblingObserver<W> {
263    fn observe_compiled_toplevel(&mut self, lambda: &Rc<Lambda>) {
264        self.lambda_header("toplevel", lambda);
265        self.disassemble_chunk(&lambda.chunk);
266        let _ = self.writer.flush();
267    }
268
269    fn observe_compiled_lambda(&mut self, lambda: &Rc<Lambda>) {
270        self.lambda_header("lambda", lambda);
271        self.disassemble_chunk(&lambda.chunk);
272        let _ = self.writer.flush();
273    }
274
275    fn observe_compiled_thunk(&mut self, lambda: &Rc<Lambda>) {
276        self.lambda_header("thunk", lambda);
277        self.disassemble_chunk(&lambda.chunk);
278        let _ = self.writer.flush();
279    }
280}
281
/// An observer that collects a textual representation of an entire
/// runtime execution.
pub struct TracingObserver<W: Write> {
    // If timing is enabled, contains the timestamp of the last-emitted trace event
    last_event: Option<Instant>,
    // Output goes through a `TabWriter` so the tab-separated trace
    // columns line up; flushed on drop.
    writer: TabWriter<W>,
}
289
290impl<W: Write> TracingObserver<W> {
291    pub fn new(writer: W) -> Self {
292        Self {
293            last_event: None,
294            writer: TabWriter::new(writer),
295        }
296    }
297
298    /// Write the time of each runtime event, relative to when this method is called
299    pub fn enable_timing(&mut self) {
300        self.last_event = Some(Instant::now());
301    }
302
303    fn maybe_write_time(&mut self) {
304        if let Some(last_event) = &mut self.last_event {
305            let _ = write!(&mut self.writer, "+{}ns\t", last_event.elapsed().as_nanos());
306            *last_event = Instant::now();
307        }
308    }
309
310    fn write_value(&mut self, val: &Value) {
311        let _ = match val {
312            // Potentially large types which we only want to print
313            // the type of (and avoid recursing).
314            Value::List(l) => write!(&mut self.writer, "list[{}] ", l.len()),
315            Value::Attrs(a) => write!(&mut self.writer, "attrs[{}] ", a.len()),
316            Value::Thunk(t) if t.is_evaluated() => {
317                self.write_value(&t.value());
318                Ok(())
319            }
320
321            // For other value types, defer to the standard value printer.
322            _ => write!(&mut self.writer, "{val} "),
323        };
324    }
325
326    fn write_stack(&mut self, stack: &[Value]) {
327        let _ = write!(&mut self.writer, "[ ");
328
329        // Print out a maximum of 6 values from the top of the stack,
330        // before abbreviating it to `...`.
331        for (i, val) in stack.iter().rev().enumerate() {
332            if i == 6 {
333                let _ = write!(&mut self.writer, "...");
334                break;
335            }
336
337            self.write_value(val);
338        }
339
340        let _ = writeln!(&mut self.writer, "]");
341    }
342}
343
344impl<W: Write> RuntimeObserver for TracingObserver<W> {
345    fn observe_enter_call_frame(
346        &mut self,
347        arg_count: usize,
348        lambda: &Rc<Lambda>,
349        call_depth: usize,
350    ) {
351        self.maybe_write_time();
352
353        let _ = write!(&mut self.writer, "=== entering ");
354
355        let _ = if arg_count == 0 {
356            write!(&mut self.writer, "thunk ")
357        } else {
358            write!(&mut self.writer, "closure ")
359        };
360
361        if let Some(name) = &lambda.name {
362            let _ = write!(&mut self.writer, "'{name}' ");
363        }
364
365        let _ = writeln!(
366            &mut self.writer,
367            "in frame[{}] @ {:p} ===",
368            call_depth, *lambda
369        );
370    }
371
372    /// Called when the runtime exits a call frame.
373    fn observe_exit_call_frame(&mut self, frame_at: usize, stack: &[Value]) {
374        self.maybe_write_time();
375        let _ = write!(&mut self.writer, "=== exiting frame {frame_at} ===\t ");
376        self.write_stack(stack);
377    }
378
379    fn observe_suspend_call_frame(&mut self, frame_at: usize, stack: &[Value]) {
380        self.maybe_write_time();
381        let _ = write!(&mut self.writer, "=== suspending frame {frame_at} ===\t");
382
383        self.write_stack(stack);
384    }
385
386    fn observe_enter_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) {
387        self.maybe_write_time();
388        let _ = write!(
389            &mut self.writer,
390            "=== entering generator frame '{name}' [{frame_at}] ===\t",
391        );
392
393        self.write_stack(stack);
394    }
395
396    fn observe_exit_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) {
397        self.maybe_write_time();
398        let _ = write!(
399            &mut self.writer,
400            "=== exiting generator '{name}' [{frame_at}] ===\t"
401        );
402
403        self.write_stack(stack);
404    }
405
406    fn observe_suspend_generator(&mut self, frame_at: usize, name: &str, stack: &[Value]) {
407        self.maybe_write_time();
408        let _ = write!(
409            &mut self.writer,
410            "=== suspending generator '{name}' [{frame_at}] ===\t"
411        );
412
413        self.write_stack(stack);
414    }
415
416    fn observe_generator_request(&mut self, name: &str, msg: &VMRequest) {
417        self.maybe_write_time();
418        let _ = writeln!(
419            &mut self.writer,
420            "=== generator '{name}' requested {msg} ==="
421        );
422    }
423
424    fn observe_enter_builtin(&mut self, name: &'static str) {
425        self.maybe_write_time();
426        let _ = writeln!(&mut self.writer, "=== entering builtin {name} ===");
427    }
428
429    fn observe_exit_builtin(&mut self, name: &'static str, stack: &[Value]) {
430        self.maybe_write_time();
431        let _ = write!(&mut self.writer, "=== exiting builtin {name} ===\t");
432        self.write_stack(stack);
433    }
434
435    fn observe_tail_call(&mut self, frame_at: usize, lambda: &Rc<Lambda>) {
436        self.maybe_write_time();
437        let _ = writeln!(
438            &mut self.writer,
439            "=== tail-calling {:p} in frame[{}] ===",
440            *lambda, frame_at
441        );
442    }
443
444    fn observe_execute_op(&mut self, ip: CodeIdx, op: &Op, stack: &[Value]) {
445        self.maybe_write_time();
446        let _ = write!(&mut self.writer, "{:04} {:?}\t", ip.0, op);
447        self.write_stack(stack);
448    }
449}
450
impl<W: Write> Drop for TracingObserver<W> {
    fn drop(&mut self) {
        // Flush any trace output still buffered in the TabWriter when
        // the observer goes out of scope; errors are ignored because
        // this is best-effort debug output.
        let _ = self.writer.flush();
    }
}