snix_eval/compiler/
mod.rs

//! This module implements a compiler for compiling the rnix AST
//! representation to Snix bytecode.
//!
//! A note on `unwrap()`: This module contains a lot of calls to
//! `unwrap()` or `expect(...)` on data structures returned by `rnix`.
//! The reason for this is that rnix uses the same data structures to
//! represent broken and correct ASTs, so all typed AST variants have
//! the ability to represent an incorrect node.
//!
//! However, at the time that the AST is passed to the compiler we
//! have verified that `rnix` considers the code to be correct, so all
//! variants are fulfilled. In cases where the invariant is guaranteed
//! by the code in this module, `debug_assert!` has been used to catch
//! mistakes early during development.

mod bindings;
mod import;
mod optimiser;
mod scope;

use codemap::Span;
use rnix::ast::{self, AstToken};
use rustc_hash::FxHashMap;
use smol_str::SmolStr;
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::rc::{Rc, Weak};

use crate::CoercionKind;
use crate::SourceCode;
use crate::chunk::Chunk;
use crate::errors::{CatchableErrorKind, Error, ErrorKind, EvalResult};
use crate::observer::CompilerObserver;
use crate::opcode::{CodeIdx, Op, Position, UpvalueIdx};
use crate::spans::ToSpan;
use crate::value::{Closure, Formals, Lambda, NixAttrs, Thunk, Value};
use crate::warnings::{EvalWarning, WarningKind};

use self::scope::{LocalIdx, LocalPosition, Scope, Upvalue, UpvalueKind};

/// Represents the result of compiling a piece of Nix code. If
/// compilation was successful, the resulting bytecode can be passed
/// to the VM.
pub struct CompilationOutput {
    pub lambda: Rc<Lambda>,
    pub warnings: Vec<EvalWarning>,
    pub errors: Vec<Error>,
}

/// Represents the lambda currently being compiled.
struct LambdaCtx {
    lambda: Lambda,
    scope: Scope,
    captures_with_stack: bool,
}

impl LambdaCtx {
    fn new() -> Self {
        LambdaCtx {
            lambda: Lambda::default(),
            scope: Default::default(),
            captures_with_stack: false,
        }
    }

    fn inherit(&self) -> Self {
        LambdaCtx {
            lambda: Lambda::default(),
            scope: self.scope.inherit(),
            captures_with_stack: false,
        }
    }
}

/// When compiling functions with an argument attribute set destructuring pattern,
/// we need to do multiple passes over the declared formal arguments when setting
/// up their local bindings (similarly to `let … in` expressions and recursive
/// attribute sets). For this purpose, this struct is used to represent the two
/// kinds of formal arguments:
///
/// - `TrackedFormal::NoDefault` is always required and causes an evaluation error
///   if the corresponding attribute is missing in a function call.
/// - `TrackedFormal::WithDefault` may be missing in the passed attribute set,
///   in which case a `default_expr` will be evaluated and placed in the formal
///   argument's local variable slot.
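///
/// As an illustration, in a pattern like
///
/// ```nix
/// { a, b ? 2 }: a + b
/// ```
///
/// `a` is tracked as `TrackedFormal::NoDefault` and `b` as
/// `TrackedFormal::WithDefault`.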
enum TrackedFormal {
    NoDefault {
        local_idx: LocalIdx,
        pattern_entry: ast::PatEntry,
    },
    WithDefault {
        local_idx: LocalIdx,
        /// Extra phantom local used for coordinating runtime dispatching not observable to
        /// the language user. Detailed description in `compile_param_pattern()`.
        finalise_request_idx: LocalIdx,
        default_expr: ast::Expr,
        pattern_entry: ast::PatEntry,
    },
}

impl TrackedFormal {
    fn pattern_entry(&self) -> &ast::PatEntry {
        match self {
            TrackedFormal::NoDefault { pattern_entry, .. } => pattern_entry,
            TrackedFormal::WithDefault { pattern_entry, .. } => pattern_entry,
        }
    }

    fn local_idx(&self) -> LocalIdx {
        match self {
            TrackedFormal::NoDefault { local_idx, .. } => *local_idx,
            TrackedFormal::WithDefault { local_idx, .. } => *local_idx,
        }
    }
}

/// The map of globally available functions and other values that
/// should implicitly be resolvable in the global scope.
pub type GlobalsMap = FxHashMap<&'static str, Value>;

/// Set of builtins that (if they exist) should be made available in
/// the global scope, meaning that they can be accessed not just
/// through `builtins.<name>`, but directly as `<name>`. This is not
/// configurable; it is based on what Nix 2.3 exposed.
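///
/// For example, because `isNull` appears in this list, both of the
/// following are valid:
///
/// ```nix
/// [ (isNull null) (builtins.isNull null) ]
/// ```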
const GLOBAL_BUILTINS: &[&str] = &[
    "abort",
    "baseNameOf",
    "derivation",
    "derivationStrict",
    "dirOf",
    "fetchGit",
    "fetchMercurial",
    "fetchTarball",
    "fromTOML",
    "import",
    "isNull",
    "map",
    "placeholder",
    "removeAttrs",
    "scopedImport",
    "throw",
    "toString",
    "__curPos",
];

pub struct Compiler<'source, 'observer> {
    contexts: Vec<LambdaCtx>,
    warnings: Vec<EvalWarning>,
    errors: Vec<Error>,
    root_dir: PathBuf,

    /// Carries all known global tokens, the full set of which is
    /// created when the compiler is invoked.
    ///
    /// Each global has an associated token, which when encountered as
    /// an identifier is resolved against the scope poisoning logic,
    /// and a function that should emit code for the token.
    globals: Rc<GlobalsMap>,

    /// Reference to the struct holding all of the source code, which
    /// is used for error creation.
    source: &'source SourceCode,

    /// File reference in the source map for the current file, which
    /// is used for creating spans.
    file: &'source codemap::File,

    /// Carry an observer for the compilation process, which is called
    /// whenever a chunk is emitted.
    observer: &'observer mut dyn CompilerObserver,

    /// Carry a count of nested scopes which have requested the
    /// compiler not to emit anything. This is used for compiling dead
    /// code branches to catch errors & warnings in them.
    dead_scope: usize,
}

impl Compiler<'_, '_> {
    pub(super) fn span_for<S: ToSpan>(&self, to_span: &S) -> Span {
        to_span.span_for(self.file)
    }
}

/// Compiler construction
impl<'source, 'observer> Compiler<'source, 'observer> {
    pub(crate) fn new(
        location: Option<PathBuf>,
        globals: Rc<GlobalsMap>,
        env: Option<&FxHashMap<SmolStr, Value>>,
        source: &'source SourceCode,
        file: &'source codemap::File,
        observer: &'observer mut dyn CompilerObserver,
    ) -> EvalResult<Self> {
        let mut root_dir = match location {
            Some(dir) if cfg!(target_arch = "wasm32") || dir.is_absolute() => Ok(dir),
            _ => {
                let current_dir = std::env::current_dir().map_err(|e| {
                    Error::new(
                        ErrorKind::RelativePathResolution(format!(
                            "could not determine current directory: {e}"
                        )),
                        file.span,
                        source.clone(),
                    )
                })?;
                if let Some(dir) = location {
                    Ok(current_dir.join(dir))
                } else {
                    Ok(current_dir)
                }
            }
        }?;

        // If the path passed from the caller points to a file, the
        // filename itself needs to be truncated as this must point to a
        // directory.
        if root_dir.is_file() {
            root_dir.pop();
        }

        #[cfg(not(target_arch = "wasm32"))]
        debug_assert!(root_dir.is_absolute());

        let mut compiler = Self {
            root_dir,
            source,
            file,
            observer,
            globals,
            contexts: vec![LambdaCtx::new()],
            warnings: vec![],
            errors: vec![],
            dead_scope: 0,
        };

        if let Some(env) = env {
            compiler.compile_env(env);
        }

        Ok(compiler)
    }
}

// Helper functions for emitting code and metadata to the internal
// structures of the compiler.
impl Compiler<'_, '_> {
    fn context(&self) -> &LambdaCtx {
        &self.contexts[self.contexts.len() - 1]
    }

    fn context_mut(&mut self) -> &mut LambdaCtx {
        let idx = self.contexts.len() - 1;
        &mut self.contexts[idx]
    }

    fn chunk(&mut self) -> &mut Chunk {
        &mut self.context_mut().lambda.chunk
    }

    fn scope(&self) -> &Scope {
        &self.context().scope
    }

    fn scope_mut(&mut self) -> &mut Scope {
        &mut self.context_mut().scope
    }

    /// Push a single instruction to the current bytecode chunk and
    /// track the source span from which it was compiled.
    fn push_op<T: ToSpan>(&mut self, data: Op, node: &T) -> CodeIdx {
        if self.dead_scope > 0 {
            return CodeIdx(0);
        }

        let span = self.span_for(node);
        CodeIdx(self.chunk().push_op(data, span))
    }

    fn push_u8(&mut self, data: u8) {
        if self.dead_scope > 0 {
            return;
        }

        self.chunk().code.push(data);
    }

    fn push_uvarint(&mut self, data: u64) {
        if self.dead_scope > 0 {
            return;
        }

        self.chunk().push_uvarint(data);
    }

    fn push_u16(&mut self, data: u16) {
        if self.dead_scope > 0 {
            return;
        }

        self.chunk().push_u16(data);
    }

    /// Emit a single constant to the current bytecode chunk and track
    /// the source span from which it was compiled.
    pub(super) fn emit_constant<T: ToSpan>(&mut self, value: Value, node: &T) {
        if self.dead_scope > 0 {
            return;
        }

        let idx = self.chunk().push_constant(value);
        self.push_op(Op::Constant, node);
        self.push_uvarint(idx.0 as u64);
    }
}

// Actual code-emitting AST traversal methods.
impl Compiler<'_, '_> {
    fn compile(&mut self, slot: LocalIdx, expr: ast::Expr) {
        let expr = optimiser::optimise_expr(self, slot, expr);

        match &expr {
            ast::Expr::Literal(literal) => self.compile_literal(literal),
            ast::Expr::Path(path) => self.compile_path(slot, path),
            ast::Expr::Str(s) => self.compile_str(slot, s),

            ast::Expr::UnaryOp(op) => self.thunk(slot, op, move |c, s| c.compile_unary_op(s, op)),

            ast::Expr::BinOp(binop) => {
                self.thunk(slot, binop, move |c, s| c.compile_binop(s, binop))
            }

            ast::Expr::HasAttr(has_attr) => {
                self.thunk(slot, has_attr, move |c, s| c.compile_has_attr(s, has_attr))
            }

            ast::Expr::List(list) => self.thunk(slot, list, move |c, s| c.compile_list(s, list)),

            ast::Expr::AttrSet(attrs) => {
                self.thunk(slot, attrs, move |c, s| c.compile_attr_set(s, attrs))
            }

            ast::Expr::Select(select) => {
                self.thunk(slot, select, move |c, s| c.compile_select(s, select))
            }

            ast::Expr::Assert(assert) => {
                self.thunk(slot, assert, move |c, s| c.compile_assert(s, assert))
            }

            ast::Expr::IfElse(if_else) => {
                self.thunk(slot, if_else, move |c, s| c.compile_if_else(s, if_else))
            }

            ast::Expr::LetIn(let_in) => {
                self.thunk(slot, let_in, move |c, s| c.compile_let_in(s, let_in))
            }

            ast::Expr::Ident(ident) => self.compile_ident(slot, ident),
            ast::Expr::With(with) => self.thunk(slot, with, |c, s| c.compile_with(s, with)),
            ast::Expr::Lambda(lambda) => self.thunk(slot, lambda, move |c, s| {
                c.compile_lambda_or_thunk(false, s, lambda, |c, s| c.compile_lambda(s, lambda))
            }),
            ast::Expr::Apply(apply) => {
                self.thunk(slot, apply, move |c, s| c.compile_apply(s, apply))
            }

            // Parenthesized expressions are simply unwrapped, leaving
            // their value on the stack.
            ast::Expr::Paren(paren) => self.compile(slot, paren.expr().unwrap()),

            ast::Expr::LegacyLet(legacy_let) => self.thunk(slot, legacy_let, move |c, s| {
                c.compile_legacy_let(s, legacy_let)
            }),

            ast::Expr::Root(_) => unreachable!("there cannot be more than one root"),
            ast::Expr::Error(_) => unreachable!("compile is only called on validated trees"),
        }
    }

    /// Compiles an expression, but does not emit any code for it as
    /// it is considered dead. This will still catch errors and
    /// warnings in that expression.
    ///
    /// A warning about the code being dead is assumed to have already
    /// been emitted by the caller.
    fn compile_dead_code(&mut self, slot: LocalIdx, node: ast::Expr) {
        self.dead_scope += 1;
        self.compile(slot, node);
        self.dead_scope -= 1;
    }

    fn compile_literal(&mut self, node: &ast::Literal) {
        let value = match node.kind() {
            ast::LiteralKind::Float(f) => Value::Float(f.value().unwrap()),
            ast::LiteralKind::Integer(i) => match i.value() {
                Ok(v) => Value::Integer(v),
                Err(err) => return self.emit_error(node, err.into()),
            },

            ast::LiteralKind::Uri(u) => {
                self.emit_warning(node, WarningKind::DeprecatedLiteralURL);
                Value::from(u.syntax().text())
            }
        };

        self.emit_constant(value, node);
    }

    fn compile_path(&mut self, slot: LocalIdx, node: &ast::Path) {
        // TODO(tazjin): placeholder implementation while waiting for
        // https://github.com/nix-community/rnix-parser/pull/96

        let raw_path = node.to_string();
        let path = if raw_path.starts_with('/') {
            Path::new(&raw_path).to_owned()
        } else if raw_path.starts_with('~') {
            // We assume that home paths start with ~/ or fail to parse
            // TODO: this should be checked using a parse-fail test.
            debug_assert!(raw_path.len() > 2 && raw_path.starts_with("~/"));

            let home_relative_path = &raw_path[2..(raw_path.len())];
            self.emit_constant(
                Value::UnresolvedPath(Box::new(home_relative_path.into())),
                node,
            );
            self.push_op(Op::ResolveHomePath, node);
            return;
        } else if raw_path.starts_with('<') {
            // TODO: decide what to do with findFile
            if raw_path.len() == 2 {
                return self.emit_constant(
                    Value::Catchable(Box::new(CatchableErrorKind::NixPathResolution(
                        "Empty <> path not allowed".into(),
                    ))),
                    node,
                );
            }
            let path = &raw_path[1..(raw_path.len() - 1)];
            // Make a thunk to resolve the path (without using `findFile`, at least for now?)
            return self.thunk(slot, node, move |c, _| {
                c.emit_constant(Value::UnresolvedPath(Box::new(path.into())), node);
                c.push_op(Op::FindFile, node);
            });
        } else {
            let mut buf = self.root_dir.clone();
            buf.push(&raw_path);
            buf
        };

        // TODO: Use https://github.com/rust-lang/rfcs/issues/2208
        // once it is available
        let value = Value::Path(Box::new(crate::value::canon_path(path)));
        self.emit_constant(value, node);
    }
    /// Helper that compiles the given string parts strictly. The caller
    /// (`compile_str`) needs to figure out if the result of compiling this
    /// needs to be thunked or not.
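    ///
    /// As a sketch of the emitted code (opcode names abbreviated), a
    /// string like `"a${b}c"` consists of three parts which are pushed
    /// in reverse order before a single interpolation instruction:
    ///
    /// ```notrust
    /// CONSTANT("c")
    /// <code for b>; COERCE_TO_STRING
    /// CONSTANT("a")
    /// INTERPOLATE(3)
    /// ```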
    fn compile_str_parts(
        &mut self,
        slot: LocalIdx,
        parent_node: &ast::Str,
        parts: Vec<ast::InterpolPart<String>>,
    ) {
        // The string parts are produced in literal order, however
        // they need to be reversed on the stack in order to
        // efficiently create the real string in case of
        // interpolation.
        for part in parts.iter().rev() {
            match part {
                // Interpolated expressions are compiled as normal and
                // dealt with by the VM before being assembled into
                // the final string. We need to coerce them here,
                // so OpInterpolate definitely has a string to consume.
                ast::InterpolPart::Interpolation(ipol) => {
                    self.compile(slot, ipol.expr().unwrap());
                    // implicitly forces as well
                    self.push_op(Op::CoerceToString, ipol);

                    let encoded: u8 = CoercionKind {
                        strong: false,
                        import_paths: true,
                    }
                    .into();

                    self.push_u8(encoded);
                }

                ast::InterpolPart::Literal(lit) => {
                    self.emit_constant(Value::from(lit.as_str()), parent_node);
                }
            }
        }

        if parts.len() != 1 {
            self.push_op(Op::Interpolate, parent_node);
            self.push_uvarint(parts.len() as u64);
        }
    }

    fn compile_str(&mut self, slot: LocalIdx, node: &ast::Str) {
        let parts = node.normalized_parts();

        // We need to thunk string expressions if they are the result of
        // interpolation. A string that only consists of a single part (`"${foo}"`)
        // can't desugar to the enclosed expression (`foo`) because we need to
        // coerce the result to a string value. This would require forcing the
        // value of the inner expression, so we need to wrap it in another thunk.
        if parts.len() != 1 || matches!(&parts[0], ast::InterpolPart::Interpolation(_)) {
            self.thunk(slot, node, move |c, s| {
                c.compile_str_parts(s, node, parts);
            });
        } else {
            self.compile_str_parts(slot, node, parts);
        }
    }

    fn compile_unary_op(&mut self, slot: LocalIdx, op: &ast::UnaryOp) {
        self.compile(slot, op.expr().unwrap());
        self.emit_force(op);

        let opcode = match op.operator().unwrap() {
            ast::UnaryOpKind::Invert => Op::Invert,
            ast::UnaryOpKind::Negate => Op::Negate,
        };

        self.push_op(opcode, op);
    }

    fn compile_binop(&mut self, slot: LocalIdx, op: &ast::BinOp) {
        use ast::BinOpKind;

        // Short-circuiting and other strange operators, which are
        // under the same node type as NODE_BIN_OP, but need to be
        // handled separately (i.e. before compiling the expressions
        // used for standard binary operators).

        match op.operator().unwrap() {
            BinOpKind::And => return self.compile_and(slot, op),
            BinOpKind::Or => return self.compile_or(slot, op),
            BinOpKind::Implication => return self.compile_implication(slot, op),
            _ => {}
        };

        // For all other operators, the two values need to be left on
        // the stack in the correct order before pushing the
        // instruction for the operation itself.
        self.compile(slot, op.lhs().unwrap());
        self.emit_force(&op.lhs().unwrap());

        self.compile(slot, op.rhs().unwrap());
        self.emit_force(&op.rhs().unwrap());

        match op.operator().unwrap() {
            BinOpKind::Add => self.push_op(Op::Add, op),
            BinOpKind::Sub => self.push_op(Op::Sub, op),
            BinOpKind::Mul => self.push_op(Op::Mul, op),
            BinOpKind::Div => self.push_op(Op::Div, op),
            BinOpKind::Update => self.push_op(Op::AttrsUpdate, op),
            BinOpKind::Equal => self.push_op(Op::Equal, op),
            BinOpKind::Less => self.push_op(Op::Less, op),
            BinOpKind::LessOrEq => self.push_op(Op::LessOrEq, op),
            BinOpKind::More => self.push_op(Op::More, op),
            BinOpKind::MoreOrEq => self.push_op(Op::MoreOrEq, op),
            BinOpKind::Concat => self.push_op(Op::Concat, op),

            BinOpKind::NotEqual => {
                self.push_op(Op::Equal, op);
                self.push_op(Op::Invert, op)
            }

            // Handled by separate branch above.
            BinOpKind::And | BinOpKind::Implication | BinOpKind::Or => {
                unreachable!()
            }
        };
    }
    fn compile_and(&mut self, slot: LocalIdx, node: &ast::BinOp) {
        debug_assert!(
            matches!(node.operator(), Some(ast::BinOpKind::And)),
            "compile_and called with wrong operator kind: {:?}",
            node.operator(),
        );

        // Leave left-hand side value on the stack.
        self.compile(slot, node.lhs().unwrap());
        self.emit_force(&node.lhs().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);
        // If this value is false, jump over the right-hand side - the
        // whole expression is false.
        let end_idx = self.push_op(Op::JumpIfFalse, node);
        self.push_u16(0);

        // Otherwise, remove the previous value and leave the
        // right-hand side on the stack. Its result is now the value
        // of the whole expression.
        self.push_op(Op::Pop, node);
        self.compile(slot, node.rhs().unwrap());
        self.emit_force(&node.rhs().unwrap());

        self.patch_jump(end_idx);
        self.push_op(Op::AssertBool, node);
        self.patch_jump(throw_idx);
    }

    fn compile_or(&mut self, slot: LocalIdx, node: &ast::BinOp) {
        debug_assert!(
            matches!(node.operator(), Some(ast::BinOpKind::Or)),
            "compile_or called with wrong operator kind: {:?}",
            node.operator(),
        );

        // Leave left-hand side value on the stack
        self.compile(slot, node.lhs().unwrap());
        self.emit_force(&node.lhs().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);
        // Opposite of above: If this value is **true**, we can
        // short-circuit the right-hand side.
        let end_idx = self.push_op(Op::JumpIfTrue, node);
        self.push_u16(0);
        self.push_op(Op::Pop, node);
        self.compile(slot, node.rhs().unwrap());
        self.emit_force(&node.rhs().unwrap());

        self.patch_jump(end_idx);
        self.push_op(Op::AssertBool, node);
        self.patch_jump(throw_idx);
    }

    fn compile_implication(&mut self, slot: LocalIdx, node: &ast::BinOp) {
        debug_assert!(
            matches!(node.operator(), Some(ast::BinOpKind::Implication)),
            "compile_implication called with wrong operator kind: {:?}",
            node.operator(),
        );

        // Leave left-hand side value on the stack and invert it.
        self.compile(slot, node.lhs().unwrap());
        self.emit_force(&node.lhs().unwrap());
        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);
        self.push_op(Op::Invert, node);

        // Exactly as `||` (because `a -> b` = `!a || b`).
        let end_idx = self.push_op(Op::JumpIfTrue, node);
        self.push_u16(0);

        self.push_op(Op::Pop, node);
        self.compile(slot, node.rhs().unwrap());
        self.emit_force(&node.rhs().unwrap());

        self.patch_jump(end_idx);
        self.push_op(Op::AssertBool, node);
        self.patch_jump(throw_idx);
    }

    /// Compile list literals into equivalent bytecode. List
    /// construction is fairly simple, consisting of pushing code for
    /// each literal element and an instruction with the element
    /// count.
    ///
    /// The VM, after evaluating the code for each element, simply
    /// constructs the list from the given number of elements.
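    ///
    /// As an illustration, `[ 1 2 3 ]` compiles roughly to:
    ///
    /// ```notrust
    /// CONSTANT(1)
    /// CONSTANT(2)
    /// CONSTANT(3)
    /// LIST(3)
    /// ```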
    fn compile_list(&mut self, slot: LocalIdx, node: &ast::List) {
        let mut count = 0;

        // Open a temporary scope to correctly account for stack items
        // that exist during the construction.
        self.scope_mut().begin_scope();

        for item in node.items() {
            // Start tracing new stack slots from the second list
            // element onwards. The first list element is located in
            // the stack slot of the list itself.
            let item_slot = match count {
                0 => slot,
                _ => {
                    let item_span = self.span_for(&item);
                    self.scope_mut().declare_phantom(item_span, false)
                }
            };

            count += 1;
            self.compile(item_slot, item);
            self.scope_mut().mark_initialised(item_slot);
        }

        self.push_op(Op::List, node);
        self.push_uvarint(count as u64);
        self.scope_mut().end_scope();
    }

    fn compile_attr(&mut self, slot: LocalIdx, node: &ast::Attr) {
        match node {
            ast::Attr::Dynamic(dynamic) => {
                self.compile(slot, dynamic.expr().unwrap());
                self.emit_force(&dynamic.expr().unwrap());
            }

            ast::Attr::Str(s) => {
                self.compile_str(slot, s);
                self.emit_force(s);
            }

            ast::Attr::Ident(ident) => self.emit_literal_ident(ident),
        }
    }

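    /// Compile a `?` (has-attr) expression such as `{ a.b = 1; } ? a.b`,
    /// which tests for the presence of an attribute path and leaves a
    /// boolean on the stack.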
    fn compile_has_attr(&mut self, slot: LocalIdx, node: &ast::HasAttr) {
        // Put the attribute set on the stack.
        self.compile(slot, node.expr().unwrap());
        self.emit_force(node);

        // Push all path fragments with an operation for fetching the
        // next nested element, for all fragments except the last one.
        for (count, fragment) in node.attrpath().unwrap().attrs().enumerate() {
            if count > 0 {
                self.push_op(Op::AttrsTrySelect, &fragment);
                self.emit_force(&fragment);
            }

            self.compile_attr(slot, &fragment);
        }

        // After the last fragment, emit the actual instruction that
        // leaves a boolean on the stack.
        self.push_op(Op::HasAttr, node);
    }

    /// When compiling select or select_or expressions, an optimisation
    /// is possible: if compiling the set emitted a constant attribute
    /// set, the associated constant can immediately be replaced with
    /// the selected value. For example, in `{ a = 1; }.a` the attribute
    /// set is statically known, so the select can be folded into the
    /// constant `1` at compile time.
    ///
    /// We take care not to emit an error here, as that would interfere with
    /// thunking behaviour (there can be perfectly valid Nix code that accesses
    /// a statically known attribute set that is lacking a key, because that
    /// thunk is never evaluated). If anything is missing, just inform the
    /// caller that the optimisation did not take place and move on. We may want
    /// to emit warnings here in the future.
    fn optimise_select(&mut self, path: &ast::Attrpath) -> bool {
        if let Some((Op::Constant, op_idx)) = self.chunk().last_op() {
            let (idx, _) = self.chunk().read_uvarint(op_idx + 1);
            let constant = &mut self.chunk().constants[idx as usize];
            if let Value::Attrs(attrs) = constant {
                let mut path_iter = path.attrs();

                // Only do this optimisation if there is a *single*
                // element in the attribute path. It is extremely
                // unlikely that we'd have a static nested set.
                if let (Some(attr), None) = (path_iter.next(), path_iter.next()) {
                    // Only do this optimisation for statically known attrs.
                    if let Some(ident) = expr_static_attr_str(&attr) {
                        if let Some(selected_value) = attrs.select(ident.as_bytes()) {
                            *constant = selected_value.clone();
                            return true;
                        }
                    }
                }
            }
        }

        false
    }

    fn compile_select(&mut self, slot: LocalIdx, node: &ast::Select) {
        let set = node.expr().unwrap();
        let path = node.attrpath().unwrap();

        if node.or_token().is_some() {
            return self.compile_select_or(slot, set, path, node.default_expr().unwrap());
        }

        // Push the set onto the stack
        self.compile(slot, set.clone());
        if self.optimise_select(&path) {
            return;
        }

        // Compile each key fragment and emit access instructions.
        //
        // TODO: multi-select instruction to avoid re-pushing attrs on
        // nested selects.
        for fragment in path.attrs() {
            // Force the current set value.
            self.emit_force(&set);

            self.compile_attr(slot, &fragment);
            self.push_op(Op::AttrsSelect, &fragment);
        }
    }

    /// Compile an `or` expression into a chunk of conditional jumps.
    ///
    /// If at any point during attribute set traversal a key is
    /// missing, the `OpAttrOrNotFound` instruction will leave a
    /// special sentinel value on the stack.
    ///
    /// After each access, a conditional jump evaluates the top of the
    /// stack and short-circuits to the default value if it sees the
    /// sentinel.
    ///
    /// Code like `{ a.b = 1; }.a.c or 42` yields this bytecode and
    /// runtime stack:
    ///
    /// ```notrust
    ///            Bytecode                     Runtime stack
    ///  ┌────────────────────────────┐   ┌─────────────────────────┐
    ///  │    ...                     │   │ ...                     │
    ///  │ 5  OP_ATTRS(1)             │ → │ 5  [ { a.b = 1; }     ] │
    ///  │ 6  OP_CONSTANT("a")        │ → │ 6  [ { a.b = 1; } "a" ] │
    ///  │ 7  OP_ATTR_OR_NOT_FOUND    │ → │ 7  [ { b = 1; }       ] │
    ///  │ 8  JUMP_IF_NOT_FOUND(13)   │ → │ 8  [ { b = 1; }       ] │
    ///  │ 9  OP_CONSTANT("c")        │ → │ 9  [ { b = 1; } "c"   ] │
    ///  │ 10 OP_ATTR_OR_NOT_FOUND    │ → │ 10 [ NOT_FOUND        ] │
    ///  │ 11 JUMP_IF_NOT_FOUND(13)   │ → │ 11 [                  ] │
    ///  │ 12 JUMP(14)                │   │ ..     jumped over      │
    ///  │ 13 CONSTANT(42)            │ → │ 12 [ 42 ]               │
    ///  │ 14 ...                     │   │ ..   ....               │
    ///  └────────────────────────────┘   └─────────────────────────┘
    /// ```
    fn compile_select_or(
        &mut self,
        slot: LocalIdx,
        set: ast::Expr,
        path: ast::Attrpath,
        default: ast::Expr,
    ) {
        self.compile(slot, set);
        if self.optimise_select(&path) {
            return;
        }

        let mut jumps = vec![];

        for fragment in path.attrs() {
            self.emit_force(&fragment);
            self.compile_attr(slot, &fragment.clone());
            self.push_op(Op::AttrsTrySelect, &fragment);
            jumps.push(self.push_op(Op::JumpIfNotFound, &fragment));
            self.push_u16(0);
        }

        let final_jump = self.push_op(Op::Jump, &path);
        self.push_u16(0);

        for jump in jumps {
            self.patch_jump(jump);
        }

        // Compile the default value expression and patch the final
        // jump to point *beyond* it.
        self.compile(slot, default);
        self.patch_jump(final_jump);
    }

    /// Compile `assert` expressions using jumping instructions in the VM.
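    ///
    /// For example (an illustrative expression):
    ///
    /// ```nix
    /// assert 1 + 1 == 2; "body"  # evaluates to "body"
    /// ```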
    ///
    /// ```notrust
    ///                        ┌─────────────────────┐
    ///                        │ 0  [ conditional ]  │
    ///                        │ 1   JUMP_IF_FALSE  →┼─┐
    ///                        │ 2  [  main body  ]  │ │ Jump to else body if
    ///                       ┌┼─3─←     JUMP        │ │ condition is false.
    ///  Jump over else body  ││ 4   OP_ASSERT_FAIL ←┼─┘
    ///  if condition is true.└┼─5─→     ...         │
    ///                        └─────────────────────┘
    /// ```
    fn compile_assert(&mut self, slot: LocalIdx, node: &ast::Assert) {
        // Compile the assertion condition to leave its value on the stack.
        self.compile(slot, node.condition().unwrap());
        self.emit_force(&node.condition().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);

        let then_idx = self.push_op(Op::JumpIfFalse, node);
        self.push_u16(0);

        self.push_op(Op::Pop, node);
        self.compile(slot, node.body().unwrap());

        let else_idx = self.push_op(Op::Jump, node);
        self.push_u16(0);

        self.patch_jump(then_idx);
        self.push_op(Op::Pop, node);
        self.push_op(Op::AssertFail, &node.condition().unwrap());

        self.patch_jump(else_idx);
        self.patch_jump(throw_idx);
    }

    /// Compile conditional expressions using jumping instructions in the VM.
    ///
    /// ```notrust
    ///                        ┌─────────────────────┐
    ///                        │ 0  [ conditional ]  │
    ///                        │ 1   JUMP_IF_CATCH  →┼───┐ Jump over else body
    ///                        │ 2   JUMP_IF_FALSE  →┼─┐ │ if condition is catchable.
    ///                        │ 3  [  main body  ]  │ │ ← Jump to else body if
    ///                       ┌┼─4─←     JUMP        │ │ ← condition is false.
    ///  Jump over else body  ││ 5  [  else body  ] ←┼─┘ │
    ///  if condition is true.└┼─6─→     ...        ←┼───┘
    ///                        └─────────────────────┘
    /// ```
    fn compile_if_else(&mut self, slot: LocalIdx, node: &ast::IfElse) {
        self.compile(slot, node.condition().unwrap());
        self.emit_force(&node.condition().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, &node.condition().unwrap());
        self.push_u16(0);

        let then_idx = self.push_op(Op::JumpIfFalse, &node.condition().unwrap());
        self.push_u16(0);

        self.push_op(Op::Pop, node); // discard condition value
        self.compile(slot, node.body().unwrap());

        let else_idx = self.push_op(Op::Jump, node);
        self.push_u16(0);

        self.patch_jump(then_idx); // patch jump *to* else_body
        self.push_op(Op::Pop, node); // discard condition value
        self.compile(slot, node.else_body().unwrap());

        self.patch_jump(else_idx); // patch jump *over* else body
        self.patch_jump(throw_idx); // patch jump *over* else body
    }

    /// Compile `with` expressions by emitting instructions that push
    /// and pop the indices of attribute sets that are implicitly in
    /// scope through `with` on the "with-stack".
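    ///
    /// For example (an illustrative expression):
    ///
    /// ```nix
    /// with { a = 1; }; a  # `a` is resolved via the with-stack, yielding 1
    /// ```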
    fn compile_with(&mut self, slot: LocalIdx, node: &ast::With) {
        self.scope_mut().begin_scope();
        // TODO: Detect if the namespace is just an identifier, and
        // resolve that directly (thus avoiding duplication on the
        // stack).
        self.compile(slot, node.namespace().unwrap());

        let span = self.span_for(&node.namespace().unwrap());

        // The attribute set from which `with` inherits values
        // occupies a slot on the stack, but this stack slot is not
        // directly accessible. As it must be accounted for to
        // calculate correct offsets, what we call a "phantom" local
        // is declared here.
        let local_idx = self.scope_mut().declare_phantom(span, true);
        let with_idx = self.scope().stack_index(local_idx);

        self.scope_mut().push_with();

        self.push_op(Op::PushWith, &node.namespace().unwrap());
        self.push_uvarint(with_idx.0 as u64);

        self.compile(slot, node.body().unwrap());

        self.push_op(Op::PopWith, node);
        self.scope_mut().pop_with();
        self.cleanup_scope(node);
    }

    /// Compiles pattern function arguments, such as `{ a, b }: ...`.
    ///
    /// These patterns are treated as a special case of locals binding
    /// where the attribute set itself is placed on the first stack
    /// slot of the call frame (either as a phantom, or named in case
    /// of an `@` binding), and the function call sets up the rest of
    /// the stack as if the parameters were rewritten into a `let`
    /// binding.
    ///
    /// For example:
    ///
    /// ```nix
    /// ({ a, b ? 2, c ? a * b, ... }@args: <body>)  { a = 10; }
    /// ```
    ///
    /// would be compiled similarly to a binding such as
    ///
    /// ```nix
    /// let args = { a = 10; };
    /// in let a = args.a;
    ///        b = args.b or 2;
    ///        c = args.c or a * b;
    ///    in <body>
    /// ```
    ///
    /// However, there are two properties of pattern function arguments that can
    /// not be compiled by desugaring in this way:
    ///
    /// 1. Bindings have to fail if too many arguments are provided. This is
    ///    done by emitting a special instruction that checks the set of keys
    ///    from a constant containing the expected keys.
    /// 2. Formal arguments with a default expression are (as an optimisation and
    ///    because it is simpler) not wrapped in another thunk, instead compiled
    ///    and accessed separately. This means that the default expression may
    ///    never make it into the local's stack slot if the argument is provided
    ///    by the caller. We need to take this into account and skip any
    ///    operations specific to the expression like thunk finalisation in such
    ///    cases.
    fn compile_param_pattern(&mut self, pattern: &ast::Pattern) -> (Formals, CodeIdx) {
        let span = self.span_for(pattern);

        let (set_idx, pat_bind_name) = match pattern.pat_bind() {
            Some(name) => {
                let pat_bind_name = name.ident().unwrap().to_string();
                (
                    self.declare_local(&name, pat_bind_name.clone()),
                    Some(pat_bind_name),
                )
            }
            None => (self.scope_mut().declare_phantom(span, true), None),
        };

        // At call time, the attribute set is already at the top of the stack.
        self.scope_mut().mark_initialised(set_idx);
        self.emit_force(pattern);
        let throw_idx = self.push_op(Op::JumpIfCatchable, pattern);
        self.push_u16(0);

        // Evaluation fails on a type error, even if the argument(s) are unused.
        self.push_op(Op::AssertAttrs, pattern);

        let ellipsis = pattern.ellipsis_token().is_some();
        if !ellipsis {
            self.push_op(Op::ValidateClosedFormals, pattern);
        }

        // Similar to `let ... in ...`, we now do multiple passes over
        // the bindings to first declare them, then populate them, and
        // then finalise any necessary recursion into the scope.
        let mut entries: Vec<TrackedFormal> = vec![];
        let mut arguments = BTreeMap::default();

        for entry in pattern.pat_entries() {
            let ident = entry.ident().unwrap();
            let idx = self.declare_local(&ident, ident.to_string());

            arguments.insert(ident.into(), entry.default().is_some());

            if let Some(default_expr) = entry.default() {
                entries.push(TrackedFormal::WithDefault {
                    local_idx: idx,
                    // This phantom is used to track at runtime (!) whether we need to
                    // finalise the local's stack slot or not. The relevant instructions are
                    // emitted in the second pass where the mechanism is explained as well.
                    finalise_request_idx: {
                        let span = self.span_for(&default_expr);
                        self.scope_mut().declare_phantom(span, false)
                    },
                    default_expr,
                    pattern_entry: entry,
                });
            } else {
                entries.push(TrackedFormal::NoDefault {
                    local_idx: idx,
                    pattern_entry: entry,
                });
            }
        }

        // For each of the bindings, push the set on the stack and
        // attempt to select from it.
        let stack_idx = self.scope().stack_index(set_idx);
        for tracked_formal in entries.iter() {
            self.push_op(Op::GetLocal, pattern);
            self.push_uvarint(stack_idx.0 as u64);
            self.emit_literal_ident(&tracked_formal.pattern_entry().ident().unwrap());

            let idx = tracked_formal.local_idx();

            // Use the same mechanism as `compile_select_or` if a
            // default value was provided, or simply select otherwise.
            match tracked_formal {
                TrackedFormal::WithDefault {
                    default_expr,
                    pattern_entry,
                    ..
                } => {
                    // The tricky bit about compiling a formal argument with a default value
                    // is that the default may be a thunk that may depend on the value of
                    // other formal arguments, i.e. may need to be finalised. This
                    // finalisation can only happen if we are actually using the default
                    // value—otherwise OpFinalise will crash on an already finalised (or
                    // non-thunk) value.
                    //
                    // Thus we use an additional local to track whether we wound up
                    // defaulting or not. `FinaliseRequest(false)` indicates that we should
                    // not finalise, as we did not default.
                    //
                    // We are being wasteful with VM stack space in case of default
                    // expressions that don't end up needing to be finalised. Unfortunately
                    // we only know better after compiling the default expression, so
                    // avoiding unnecessary locals would mean we'd need to modify the chunk
                    // after the fact.
                    self.push_op(Op::AttrsTrySelect, &pattern_entry.ident().unwrap());
                    let jump_to_default = self.push_op(Op::JumpIfNotFound, default_expr);
                    self.push_u16(0);

                    self.emit_constant(Value::FinaliseRequest(false), default_expr);

                    let jump_over_default = self.push_op(Op::Jump, default_expr);
                    self.push_u16(0);

                    self.patch_jump(jump_to_default);

                    // Does not need to be thunked since compile() already does so when necessary.
                    self.compile(idx, default_expr.clone());

                    self.emit_constant(Value::FinaliseRequest(true), default_expr);

                    self.patch_jump(jump_over_default);
                }
                TrackedFormal::NoDefault { pattern_entry, .. } => {
                    self.push_op(Op::AttrsSelect, &pattern_entry.ident().unwrap());
                }
            }

            self.scope_mut().mark_initialised(idx);
            if let TrackedFormal::WithDefault {
                finalise_request_idx,
                ..
            } = tracked_formal
            {
                self.scope_mut().mark_initialised(*finalise_request_idx);
            }
        }

        for tracked_formal in entries.iter() {
            if self.scope()[tracked_formal.local_idx()].needs_finaliser {
                let stack_idx = self.scope().stack_index(tracked_formal.local_idx());
                match tracked_formal {
                    TrackedFormal::NoDefault { .. } => panic!(
                        "Snix bug: local for pattern formal needs finaliser, but has no default expr"
                    ),
                    TrackedFormal::WithDefault {
                        finalise_request_idx,
                        ..
                    } => {
                        let finalise_request_stack_idx =
                            self.scope().stack_index(*finalise_request_idx);

                        // TODO(sterni): better spans
                        self.push_op(Op::GetLocal, pattern);
                        self.push_uvarint(finalise_request_stack_idx.0 as u64);
                        let jump_over_finalise = self.push_op(Op::JumpIfNoFinaliseRequest, pattern);
                        self.push_u16(0);
                        self.push_op(Op::Finalise, pattern);
                        self.push_uvarint(stack_idx.0 as u64);
                        self.patch_jump(jump_over_finalise);
                        // Get rid of finaliser request value on the stack
                        self.push_op(Op::Pop, pattern);
                    }
                }
            }
        }

        (
            Formals {
                arguments,
                ellipsis,
                span,
                name: pat_bind_name,
            },
            throw_idx,
        )
    }

    fn compile_lambda(&mut self, slot: LocalIdx, node: &ast::Lambda) -> Option<CodeIdx> {
        // Compile the function itself, recording its formal arguments (if any)
        // for later use
        let formals = match node.param().unwrap() {
            ast::Param::Pattern(pat) => Some(self.compile_param_pattern(&pat)),

            ast::Param::IdentParam(param) => {
                let name = param
                    .ident()
                    .unwrap()
                    .ident_token()
                    .unwrap()
                    .text()
                    .to_string();

                let idx = self.declare_local(&param, &name);
                self.scope_mut().mark_initialised(idx);
                None
            }
        };

        self.compile(slot, node.body().unwrap());
        if let Some((formals, throw_idx)) = formals {
            self.context_mut().lambda.formals = Some(formals);
            Some(throw_idx)
        } else {
            self.context_mut().lambda.formals = None;
            None
        }
    }

    fn thunk<N, F>(&mut self, outer_slot: LocalIdx, node: &N, content: F)
    where
        N: ToSpan,
        F: FnOnce(&mut Compiler, LocalIdx),
    {
        self.compile_lambda_or_thunk(true, outer_slot, node, |comp, idx| {
            content(comp, idx);
            None
        })
    }

    /// Compile an expression into a runtime closure or thunk.
    fn compile_lambda_or_thunk<N, F>(
        &mut self,
        is_suspended_thunk: bool,
        outer_slot: LocalIdx,
        node: &N,
        content: F,
    ) where
        N: ToSpan,
        F: FnOnce(&mut Compiler, LocalIdx) -> Option<CodeIdx>,
    {
        let name = self.scope()[outer_slot].name();
        self.new_context();

        // Set the (optional) name of the current slot on the lambda that is
        // being compiled.
        self.context_mut().lambda.name = name;

        let span = self.span_for(node);
        let slot = self.scope_mut().declare_phantom(span, false);
        self.scope_mut().begin_scope();

        let throw_idx = content(self, slot);
        self.cleanup_scope(node);
        if let Some(throw_idx) = throw_idx {
            self.patch_jump(throw_idx);
        }

        // Pop the lambda context back off, and emit the finished
        // lambda as a constant.
        let mut compiled = self.contexts.pop().unwrap();

        // Emit an instruction to inform the VM that the chunk has ended.
        compiled
            .lambda
            .chunk
            .push_op(Op::Return, self.span_for(node));

        let lambda = Rc::new(compiled.lambda);
        if is_suspended_thunk {
            self.observer.observe_compiled_thunk(&lambda);
        } else {
            self.observer.observe_compiled_lambda(&lambda);
        }

        // If no upvalues are captured, emit directly and move on.
        if lambda.upvalue_count == 0 && !compiled.captures_with_stack {
            self.emit_constant(
                if is_suspended_thunk {
                    Value::Thunk(Thunk::new_suspended(lambda, span))
                } else {
                    Value::Closure(Rc::new(Closure::new(lambda)))
                },
                node,
            );
            return;
        }

        // Otherwise, we need to emit the variable number of
        // operands that allow the runtime to close over the
        // upvalues and leave a blueprint in the constant index from
        // which the result can be constructed.
        let blueprint_idx = self.chunk().push_constant(Value::Blueprint(lambda));

        let code_idx = self.push_op(
            if is_suspended_thunk {
                Op::ThunkSuspended
            } else {
                Op::ThunkClosure
            },
            node,
        );
        self.push_uvarint(blueprint_idx.0 as u64);

        self.emit_upvalue_data(
            outer_slot,
            node,
            compiled.scope.upvalues,
            compiled.captures_with_stack,
        );

        if !is_suspended_thunk && !self.scope()[outer_slot].needs_finaliser {
            if !self.scope()[outer_slot].must_thunk {
                // The closure has upvalues, but is not recursive. Therefore no
                // thunk is required, which saves us the overhead of
                // Rc<RefCell<>>
                self.chunk().code[code_idx.0] = Op::Closure as u8;
            } else {
                // This case occurs when a closure has upvalue-references to
                // itself but does not need a finaliser. Since no OpFinalise
                // will be emitted later on we synthesize one here. It is needed
                // here only to set [`Closure::is_finalised`] which is used for
                // sanity checks.
                #[cfg(debug_assertions)]
                {
                    self.push_op(Op::Finalise, &self.span_for(node));
                    self.push_uvarint(self.scope().stack_index(outer_slot).0 as u64);
                }
            }
        }
    }

    fn compile_apply(&mut self, slot: LocalIdx, node: &ast::Apply) {
        // To call a function, we leave its arguments on the stack,
        // followed by the function expression itself, and then emit a
        // call instruction. This way, the stack is perfectly laid out
        // to enter the function call straight away.
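        //
        // For `f x`, the emitted sequence is roughly:
        //
        //   <code for x>          (argument)
        //   <code for f>; FORCE   (callee, forced before the call)
        //   CALL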
        self.compile(slot, node.argument().unwrap());
        self.compile(slot, node.lambda().unwrap());
        self.emit_force(&node.lambda().unwrap());
        self.push_op(Op::Call, node);
    }

    /// Emit the data instructions that the runtime needs to correctly
    /// assemble the upvalues struct.
    fn emit_upvalue_data<T: ToSpan>(
        &mut self,
        slot: LocalIdx,
        _: &T, // TODO
        upvalues: Vec<Upvalue>,
        capture_with: bool,
    ) {
        // Push the count of arguments to be expected, with one bit set to
        // indicate whether the with stack needs to be captured.
        let mut count = (upvalues.len() as u64) << 1;
        if capture_with {
            count |= 1;
        }
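
        // For example, three upvalues plus a captured with-stack
        // encode as (3 << 1) | 1 = 7.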
        self.push_uvarint(count);

        for upvalue in upvalues {
            match upvalue.kind {
                UpvalueKind::Local(idx) => {
                    let target = &self.scope()[idx];
                    let stack_idx = self.scope().stack_index(idx);

                    // If the target is not yet initialised, we need to defer
                    // the local access
                    if !target.initialised {
                        self.push_uvarint(Position::deferred_local(stack_idx).0);
                        self.scope_mut().mark_needs_finaliser(slot);
                    } else {
                        // a self-reference
                        if slot == idx {
                            self.scope_mut().mark_must_thunk(slot);
                        }
                        self.push_uvarint(Position::stack_index(stack_idx).0);
                    }
                }

                UpvalueKind::Upvalue(idx) => {
                    self.push_uvarint(Position::upvalue_index(idx).0);
                }
            };
        }
    }

    /// Emit the literal string value of an identifier. Required for
    /// several operations related to attribute sets, where
    /// identifiers are used as string keys.
    fn emit_literal_ident(&mut self, ident: &ast::Ident) {
        self.emit_constant(Value::String(ident.clone().into()), ident);
    }

    /// Patch the jump instruction at the given index, setting its
    /// jump offset from the placeholder to the current code position.
    ///
    /// This is required because the actual target offset of jumps is
    /// not known at the time when the jump operation itself is
    /// emitted.
1393    fn patch_jump(&mut self, idx: CodeIdx) {
1394        self.chunk().patch_jump(idx.0);
1395    }
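
    // Illustrative sketch (not part of the compiler): when compiling a
    // conditional, the jump over a branch is emitted before the branch
    // itself, so its offset cannot be known yet:
    //
    //   let idx = self.push_op(/* some jump op */, node); // placeholder offset
    //   /* ... compile the branch ... */
    //   self.patch_jump(idx); // offset now known, patch it in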

    /// Decrease the scope depth of the current function and emit
    /// instructions to clean up the stack at runtime.
    fn cleanup_scope<N: ToSpan>(&mut self, node: &N) {
        // When ending a scope, all corresponding locals need to be
        // removed, but the value of the body needs to remain on the
        // stack. This is implemented by a separate instruction.
        let (popcount, unused_spans) = self.scope_mut().end_scope();

        for span in &unused_spans {
            self.emit_warning(span, WarningKind::UnusedBinding);
        }

        if popcount > 0 {
            self.push_op(Op::CloseScope, node);
            self.push_uvarint(popcount as u64);
        }
    }
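
    // Illustrative sketch (not part of the compiler): for
    // `let x = 1; in x`, the body's value sits on the stack above the
    // local `x` when the scope ends. Op::CloseScope with a popcount of
    // 1 removes the local underneath while preserving the value on top.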

    /// Open a new lambda context within which to compile a function,
    /// closure or thunk.
    fn new_context(&mut self) {
        self.contexts.push(self.context().inherit());
    }

    /// Declare a local variable known in the scope that is being
    /// compiled by pushing it to the locals. This is used to
    /// determine the stack offset of variables.
    fn declare_local<S: Into<String>, N: ToSpan>(&mut self, node: &N, name: S) -> LocalIdx {
        let name = name.into();
        let depth = self.scope().scope_depth();

        // Do this little dance to turn `name: &'a str` into the same
        // string with a `&'static` lifetime, as required by WarningKind.
        if let Some((global_ident, _)) = self.globals.get_key_value(name.as_str()) {
            self.emit_warning(node, WarningKind::ShadowedGlobal(global_ident));
        }

        let span = self.span_for(node);
        let (idx, shadowed) = self.scope_mut().declare_local(name, span);

        if let Some(shadow_idx) = shadowed {
            let other = &self.scope()[shadow_idx];
            if other.depth == depth {
                self.emit_error(node, ErrorKind::VariableAlreadyDefined(other.span));
            }
        }

        idx
    }
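
    // Illustrative sketch (not part of the compiler): declaring a local
    // that reuses a global's name, as in
    //
    //   let true = 1; in true
    //
    // triggers the WarningKind::ShadowedGlobal path above, while
    // re-declaring a name at the same scope depth raises
    // ErrorKind::VariableAlreadyDefined.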

    /// Determine whether the current lambda context has any ancestors
    /// that use dynamic scope resolution, and mark contexts as
    /// needing to capture their enclosing `with`-stack in their
    /// upvalues.
    fn has_dynamic_ancestor(&mut self) -> bool {
        let mut ancestor_has_with = false;

        for ctx in self.contexts.iter_mut() {
            if ancestor_has_with {
                // If the ancestor has an active with stack, mark this
                // lambda context as needing to capture it.
                ctx.captures_with_stack = true;
            } else {
                // otherwise, check this context and move on
                ancestor_has_with = ctx.scope.has_with();
            }
        }

        ancestor_has_with
    }
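
    // Illustrative sketch (not part of the compiler): in
    //
    //   with ns; (x: y)
    //
    // the inner lambda may resolve `y` dynamically through the `with`
    // introduced in an enclosing context, so `has_dynamic_ancestor`
    // returns true for it and the intervening contexts are marked as
    // capturing the with-stack.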

    fn emit_force<N: ToSpan>(&mut self, node: &N) {
        self.push_op(Op::Force, node);
    }

    fn emit_warning<N: ToSpan>(&mut self, node: &N, kind: WarningKind) {
        let span = self.span_for(node);
        self.warnings.push(EvalWarning { kind, span })
    }

    fn emit_error<N: ToSpan>(&mut self, node: &N, kind: ErrorKind) {
        let span = self.span_for(node);
        self.errors
            .push(Error::new(kind, span, self.source.clone()))
    }
}

/// Convert a non-dynamic string expression to a string if possible.
fn expr_static_str(node: &ast::Str) -> Option<SmolStr> {
    let mut parts = node.normalized_parts();

    if parts.len() != 1 {
        return None;
    }

    if let Some(ast::InterpolPart::Literal(lit)) = parts.pop() {
        return Some(SmolStr::new(lit));
    }

    None
}
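
// Illustrative behaviour of `expr_static_str` (a sketch, not a doctest):
//
//   "foo"     => Some("foo")   (single literal part)
//   "foo${x}" => None          (interpolation makes it dynamic)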

/// Convert the provided `ast::Attr` into a statically known string if
/// possible.
fn expr_static_attr_str(node: &ast::Attr) -> Option<SmolStr> {
    match node {
        ast::Attr::Ident(ident) => Some(ident.ident_token().unwrap().text().into()),
        ast::Attr::Str(s) => expr_static_str(s),

        // The dynamic node type is just a wrapper. C++ Nix does not care
        // about the dynamic wrapper when determining whether the node
        // itself is dynamic; that depends solely on the expression inside
        // (i.e. `let ${"a"} = 1; in a` is valid).
        ast::Attr::Dynamic(dynamic) => match dynamic.expr().unwrap() {
            ast::Expr::Str(s) => expr_static_str(&s),
            _ => None,
        },
    }
}
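
// Illustrative behaviour of `expr_static_attr_str` (a sketch):
//
//   a      => Some("a")   (identifier)
//   "a"    => Some("a")   (static string)
//   ${"a"} => Some("a")   (dynamic wrapper around a static string)
//   ${x}   => None        (genuinely dynamic)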

/// Create a delayed source-only builtin compilation, for a builtin
/// which is written in Nix code.
///
/// **Important:** snix *panics* if a builtin with invalid source code
/// is supplied. This is because there is no user-friendly way to
/// thread the errors out of this function right now.
fn compile_src_builtin(
    name: &'static str,
    code: &str,
    source: SourceCode,
    weak: &Weak<GlobalsMap>,
) -> Value {
    use std::fmt::Write;

    let parsed = rnix::ast::Root::parse(code);

    if !parsed.errors().is_empty() {
        let mut out = format!("BUG: code for source-builtin '{name}' had parser errors");
        for error in parsed.errors() {
            writeln!(out, "{error}").unwrap();
        }

        panic!("{}", out);
    }

    let file = source.add_file(format!("<src-builtins/{name}.nix>"), code.to_string());
    let weak = weak.clone();

    Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
        let result = compile(
            &parsed.tree().expr().unwrap(),
            None,
            weak.upgrade().unwrap(),
            None,
            &source,
            &file,
            &mut crate::observer::NoOpObserver {},
        )
        .map_err(|e| ErrorKind::NativeError {
            gen_type: "derivation",
            err: Box::new(e),
        })?;

        if !result.errors.is_empty() {
            return Err(ErrorKind::ImportCompilerError {
                path: format!("src-builtins/{name}.nix").into(),
                errors: result.errors,
            });
        }

        Ok(Value::Thunk(Thunk::new_suspended(result.lambda, file.span)))
    })))
}
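
// Illustrative note (a sketch): parsing of a source builtin happens
// eagerly above, so parse errors panic immediately, while *compilation*
// is deferred inside the suspended thunk and its errors only surface as
// ErrorKind::ImportCompilerError when the builtin is first forced.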

/// Prepare the full set of globals available in evaluated code. These
/// are constructed from the set of builtins supplied by the caller,
/// which are made available globally under the `builtins` identifier.
///
/// The builtins specified by [`GLOBAL_BUILTINS`] are additionally made
/// available in the global scope, *iff* they are set.
///
/// Optionally adds the `import` feature if desired by the caller.
pub fn prepare_globals(
    builtins: Vec<(&'static str, Value)>,
    src_builtins: Vec<(&'static str, &'static str)>,
    source: SourceCode,
    enable_import: bool,
) -> Rc<GlobalsMap> {
    Rc::new_cyclic(Box::new(move |weak: &Weak<GlobalsMap>| {
        // The first step is to construct the builtins themselves as
        // `NixAttrs`.
        let mut builtins: GlobalsMap = FxHashMap::from_iter(builtins);

        // At this point, optionally insert `import`. To "tie the knot"
        // of `import` needing the full set of globals to instantiate
        // its compiler, the `Weak` reference is passed here.
        if enable_import {
            let import = Value::Builtin(import::builtins_import(weak, source.clone()));
            builtins.insert("import", import);
        }

        // Next, the actual map of globals which the compiler will use
        // to resolve identifiers is constructed.
        let mut globals: GlobalsMap = FxHashMap::default();

        // The builtins contain themselves (`builtins.builtins`), which
        // we can resolve by manually constructing a suspended thunk that
        // dereferences the same weak pointer as above.
        let weak_globals = weak.clone();
        builtins.insert(
            "builtins",
            Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
                Ok(weak_globals
                    .upgrade()
                    .unwrap()
                    .get("builtins")
                    .cloned()
                    .unwrap())
            }))),
        );

        // Insert top-level static value builtins.
        globals.insert("true", Value::Bool(true));
        globals.insert("false", Value::Bool(false));
        globals.insert("null", Value::Null);

        // If "source builtins" were supplied, compile them and insert
        // them.
        builtins.extend(src_builtins.into_iter().map(move |(name, code)| {
            let compiled = compile_src_builtin(name, code, source.clone(), weak);
            (name, compiled)
        }));

        // Construct the actual `builtins` attribute set and insert it
        // in the global scope.
        globals.insert(
            "builtins",
            Value::attrs(NixAttrs::from_iter(builtins.clone())),
        );

        // Finally, the builtins that should be globally available are
        // "elevated" to the outer scope.
        for global in GLOBAL_BUILTINS {
            if let Some(builtin) = builtins.get(global).cloned() {
                globals.insert(global, builtin);
            }
        }

        globals
    }))
}
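
// Illustrative usage (a sketch; `my_builtins` and `source` are assumed
// to exist in the caller):
//
//   let globals = prepare_globals(
//       my_builtins,    // Vec<(&'static str, Value)>
//       vec![],         // no source builtins
//       source.clone(), // SourceCode handle
//       true,           // enable `import`
//   );
//
// The returned Rc<GlobalsMap> can then be passed to `compile` below.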

pub fn compile(
    expr: &ast::Expr,
    location: Option<PathBuf>,
    globals: Rc<GlobalsMap>,
    env: Option<&FxHashMap<SmolStr, Value>>,
    source: &SourceCode,
    file: &codemap::File,
    observer: &mut dyn CompilerObserver,
) -> EvalResult<CompilationOutput> {
    let mut c = Compiler::new(location, globals.clone(), env, source, file, observer)?;

    let root_span = c.span_for(expr);
    let root_slot = c.scope_mut().declare_phantom(root_span, false);
    c.compile(root_slot, expr.clone());

    // The final operation of any top-level Nix program must always be
    // `OpForce`. A thunk should not be returned to the user in an
    // unevaluated state (though in practice, a value *containing* a
    // thunk might be returned).
    c.emit_force(expr);
    if let Some(env) = env {
        if !env.is_empty() {
            c.push_op(Op::CloseScope, &root_span);
            c.push_uvarint(env.len() as u64);
        }
    }
    c.push_op(Op::Return, &root_span);

    let lambda = Rc::new(c.contexts.pop().unwrap().lambda);
    c.observer.observe_compiled_toplevel(&lambda);

    Ok(CompilationOutput {
        lambda,
        warnings: c.warnings,
        errors: c.errors,
    })
}
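
// Illustrative end-to-end usage (a sketch; assumes `source`, `file` and
// `globals` were set up as above):
//
//   let parsed = rnix::ast::Root::parse("1 + 1");
//   let expr = parsed.tree().expr().unwrap();
//   let mut observer = crate::observer::NoOpObserver {};
//   let output = compile(&expr, None, globals, None, &source, &file, &mut observer)?;
//   assert!(output.errors.is_empty());
//   // output.lambda is now ready to hand to the VM.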