charon_lib/transform/inline_promoted_consts.rs
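
//! Inline the initializer bodies of promoted (anonymous) constants into the
//! function bodies that use them, removing the corresponding global and
//! initializer declarations from the crate.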

use std::{collections::HashMap, mem};

use super::{ctx::UllbcPass, TransformCtx};
use crate::{ids::Generator, ullbc_ast::*};

pub struct Transform;
impl UllbcPass for Transform {
    fn transform_ctx(&self, ctx: &mut TransformCtx) {
        // Currently the only anon consts that are not already evaluated are promoted consts. If
        // that changes, we'll have to restrict this pass to the consts that can be inlined into a
        // body.

        // Map each anon const id to its initializer, and remove both from `translated`.
        let anon_consts: HashMap<GlobalDeclId, ExprBody> = ctx
            .translated
            .global_decls
            .extract(|gdecl| matches!(gdecl.global_kind, GlobalKind::AnonConst))
            .filter_map(|(id, gdecl)| {
                let fdecl = ctx.translated.fun_decls.remove(gdecl.init)?;
                let body = fdecl.body.ok()?;
                let body = body.to_unstructured()?;
                Some((id, body))
            })
            .collect();

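        // Walk every remaining body and splice the collected initializer bodies in
        // wherever an operand refers to one of the extracted constants.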
        ctx.for_each_fun_decl(|_ctx, decl| {
            if let Ok(outer_body) = &mut decl.body {
                let outer_body = outer_body.as_unstructured_mut().unwrap();
                for block_id in outer_body.body.all_indices() {
                    // Subtle: this generator must be kept in sync with the block ids that
                    // `outer_body.body` will assign when we push the new blocks below.
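                    // `start_new_bodies` is the id the first appended block will receive;
                    // if we inline anything, the current block is redirected there.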
                    let mut bid_generator =
                        Generator::new_with_init_value(outer_body.body.next_id());
                    let start_new_bodies = bid_generator.next_id();
                    let Some(block) = outer_body.body.get_mut(block_id) else {
                        continue;
                    };
                    let mut new_blocks = vec![];
                    block.dyn_visit_in_body_mut(|op: &mut Operand| {
                        if let Operand::Const(c) = op
                            && let RawConstantExpr::Global(gref) = &mut c.value
                            && let Some(inner_body) = anon_consts.get(&gref.id)
                        {
                            // We inline the required body by shifting its local ids and block ids
                            // and adding its blocks to the outer body. The inner body's return
                            // local becomes a normal local that we can read from. We redirect some
                            // gotos so that the inner body is executed before the current block.
                            let mut inner_body = inner_body.clone().substitute(&gref.generics);

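                            // Shift every local id of the inner body by `return_local`, i.e. by the
                            // number of locals already in the outer body. This maps the inner body's
                            // local 0 (its return place) onto the fresh `return_local`.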
                            let return_local = outer_body.locals.locals.next_id();
                            inner_body.dyn_visit_in_body_mut(|l: &mut LocalId| {
                                *l += return_local;
                            });

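                            // Reserve a contiguous range of block ids for the inner body: its
                            // blocks will be appended starting at `start_block`, and control
                            // resumes at `end_block` once the inner body is done.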
                            let start_block = bid_generator.next_id();
                            bid_generator.advance(inner_body.body.elem_count());
                            let end_block = bid_generator.next_id();
                            inner_body.dyn_visit_in_body_mut(|b: &mut BlockId| {
                                *b += start_block;
                            });
                            // Make all returns point to `end_block`. This block doesn't exist yet;
                            // it will either be the start block of another inner body, or the
                            // current outer block that we'll push at the end.
                            inner_body.body.dyn_visit_in_body_mut(|t: &mut Terminator| {
                                if let RawTerminator::Return = t.content {
                                    t.content = RawTerminator::Goto { target: end_block };
                                }
                            });

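                            // Append the shifted locals to the outer body, queue the shifted blocks
                            // for later insertion, and read the constant's value from the inner
                            // body's (now renumbered) return local.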
                            outer_body
                                .locals
                                .locals
                                .extend(inner_body.locals.locals.into_iter());
                            new_blocks.extend(inner_body.body);
                            *op = Operand::Move(outer_body.locals.place_for_var(return_local));
                        }
                    });
                    if !new_blocks.is_empty() {
                        // Instead of the current block, start evaluating the new bodies.
                        let block = mem::replace(
                            block,
                            BlockData::new_goto(Span::dummy(), start_new_bodies),
                        );
                        // Add the new blocks. They've been set up so that each new inner body
                        // returns to what follows it in the sequence. Hence the last added body
                        // points to the not-yet-existing block where we re-push the current block
                        // just below.
                        outer_body.body.extend(new_blocks.into_iter());
                        // Push the current block to be executed after the newly-added ones.
                        outer_body.body.push(block);
                    }
                }
            }
        });
    }
}