charon_lib/transform/simplify_output/inline_promoted_consts.rs
use std::{collections::HashMap, mem};

use crate::transform::{TransformCtx, ctx::UllbcPass};
use crate::{ids::Generator, ullbc_ast::*};
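
/// Inline the initializer bodies of anonymous (promoted) constants into the
/// functions that use them: each read of such a global is replaced by a move
/// out of a fresh local, assigned by the inlined initializer blocks.
///
/// Rough sketch of the effect on a body, in ULLBC-like pseudocode (block and
/// local names are illustrative, not actual charon output):
///
/// ```text
/// // before:
/// bb0: x := const {anon#0}; ...; terminator
///
/// // after:
/// bb0: goto bb1
/// bb1..=bbN: the inlined initializer of anon#0, with its returns
///            rewritten to `goto bbN+1` and its return place mapped
///            to a fresh local `ret`
/// bbN+1: x := move ret; ...; terminator
/// ```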
pub struct Transform;
impl UllbcPass for Transform {
    fn transform_ctx(&self, ctx: &mut TransformCtx) {
        // Currently the only anon consts that are not already evaluated are promoted consts. If
        // that changes, we'll have to restrict this pass to the consts that can be inlined into
        // a body.

        // Map each anon const id to its initializer body, removing both the const and its
        // initializer function from `translated`.
        let anon_consts: HashMap<GlobalDeclId, ExprBody> = ctx
            .translated
            .global_decls
            .extract(|gdecl| matches!(gdecl.global_kind, GlobalKind::AnonConst))
            .filter_map(|(id, gdecl)| {
                let fdecl = ctx.translated.fun_decls.remove(gdecl.init)?;
                let body = fdecl.body.to_unstructured()?;
                Some((id, body))
            })
            .collect();
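
        // Walk every function body and splice in the initializer of each anon const it reads.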
        ctx.for_each_fun_decl(|_ctx, decl| {
            if let Some(outer_body) = decl.body.as_unstructured_mut() {
                for block_id in outer_body.body.all_indices() {
                    // Subtle: this generator must be kept in sync with the indices that blocks
                    // will get when pushed onto `outer_body.body`.
                    let mut bid_generator =
                        Generator::new_with_init_value(outer_body.body.next_id());
                    let start_new_bodies = bid_generator.next_id();
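                    // `start_new_bodies` is the id the first inlined block will get; if we
                    // inline anything, the current block is redirected there (see the end of
                    // this loop iteration).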
                    let Some(block) = outer_body.body.get_mut(block_id) else {
                        continue;
                    };
                    let mut new_blocks = vec![];
                    block.dyn_visit_in_body_mut(|op: &mut Operand| {
                        if let Operand::Const(c) = op
                            && let ConstantExprKind::Global(gref) = &mut c.kind
                            && let Some(inner_body) = anon_consts.get(&gref.id)
                        {
                            // We inline the required body by shifting its local ids and block
                            // ids and adding its blocks to the outer body. The inner body's
                            // return local becomes a normal local that we can read from. We
                            // redirect some gotos so that the inner body is executed before the
                            // current block.
                            let mut inner_body = inner_body.clone().substitute(&gref.generics);
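                            // (`substitute` instantiates the inner body with the generic
                            // arguments used at this particular occurrence of the const.)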

                            // The init function of a global assumes its return place is live;
                            // this is no longer the case once we inline it, so mark the return
                            // local live explicitly.
                            inner_body.body[0].statements.insert(
                                0,
                                Statement::new(
                                    Span::dummy(),
                                    StatementKind::StorageLive(LocalId::ZERO),
                                ),
                            );
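
                            // Shift every local id in the inner body up by the number of locals
                            // in the outer body; the inner return local (id 0) thus becomes
                            // `return_local`, a fresh local of the outer body.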
                            let return_local = outer_body.locals.locals.next_id();
                            inner_body.dyn_visit_in_body_mut(|l: &mut LocalId| {
                                *l += return_local;
                            });
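
                            // The inlined blocks will occupy the id range
                            // `start_block..end_block` of the outer body.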
                            let start_block = bid_generator.next_id();
                            bid_generator.advance(inner_body.body.elem_count());
                            let end_block = bid_generator.next_id();
                            inner_body.dyn_visit_in_body_mut(|b: &mut BlockId| {
                                *b += start_block;
                            });
                            // Make all returns point to `end_block`. This block doesn't exist
                            // yet: it will either be the start block of the next inlined body,
                            // or the current outer block that we push back at the end.
                            inner_body.body.dyn_visit_in_body_mut(|t: &mut Terminator| {
                                if let TerminatorKind::Return = t.kind {
                                    t.kind = TerminatorKind::Goto { target: end_block };
                                }
                            });
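
                            // Append the inner body's (shifted) locals to the outer body, and
                            // queue its blocks to be added below.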
                            outer_body
                                .locals
                                .locals
                                .extend(inner_body.locals.locals.into_iter());
                            new_blocks.extend(inner_body.body);
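
                            // Finally, replace the global read with a move out of the fresh
                            // return local.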
                            *op = Operand::Move(outer_body.locals.place_for_var(return_local));
                        }
                    });
                    if !new_blocks.is_empty() {
                        // Instead of evaluating the current block directly, start by evaluating
                        // the new bodies.
                        let block = mem::replace(
                            block,
                            BlockData::new_goto(Span::dummy(), start_new_bodies),
                        );
                        // Add the new blocks. They've been set up so that each inlined body
                        // returns to the one that follows it in the sequence; the last one
                        // points to the not-yet-existing block just past them, which is where
                        // we push the current block below.
                        outer_body.body.extend(new_blocks.into_iter());
                        // Push the current block to be executed after the newly-added ones.
                        outer_body.body.push(block);
                    }
                }
            }
        });
    }
}