//! Inline the bodies of anonymous constants into their use sites.
//! (charon_lib/transform/simplify_output/inline_anon_consts.rs)
1use std::{collections::HashMap, mem};
2
3use crate::transform::{TransformCtx, ctx::UllbcPass};
4use crate::{ids::Generator, ullbc_ast::*};
5
6pub struct Transform;
7impl UllbcPass for Transform {
8 fn should_run(&self, options: &crate::options::TranslateOptions) -> bool {
9 !options.raw_consts
10 }
11
12 fn transform_ctx(&self, ctx: &mut TransformCtx) {
13 // Map each anon const id to its initializer, and remove both from `translated`.
14 let anon_consts: HashMap<GlobalDeclId, ExprBody> = ctx
15 .translated
16 .global_decls
17 .extract(|gdecl| matches!(gdecl.global_kind, GlobalKind::AnonConst))
18 .filter_map(|(id, gdecl)| {
19 let fdecl = ctx.translated.fun_decls.remove(gdecl.init)?;
20 let body = fdecl.body.to_unstructured()?;
21 Some((id, body))
22 })
23 .collect();
24
25 ctx.for_each_fun_decl(|_ctx, decl| {
26 if let Some(outer_body) = decl.body.as_unstructured_mut() {
27 for block_id in outer_body.body.all_indices() {
28 // Subtle: This generator must be managed to correctly track the indices that will
29 // be generated when pushing onto `outer_body.body`.
30 let mut bid_generator: Generator<BlockId> =
31 Generator::new_with_init_value(outer_body.body.next_idx());
32 let start_new_bodies = bid_generator.next_id();
33 let Some(block) = outer_body.body.get_mut(block_id) else {
34 continue;
35 };
36 let mut new_blocks = vec![];
37 block.dyn_visit_in_body_mut(|op: &mut Operand| {
38 if let Operand::Const(c) = op
39 && let ConstantExprKind::Global(gref) = &mut c.kind
40 && let Some(inner_body) = anon_consts.get(&gref.id)
41 {
42 // We inline the required body by shifting its local ids and block ids
43 // and adding its blocks to the outer body. The inner body's return
44 // local becomes a normal local that we can read from. We redirect some
45 // gotos so that the inner body is executed before the current block.
46 let mut inner_body = inner_body.clone().substitute(&gref.generics);
47
48 // Shift all the body regions in the inner body.
49 inner_body.dyn_visit_mut(|r: &mut Region| {
50 if let Region::Body(v) = r {
51 *v += outer_body.bound_body_regions;
52 }
53 });
54 outer_body.bound_body_regions += inner_body.bound_body_regions;
55
56 // The init function of a global assumes the return place is live;
57 // this is not the case once we inline it
58 inner_body.body[0].statements.insert(
59 0,
60 Statement::new(
61 Span::dummy(),
62 StatementKind::StorageLive(LocalId::ZERO),
63 ),
64 );
65
66 let return_local = outer_body.locals.locals.next_idx();
67 inner_body.dyn_visit_in_body_mut(|l: &mut LocalId| {
68 *l += return_local;
69 });
70
71 let start_block = bid_generator.next_id();
72 bid_generator.advance(inner_body.body.len());
73 let end_block = bid_generator.next_id();
74 inner_body.dyn_visit_in_body_mut(|b: &mut BlockId| {
75 *b += start_block;
76 });
77 // Make all returns point to `end_block`. This block doesn't exist yet,
78 // it will either be the start block of another inner body, or the
79 // current outer block that we'll push at the end.
80 inner_body.body.dyn_visit_in_body_mut(|t: &mut Terminator| {
81 if let TerminatorKind::Return = t.kind {
82 t.kind = TerminatorKind::Goto { target: end_block };
83 }
84 });
85
86 outer_body
87 .locals
88 .locals
89 .extend(inner_body.locals.locals.into_iter());
90 new_blocks.extend(inner_body.body);
91 *op = Operand::Move(outer_body.locals.place_for_var(return_local));
92 }
93 });
94 if !new_blocks.is_empty() {
95 // Instead of the current block, start evaluating the new bodies.
96 let block = mem::replace(
97 block,
98 BlockData::new_goto(Span::dummy(), start_new_bodies),
99 );
100 // Add the new blocks. They've been set up so that each new inner body
101 // returns to what follows it in the sequence. Hence the last added body
102 // points to the not-yet-existing block at `start_new_bodies`
103 outer_body.body.extend(new_blocks.into_iter());
104 // Push the current block to be executed after the newly-added ones.
105 outer_body.body.push(block);
106 }
107 }
108 }
109 });
110 }
111}