const Def* insert_ret(const Def* def, const Def* ret) {
    auto new_ops = DefVec(def->num_projs() + 1,
                          [&](auto i) { return (i == def->num_projs()) ? ret : *def->proj(i); });
    auto& w = def->world();
    return def->is_term() ? w.tuple(new_ops) : w.sigma(new_ops);
}
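// `insert_ret` appends `ret` after the existing projections: for a term it builds a tuple,
// for a type a sigma. The pass uses it to append the dummy return continuation (and its
// type) to basic-block lambdas, i.e. continuations that are not returning.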
void LowerTypedClos::start() {
    for (auto mut : world().copy_externals()) rewrite(mut);
    while (!worklist_.empty()) {
        auto [lvm, lcm, lam] = worklist_.front();
        // ... (pop the entry and make lvm/lcm the current lvm_/lcm_)
        world().DLOG("in {} (lvm={}, lcm={})", lam, lvm_, lcm_);
        auto new_f = rewrite(lam->filter());
        auto new_b = rewrite(lam->body());
        lam->reset({new_f, new_b});
    }
    for (auto lam : new_externals_) lam->make_external();
}
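// `make_stub` creates the lowered counterpart of `lam`. `Mode` decides how the environment
// parameter (at index Clos_Env_Param, the position of the environment in the argument list)
// is lowered: behind a mem::Ptr0 pointer, as an unboxed value (Unbox), or with no
// environment at all (No_Env). Basic blocks additionally get the dummy return appended to
// their domain when `adjust_bb_type` is set.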
Lam* LowerTypedClos::make_stub(Lam* lam, enum Mode mode, bool adjust_bb_type) {
    assert(lam && "make_stub: not a lam");
    if (auto i = old2new_.find(lam); i != old2new_.end() && i->second->isa_mut<Lam>())
        return i->second->as_mut<Lam>();
    auto& w      = world();
    auto new_dom = w.sigma(DefVec(lam->num_doms(), [&](auto i) -> const Def* {
        auto new_dom = rewrite(lam->dom(i));
        if (i == Clos_Env_Param) {
            // ... (Unbox and No_Env cases)
            return world().call<mem::Ptr0>(new_dom);
        }
        return new_dom;
    }));
    if (Lam::isa_basicblock(lam) && adjust_bb_type) new_dom = insert_ret(new_dom, dummy_ret_->type());
    auto new_type = w.cn(new_dom);
    auto new_lam  = lam->stub(new_type);
    w.DLOG("stub {} ~> {}", lam, new_lam);
    // ...
    new_externals_.emplace_back(new_lam);
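    // Wire the old lambda's variables to the new ones: the environment parameter becomes
    // `env` (in the boxed case it is loaded from memory, which also yields a fresh mem
    // token), the mem variable (mem::mem_var) becomes that token `lcm`, and every other
    // parameter maps onto the corresponding var of the new lambda.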
    // ... (set up `lcm`/`env`; a boxed environment is loaded, yielding `env_mem`)
        lcm = w.extract(env_mem, 0_u64)->set("mem");
        env = w.extract(env_mem, 1_u64)->set("closure_env");
    } else if (mode == Unbox) {
        // ...
    }
    auto new_args = w.tuple(DefVec(lam->num_doms(), [&](auto i) {
        return (i == Clos_Env_Param)              ? env
             : (lam->var(i) == mem::mem_var(lam)) ? lcm
                                                  : *new_lam->var(i);
    }));
    assert(new_args->num_projs() == lam->num_doms());
    assert(lam->num_doms() <= new_lam->num_doms());
    map(lam->var(), new_args);
    // ...
    return map(lam, new_lam)->as<Lam>();
}
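// `rewrite` memoizes its results in old2new_. Closure types matched by isa_clos_type are
// lowered to a plain sigma of the rewritten function type and environment type; literal
// projections out of a closure keep index 0 pointing at the generic environment type and
// shift all other indices down by one.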
const Def* LowerTypedClos::rewrite(const Def* def) {
    switch (def->node()) {
        // ... (nodes that need no rewriting)
    }
    if (auto i = old2new_.find(def); i != old2new_.end()) return i->second;
    if (auto var = def->isa<Var>(); var && var->mut()->isa_mut<Lam>())
        assert(false && "Lam vars should appear in a map!");
    auto new_type = rewrite(def->type());
    auto& w       = world();
    if (auto ct = isa_clos_type(def)) {
        auto pi       = rewrite(ct->op(1))->as<Pi>();
        // ...
        auto env_type = rewrite(ct->op(2));
        return map(def, w.sigma({pi, env_type}));
    } else if (auto proj = def->isa<Extract>()) {
        auto tuple = proj->tuple();
        if (isa_clos_type(tuple->type())) {
            auto idx = Lit::isa(proj->index());
            assert(idx && idx <= 2 && "unknown proj from closure tuple");
            if (*idx == 0) return map(def, env_type());
            return map(def, rewrite(tuple)->proj(*idx - 1));
        } else if (auto var = tuple->isa<Var>(); var && isa_clos_type(var->mut())) {
            assert(false && "proj fst type from closure type");
        }
    }
    else if (auto c = isa_clos_lit(def)) {
        auto env      = rewrite(c.env());
        // ... (determine the Mode for this closure)
        const Def* fn = make_stub(c.fnc_as_lam(), mode, true);
        if (env->type() == w.sigma()) {
            env = w.bot(env_type());
        // ... (otherwise store the environment in memory, yielding `mem_ptr`)
            auto mem     = w.extract(mem_ptr, 0_u64);
            auto env_ptr = mem_ptr->proj(1_u64);
            // ...
        }
        return map(def, w.tuple({fn, env}));
    }
    else if (auto lam = def->isa_mut<Lam>()) {
        return make_stub(lam, No_Env, false);
    }
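    // Other mutables are rewritten through a stub: each set op is rewritten into the stub,
    // and the stub is immutabilized where possible. Axioms are kept as they are.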
    else if (auto mut = def->isa_mut()) {
        // ...
        auto new_mut = mut->stub(new_type);
        // ...
        for (size_t i = 0; i < mut->num_ops(); i++)
            if (mut->op(i)) new_mut->set(i, rewrite(mut->op(i)));
        // ...
        if (auto imm = new_mut->immutabilize()) return map(mut, imm);
        // ... (otherwise the stub is the result)
    }
    else if (def->isa<Axiom>()) {
        // ...
    }
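    // Remaining immutables are rebuilt from their rewritten ops. Applications whose callee
    // is an Extract (a call through a closure) may get the dummy return appended to their
    // argument (new_ops[1]) so that it matches the adjusted basic-block domain.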
    auto new_ops = DefVec(def->num_ops(), [&](auto i) { return rewrite(def->op(i)); });
    // ...
    if (auto app = def->isa<App>()) {
        // ...
        if (auto p = app->callee()->isa<Extract>();
            /* ... */) // the callee goes through a closure; its argument gets the dummy return
            new_ops[1] = insert_ret(new_ops[1], dummy_ret_);
        // ...
    }
    auto new_def = def->rebuild(new_type, new_ops);
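    // Keep the memory token in sync: operands of type mem::M in the rebuilt node are
    // refined to the current lcm_, and if the result itself carries a memory (directly or
    // as a component of a sigma), lcm_ and lvm_ are updated from the new and the old def.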
    for (size_t i = 0; i < new_def->num_ops(); i++)
        if (new_def->op(i)->type() == w.annex<mem::M>()) new_def = new_def->refine(i, lcm_);

    if (new_type == w.annex<mem::M>()) {
        // ... (new_def/def become the new lcm_/lvm_)
    } else if (new_type->isa<Sigma>()) {
        for (size_t i = 0; i < new_type->num_ops(); i++) {
            if (new_type->op(i) == w.annex<mem::M>()) {
                lcm_ = w.extract(new_def, i);
                lvm_ = w.extract(def, i);
                // ...
            }
        }
    }

    return map(def, new_def);
}