Skip to content

Commit 49f77a0

Browse files
committed
Merge DSE and load-store passes into the same pass
1 parent 453aec9 commit 49f77a0

1 file changed

Lines changed: 17 additions & 100 deletions

File tree

zjit/src/hir.rs

Lines changed: 17 additions & 100 deletions
Original file line numberDiff line numberDiff line change
@@ -5154,6 +5154,19 @@ impl Function {
51545154
new_insns.push(insn_id);
51555155
}
51565156

5157+
5158+
// get the block
5159+
// load-store optimization
5160+
// reverse updated instructions
5161+
// DSE
5162+
// reverse final updated instructions
5163+
// return
5164+
//
5165+
// get the block
5166+
// load-store optimization, DSE* after (same iteration)
5167+
// instead of eliding the store, mark the earlier store for removal
5168+
// return
5169+
51575170
// After performing scalar replacement on loads and stores where
51585171
// applicable, we scan in reverse to find and eliminate dead stores
51595172
let mut reversed_insns = std::mem::take(&mut new_insns);
@@ -5181,11 +5194,10 @@ impl Function {
51815194
remove_aliasing(&mut compile_time_heap, offset);
51825195
},
51835196
insn => {
5184-
// TODO: Add test case for control flow potential bugs
5185-
if insn.effects_of().includes(Effect::read(abstract_heaps::Memory)) {
5186-
compile_time_heap.clear();
5187-
}
5188-
else if insn.effects_of().includes(effects::Control) {
5197+
let insn_reads_memory = insn.effects_of().includes(Effect::read(abstract_heaps::Memory));
5198+
let insn_uses_control_flow = insn.effects_of().includes(effects::Control);
5199+
// TODO: Once we update from extended basic blocks to normal basic blocks, we can remove the control flow check
5200+
if insn_reads_memory || insn_uses_control_flow {
51895201
compile_time_heap.clear();
51905202
}
51915203
}
@@ -5197,101 +5209,6 @@ impl Function {
51975209
}
51985210
}
51995211

5200-
// This pass assumes that load_store_optimization has already run
5201-
// This means that we will never have cases where loads and stores are
5202-
// repeated with the same offset and value.
5203-
// However, we still may have unnecessary stores. For the purposes of this
5204-
// pass, unnecessary means that a store to an offset is not utilized by
5205-
// other HIR instructions before a second store to the same offset occurs.
5206-
// Removing one of these two stores does not alter program behavior.
5207-
// TODO: Improve comments and clean up algorithm sketch
5208-
// fn eliminate_dead_stores(&mut self) {
5209-
// #[derive(PartialEq, Eq, Hash)]
5210-
// struct StoreHeap {
5211-
// // TODO: Is object a good name here? If not, what should it be instead?
5212-
// object: InsnId,
5213-
// offset: i32,
5214-
// }
5215-
5216-
// // This helper function is used while we don't have type based alias
5217-
// // analysis. Matching offsets could result in aliased objects that need
5218-
// // to be cleared.
5219-
// fn prune_stores(map: &mut HashMap<StoreHeap, InsnId>, offset: i32) {
5220-
// map.retain(|StoreHeap { object: _, offset: off }, _| *off != offset);
5221-
// }
5222-
5223-
// for block in self.rpo() {
5224-
// // TODO: Probably convert this to a vec of tuples? Time complexity is worse but constant factors are better
5225-
// let mut active_stores: HashMap<StoreHeap, InsnId> = HashMap::new();
5226-
// let insns = std::mem::take(&mut self.blocks[block.0].insns);
5227-
// let mut new_insns = vec![];
5228-
// for insn_id in insns.iter().rev().copied() {
5229-
// match self.find(insn_id) {
5230-
// Insn::StoreField { recv, offset, .. } => {
5231-
// let key = StoreHeap { object: self.chase_insn(recv), offset };
5232-
// // If an older store is being tracked, then there have
5233-
// // been no uses between stores and the first store is
5234-
// // dead and should not be emitted.
5235-
// if active_stores.contains_key(&key) {
5236-
// continue
5237-
// }
5238-
// // Otherwise, we have a new store to begin tracking.
5239-
// // Because we don't have type based alias analysis, we
5240-
// // need to remove all other stores with the same offset
5241-
// // before adding our own.
5242-
// else {
5243-
// prune_stores(&mut active_stores, offset);
5244-
// active_stores.insert(key, insn_id);
5245-
// }
5246-
// }
5247-
// // If a load is found, then any earlier store was
5248-
// // necessary. We know this because redundant loads and
5249-
// // stores were already eliminated, implying the
5250-
// // hypothetical StoreField we haven't found yet has a
5251-
// // different value than the one in active_stores.
5252-
// // The following lines are required when we have type
5253-
// // based alias analysis and the following call to prune
5254-
// // stores is no longer used.
5255-
// // Insn::LoadField { recv, offset, .. } => {
5256-
// // let key = StoreHeap { object: self.chase_insn(recv), offset };
5257-
// // active_stores.remove(&key);
5258-
// Insn::LoadField{ offset, .. } => {
5259-
// // Because we don't have type based alias analysis, we
5260-
// // can't just remove the (up to) one entry of the
5261-
// // StoreHeap key in active_stores. We need to remove all
5262-
// // such keys that could alias.
5263-
// prune_stores(&mut active_stores, offset);
5264-
// }
5265-
// Insn::WriteBarrier { .. } => {
5266-
// // Currently, WriteBarrier write effects are Allocator and Memory when we'd really like them to be flags.
5267-
// // We don't use LoadField for mark bits so we can ignore them for now.
5268-
// // But flags does not exist in our effects abstract heap modeling and we don't want to add special casing to effects.
5269-
// // This special casing in this pass here should be removed once we refine our effects system to provide greater granularity for WriteBarrier.
5270-
// // TODO: use TBAA
5271-
// let offset = RUBY_OFFSET_RBASIC_FLAGS;
5272-
// prune_stores(&mut active_stores, offset);
5273-
// },
5274-
// insn => {
5275-
// // If the instruction can read memory, we cannot assume
5276-
// // entries of active_stores are not loaded. This is because we use
5277-
// // extended basic blocks and we should probably change this to be
5278-
// // standard basic blocks.
5279-
// // TODO: Add a test for this case
5280-
// if insn.effects_of().includes(Effect::read(abstract_heaps::Memory)) {
5281-
// active_stores.clear();
5282-
// }
5283-
// else if insn.effects_of().includes(effects::Control) {
5284-
// active_stores.clear();
5285-
// }
5286-
// }
5287-
// }
5288-
// new_insns.push(insn_id);
5289-
// }
5290-
// new_insns.reverse();
5291-
// self.blocks[block.0].insns = new_insns;
5292-
// }
5293-
// }
5294-
52955212
/// Fold a binary operator on fixnums.
52965213
fn fold_fixnum_bop(&mut self, insn_id: InsnId, left: InsnId, right: InsnId, f: impl FnOnce(Option<i64>, Option<i64>) -> Option<i64>) -> InsnId {
52975214
f(self.type_of(left).fixnum_value(), self.type_of(right).fixnum_value())

0 commit comments

Comments
 (0)