@@ -5205,92 +5205,92 @@ impl Function {
52055205 // other HIR instructions before a second store to the same offset occurs.
52065206 // Removing one of these two stores does not alter program behavior.
52075207 // TODO: Improve comments and clean up algorithm sketch
5208- fn eliminate_dead_stores ( & mut self ) {
5209- #[ derive( PartialEq , Eq , Hash ) ]
5210- struct StoreHeap {
5211- // TODO: Is object a good name here? If not, what should it be instead?
5212- object : InsnId ,
5213- offset : i32 ,
5214- }
5215-
5216- // This helper function is used while we don't have type based alias
5217- // analysis. Matching offsets could result in aliased objects that need
5218- // to be cleared.
5219- fn prune_stores ( map : & mut HashMap < StoreHeap , InsnId > , offset : i32 ) {
5220- map. retain ( |StoreHeap { object : _, offset : off } , _| * off != offset) ;
5221- }
5222-
5223- for block in self . rpo ( ) {
5224- // TODO: Probably convert this to a vec of tuples? Time complexity is worse but constant factors are better
5225- let mut active_stores: HashMap < StoreHeap , InsnId > = HashMap :: new ( ) ;
5226- let insns = std:: mem:: take ( & mut self . blocks [ block. 0 ] . insns ) ;
5227- let mut new_insns = vec ! [ ] ;
5228- for insn_id in insns. iter ( ) . rev ( ) . copied ( ) {
5229- match self . find ( insn_id) {
5230- Insn :: StoreField { recv, offset, .. } => {
5231- let key = StoreHeap { object : self . chase_insn ( recv) , offset } ;
5232- // If an older store is being tracked, then there have
5233- // been no uses between stores and the first store is
5234- // dead and should not be emitted.
5235- if active_stores. contains_key ( & key) {
5236- continue
5237- }
5238- // Otherwise, we have a new store to begin tracking.
5239- // Because we don't have type based alias analysis, we
5240- // need to remove all other stores with the same offset
5241- // before adding our own.
5242- else {
5243- prune_stores ( & mut active_stores, offset) ;
5244- active_stores. insert ( key, insn_id) ;
5245- }
5246- }
5247- // If a load is found, then any earlier store was
5248- // necessary. We know this because redundant loads and
5249- // stores were already eliminated, implying the
5250- // hypothetical StoreField we haven't found yet has a
5251- // different value than the one in active_stores.
5252- // The following lines are required when we have type
5253- // based alias analysis and the following call to prune
5254- // stores is no longer used.
5255- // Insn::LoadField { recv, offset, .. } => {
5256- // let key = StoreHeap { object: self.chase_insn(recv), offset };
5257- // active_stores.remove(&key);
5258- Insn :: LoadField { offset, .. } => {
5259- // Because we don't have type based alias analysis, we
5260- // can't just remove the (up to) one entry of the
5261- // StoreHeap key in active_stores. We need to remove all
5262- // such keys that could alias.
5263- prune_stores ( & mut active_stores, offset) ;
5264- }
5265- Insn :: WriteBarrier { .. } => {
5266- // Currently, WriteBarrier write effects are Allocator and Memory when we'd really like them to be flags.
5267- // We don't use LoadField for mark bits so we can ignore them for now.
5268- // But flags does not exist in our effects abstract heap modeling and we don't want to add special casing to effects.
5269- // This special casing in this pass here should be removed once we refine our effects system to provide greater granularity for WriteBarrier.
5270- // TODO: use TBAA
5271- let offset = RUBY_OFFSET_RBASIC_FLAGS ;
5272- prune_stores ( & mut active_stores, offset) ;
5273- } ,
5274- insn => {
5275- // If the instruction can read memory, we cannot assume
5276- // entries of active_stores are not loaded. This is because we use
5277- // extended basic blocks and we should probably change this to be
5278- // standard basic blocks.
5279- // TODO: Add a test for this case
5280- if insn. effects_of ( ) . includes ( Effect :: read ( abstract_heaps:: Memory ) ) {
5281- active_stores. clear ( ) ;
5282- }
5283- else if insn. effects_of ( ) . includes ( effects:: Control ) {
5284- active_stores. clear ( ) ;
5285- }
5286- }
5287- }
5288- new_insns. push ( insn_id) ;
5289- }
5290- new_insns. reverse ( ) ;
5291- self . blocks [ block. 0 ] . insns = new_insns;
5292- }
5293- }
5208+ // fn eliminate_dead_stores(&mut self) {
5209+ // #[derive(PartialEq, Eq, Hash)]
5210+ // struct StoreHeap {
5211+ // // TODO: Is object a good name here? If not, what should it be instead?
5212+ // object: InsnId,
5213+ // offset: i32,
5214+ // }
5215+
5216+ // // This helper function is used while we don't have type based alias
5217+ // // analysis. Matching offsets could result in aliased objects that need
5218+ // // to be cleared.
5219+ // fn prune_stores(map: &mut HashMap<StoreHeap, InsnId>, offset: i32) {
5220+ // map.retain(|StoreHeap { object: _, offset: off }, _| *off != offset);
5221+ // }
5222+
5223+ // for block in self.rpo() {
5224+ // // TODO: Probably convert this to a vec of tuples? Time complexity is worse but constant factors are better
5225+ // let mut active_stores: HashMap<StoreHeap, InsnId> = HashMap::new();
5226+ // let insns = std::mem::take(&mut self.blocks[block.0].insns);
5227+ // let mut new_insns = vec![];
5228+ // for insn_id in insns.iter().rev().copied() {
5229+ // match self.find(insn_id) {
5230+ // Insn::StoreField { recv, offset, .. } => {
5231+ // let key = StoreHeap { object: self.chase_insn(recv), offset };
5232+ // // If an older store is being tracked, then there have
5233+ // // been no uses between stores and the first store is
5234+ // // dead and should not be emitted.
5235+ // if active_stores.contains_key(&key) {
5236+ // continue
5237+ // }
5238+ // // Otherwise, we have a new store to begin tracking.
5239+ // // Because we don't have type based alias analysis, we
5240+ // // need to remove all other stores with the same offset
5241+ // // before adding our own.
5242+ // else {
5243+ // prune_stores(&mut active_stores, offset);
5244+ // active_stores.insert(key, insn_id);
5245+ // }
5246+ // }
5247+ // // If a load is found, then any earlier store was
5248+ // // necessary. We know this because redundant loads and
5249+ // // stores were already eliminated, implying the
5250+ // // hypothetical StoreField we haven't found yet has a
5251+ // // different value than the one in active_stores.
5252+ // // The following lines are required when we have type
5253+ // // based alias analysis and the following call to prune
5254+ // // stores is no longer used.
5255+ // // Insn::LoadField { recv, offset, .. } => {
5256+ // // let key = StoreHeap { object: self.chase_insn(recv), offset };
5257+ // // active_stores.remove(&key);
5258+ // Insn::LoadField{ offset, .. } => {
5259+ // // Because we don't have type based alias analysis, we
5260+ // // can't just remove the (up to) one entry of the
5261+ // // StoreHeap key in active_stores. We need to remove all
5262+ // // such keys that could alias.
5263+ // prune_stores(&mut active_stores, offset);
5264+ // }
5265+ // Insn::WriteBarrier { .. } => {
5266+ // // Currently, WriteBarrier write effects are Allocator and Memory when we'd really like them to be flags.
5267+ // // We don't use LoadField for mark bits so we can ignore them for now.
5268+ // // But flags does not exist in our effects abstract heap modeling and we don't want to add special casing to effects.
5269+ // // This special casing in this pass here should be removed once we refine our effects system to provide greater granularity for WriteBarrier.
5270+ // // TODO: use TBAA
5271+ // let offset = RUBY_OFFSET_RBASIC_FLAGS;
5272+ // prune_stores(&mut active_stores, offset);
5273+ // },
5274+ // insn => {
5275+ // // If the instruction can read memory, we cannot assume
5276+ // // entries of active_stores are not loaded. This is because we use
5277+ // // extended basic blocks and we should probably change this to be
5278+ // // standard basic blocks.
5279+ // // TODO: Add a test for this case
5280+ // if insn.effects_of().includes(Effect::read(abstract_heaps::Memory)) {
5281+ // active_stores.clear();
5282+ // }
5283+ // else if insn.effects_of().includes(effects::Control) {
5284+ // active_stores.clear();
5285+ // }
5286+ // }
5287+ // }
5288+ // new_insns.push(insn_id);
5289+ // }
5290+ // new_insns.reverse();
5291+ // self.blocks[block.0].insns = new_insns;
5292+ // }
5293+ // }
52945294
52955295 /// Fold a binary operator on fixnums.
52965296 fn fold_fixnum_bop ( & mut self , insn_id : InsnId , left : InsnId , right : InsnId , f : impl FnOnce ( Option < i64 > , Option < i64 > ) -> Option < i64 > ) -> InsnId {
@@ -5961,7 +5961,6 @@ impl Function {
59615961 ( convert_no_profile_sends) => { Counter :: compile_hir_strength_reduce_time_ns } ;
59625962 // End strength reduction bucket
59635963 ( optimize_load_store) => { Counter :: compile_hir_optimize_load_store_time_ns } ;
5964- ( eliminate_dead_stores) => { Counter :: compile_hir_eliminate_dead_stores_time_ns } ;
59655964 ( fold_constants) => { Counter :: compile_hir_fold_constants_time_ns } ;
59665965 ( clean_cfg) => { Counter :: compile_hir_clean_cfg_time_ns } ;
59675966 ( remove_redundant_patch_points) => { Counter :: compile_hir_remove_redundant_patch_points_time_ns } ;
@@ -5996,7 +5995,6 @@ impl Function {
59965995 run_pass ! ( optimize_c_calls) ;
59975996 run_pass ! ( convert_no_profile_sends) ;
59985997 run_pass ! ( optimize_load_store) ;
5999- // run_pass!(eliminate_dead_stores);
60005998 run_pass ! ( fold_constants) ;
60015999 run_pass ! ( clean_cfg) ;
60026000 run_pass ! ( remove_redundant_patch_points) ;
0 commit comments