@@ -5101,92 +5101,92 @@ impl Function {
51015101 // other HIR instructions before a second store to the same offset occurs.
51025102 // Removing one of these two stores does not alter program behavior.
51035103 // TODO: Improve comments and clean up algorithm sketch
5104- fn eliminate_dead_stores ( & mut self ) {
5105- #[ derive( PartialEq , Eq , Hash ) ]
5106- struct StoreHeap {
5107- // TODO: Is object a good name here? If not, what should it be instead?
5108- object : InsnId ,
5109- offset : i32 ,
5110- }
5111-
5112- // This helper function is used while we don't have type based alias
5113- // analysis. Matching offsets could result in aliased objects that need
5114- // to be cleared.
5115- fn prune_stores ( map : & mut HashMap < StoreHeap , InsnId > , offset : i32 ) {
5116- map. retain ( |StoreHeap { object : _, offset : off } , _| * off != offset) ;
5117- }
5118-
5119- for block in self . rpo ( ) {
5120- // TODO: Probably convert this to a vec of tuples? Time complexity is worse but constant factors are better
5121- let mut active_stores: HashMap < StoreHeap , InsnId > = HashMap :: new ( ) ;
5122- let insns = std:: mem:: take ( & mut self . blocks [ block. 0 ] . insns ) ;
5123- let mut new_insns = vec ! [ ] ;
5124- for insn_id in insns. iter ( ) . rev ( ) . copied ( ) {
5125- match self . find ( insn_id) {
5126- Insn :: StoreField { recv, offset, .. } => {
5127- let key = StoreHeap { object : self . chase_insn ( recv) , offset } ;
5128- // If an older store is being tracked, then there have
5129- // been no uses between stores and the first store is
5130- // dead and should not be emitted.
5131- if active_stores. contains_key ( & key) {
5132- continue
5133- }
5134- // Otherwise, we have a new store to begin tracking.
5135- // Because we don't have type based alias analysis, we
5136- // need to remove all other stores with the same offset
5137- // before adding our own.
5138- else {
5139- prune_stores ( & mut active_stores, offset) ;
5140- active_stores. insert ( key, insn_id) ;
5141- }
5142- }
5143- // If a load is found, then any earlier store was
5144- // necessary. We know this because redundant loads and
5145- // stores were already eliminated, implying the
5146- // hypothetical StoreField we haven't found yet has a
5147- // different value than the one in active_stores.
5148- // The following lines are required when we have type
5149- // based alias analysis and the following call to prune
5150- // stores is no longer used.
5151- // Insn::LoadField { recv, offset, .. } => {
5152- // let key = StoreHeap { object: self.chase_insn(recv), offset };
5153- // active_stores.remove(&key);
5154- Insn :: LoadField { offset, .. } => {
5155- // Because we don't have type based alias analysis, we
5156- // can't just remove the (up to) one entry of the
5157- // StoreHeap key in active_stores. We need to remove all
5158- // such keys that could alias.
5159- prune_stores ( & mut active_stores, offset) ;
5160- }
5161- Insn :: WriteBarrier { .. } => {
5162- // Currently, WriteBarrier write effects are Allocator and Memory when we'd really like them to be flags.
5163- // We don't use LoadField for mark bits so we can ignore them for now.
5164- // But flags does not exist in our effects abstract heap modeling and we don't want to add special casing to effects.
5165- // This special casing in this pass here should be removed once we refine our effects system to provide greater granularity for WriteBarrier.
5166- // TODO: use TBAA
5167- let offset = RUBY_OFFSET_RBASIC_FLAGS ;
5168- prune_stores ( & mut active_stores, offset) ;
5169- } ,
5170- insn => {
5171- // If the instruction can read memory, we cannot assume
5172- // entries of active_stores are not loaded. This is because we use
5173- // extended basic blocks and we should probably change this to be
5174- // standard basic blocks.
5175- // TODO: Add a test for this case
5176- if insn. effects_of ( ) . includes ( Effect :: read ( abstract_heaps:: Memory ) ) {
5177- active_stores. clear ( ) ;
5178- }
5179- else if insn. effects_of ( ) . includes ( effects:: Control ) {
5180- active_stores. clear ( ) ;
5181- }
5182- }
5183- }
5184- new_insns. push ( insn_id) ;
5185- }
5186- new_insns. reverse ( ) ;
5187- self . blocks [ block. 0 ] . insns = new_insns;
5188- }
5189- }
5104+ // fn eliminate_dead_stores(&mut self) {
5105+ // #[derive(PartialEq, Eq, Hash)]
5106+ // struct StoreHeap {
5107+ // // TODO: Is object a good name here? If not, what should it be instead?
5108+ // object: InsnId,
5109+ // offset: i32,
5110+ // }
5111+
5112+ // // This helper function is used while we don't have type based alias
5113+ // // analysis. Matching offsets could result in aliased objects that need
5114+ // // to be cleared.
5115+ // fn prune_stores(map: &mut HashMap<StoreHeap, InsnId>, offset: i32) {
5116+ // map.retain(|StoreHeap { object: _, offset: off }, _| *off != offset);
5117+ // }
5118+
5119+ // for block in self.rpo() {
5120+ // // TODO: Probably convert this to a vec of tuples? Time complexity is worse but constant factors are better
5121+ // let mut active_stores: HashMap<StoreHeap, InsnId> = HashMap::new();
5122+ // let insns = std::mem::take(&mut self.blocks[block.0].insns);
5123+ // let mut new_insns = vec![];
5124+ // for insn_id in insns.iter().rev().copied() {
5125+ // match self.find(insn_id) {
5126+ // Insn::StoreField { recv, offset, .. } => {
5127+ // let key = StoreHeap { object: self.chase_insn(recv), offset };
5128+ // // If an older store is being tracked, then there have
5129+ // // been no uses between stores and the first store is
5130+ // // dead and should not be emitted.
5131+ // if active_stores.contains_key(&key) {
5132+ // continue
5133+ // }
5134+ // // Otherwise, we have a new store to begin tracking.
5135+ // // Because we don't have type based alias analysis, we
5136+ // // need to remove all other stores with the same offset
5137+ // // before adding our own.
5138+ // else {
5139+ // prune_stores(&mut active_stores, offset);
5140+ // active_stores.insert(key, insn_id);
5141+ // }
5142+ // }
5143+ // // If a load is found, then any earlier store was
5144+ // // necessary. We know this because redundant loads and
5145+ // // stores were already eliminated, implying the
5146+ // // hypothetical StoreField we haven't found yet has a
5147+ // // different value than the one in active_stores.
5148+ // // The following lines are required when we have type
5149+ // // based alias analysis and the following call to prune
5150+ // // stores is no longer used.
5151+ // // Insn::LoadField { recv, offset, .. } => {
5152+ // // let key = StoreHeap { object: self.chase_insn(recv), offset };
5153+ // // active_stores.remove(&key);
5154+ // Insn::LoadField{ offset, .. } => {
5155+ // // Because we don't have type based alias analysis, we
5156+ // // can't just remove the (up to) one entry of the
5157+ // // StoreHeap key in active_stores. We need to remove all
5158+ // // such keys that could alias.
5159+ // prune_stores(&mut active_stores, offset);
5160+ // }
5161+ // Insn::WriteBarrier { .. } => {
5162+ // // Currently, WriteBarrier write effects are Allocator and Memory when we'd really like them to be flags.
5163+ // // We don't use LoadField for mark bits so we can ignore them for now.
5164+ // // But flags does not exist in our effects abstract heap modeling and we don't want to add special casing to effects.
5165+ // // This special casing in this pass here should be removed once we refine our effects system to provide greater granularity for WriteBarrier.
5166+ // // TODO: use TBAA
5167+ // let offset = RUBY_OFFSET_RBASIC_FLAGS;
5168+ // prune_stores(&mut active_stores, offset);
5169+ // },
5170+ // insn => {
5171+ // // If the instruction can read memory, we cannot assume
5172+ // // entries of active_stores are not loaded. This is because we use
5173+ // // extended basic blocks and we should probably change this to be
5174+ // // standard basic blocks.
5175+ // // TODO: Add a test for this case
5176+ // if insn.effects_of().includes(Effect::read(abstract_heaps::Memory)) {
5177+ // active_stores.clear();
5178+ // }
5179+ // else if insn.effects_of().includes(effects::Control) {
5180+ // active_stores.clear();
5181+ // }
5182+ // }
5183+ // }
5184+ // new_insns.push(insn_id);
5185+ // }
5186+ // new_insns.reverse();
5187+ // self.blocks[block.0].insns = new_insns;
5188+ // }
5189+ // }
51905190
51915191 /// Fold a binary operator on fixnums.
51925192 fn fold_fixnum_bop ( & mut self , insn_id : InsnId , left : InsnId , right : InsnId , f : impl FnOnce ( Option < i64 > , Option < i64 > ) -> Option < i64 > ) -> InsnId {
@@ -5827,7 +5827,6 @@ impl Function {
58275827 ( optimize_c_calls) => { Counter :: compile_hir_strength_reduce_time_ns } ;
58285828 // End strength reduction bucket
58295829 ( optimize_load_store) => { Counter :: compile_hir_optimize_load_store_time_ns } ;
5830- ( eliminate_dead_stores) => { Counter :: compile_hir_eliminate_dead_stores_time_ns } ;
58315830 ( fold_constants) => { Counter :: compile_hir_fold_constants_time_ns } ;
58325831 ( clean_cfg) => { Counter :: compile_hir_clean_cfg_time_ns } ;
58335832 ( remove_redundant_patch_points) => { Counter :: compile_hir_remove_redundant_patch_points_time_ns } ;
@@ -5859,7 +5858,6 @@ impl Function {
58595858 run_pass ! ( optimize_getivar) ;
58605859 run_pass ! ( optimize_c_calls) ;
58615860 run_pass ! ( optimize_load_store) ;
5862- // run_pass!(eliminate_dead_stores);
58635861 run_pass ! ( fold_constants) ;
58645862 run_pass ! ( clean_cfg) ;
58655863 run_pass ! ( remove_redundant_patch_points) ;
0 commit comments