Home | History | Annotate | Line # | Download | only in gcc
      1  1.1  mrg /* Generic partial redundancy elimination with lazy code motion support.
      2  1.1  mrg    Copyright (C) 1998-2022 Free Software Foundation, Inc.
      3  1.1  mrg 
      4  1.1  mrg This file is part of GCC.
      5  1.1  mrg 
      6  1.1  mrg GCC is free software; you can redistribute it and/or modify it under
      7  1.1  mrg the terms of the GNU General Public License as published by the Free
      8  1.1  mrg Software Foundation; either version 3, or (at your option) any later
      9  1.1  mrg version.
     10  1.1  mrg 
     11  1.1  mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12  1.1  mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13  1.1  mrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14  1.1  mrg for more details.
     15  1.1  mrg 
     16  1.1  mrg You should have received a copy of the GNU General Public License
     17  1.1  mrg along with GCC; see the file COPYING3.  If not see
     18  1.1  mrg <http://www.gnu.org/licenses/>.  */
     19  1.1  mrg 
     20  1.1  mrg /* These routines are meant to be used by various optimization
     21  1.1  mrg    passes which can be modeled as lazy code motion problems.
     22  1.1  mrg    Including, but not limited to:
     23  1.1  mrg 
     24  1.1  mrg 	* Traditional partial redundancy elimination.
     25  1.1  mrg 
      26  1.1  mrg 	* Placement of caller/callee register save/restores.
     27  1.1  mrg 
     28  1.1  mrg 	* Load/store motion.
     29  1.1  mrg 
     30  1.1  mrg 	* Copy motion.
     31  1.1  mrg 
     32  1.1  mrg 	* Conversion of flat register files to a stacked register
     33  1.1  mrg 	model.
     34  1.1  mrg 
     35  1.1  mrg 	* Dead load/store elimination.
     36  1.1  mrg 
     37  1.1  mrg   These routines accept as input:
     38  1.1  mrg 
     39  1.1  mrg 	* Basic block information (number of blocks, lists of
     40  1.1  mrg 	predecessors and successors).  Note the granularity
     41  1.1  mrg 	does not need to be basic block, they could be statements
     42  1.1  mrg 	or functions.
     43  1.1  mrg 
     44  1.1  mrg 	* Bitmaps of local properties (computed, transparent and
     45  1.1  mrg 	anticipatable expressions).
     46  1.1  mrg 
     47  1.1  mrg   The output of these routines is bitmap of redundant computations
     48  1.1  mrg   and a bitmap of optimal placement points.  */
     49  1.1  mrg 
     50  1.1  mrg 
     51  1.1  mrg #include "config.h"
     52  1.1  mrg #include "system.h"
     53  1.1  mrg #include "coretypes.h"
     54  1.1  mrg #include "backend.h"
     55  1.1  mrg #include "cfganal.h"
     56  1.1  mrg #include "lcm.h"
     57  1.1  mrg 
     58  1.1  mrg /* Edge based LCM routines.  */
     59  1.1  mrg static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
     60  1.1  mrg static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
     61  1.1  mrg 			      sbitmap *, sbitmap *, sbitmap *);
     62  1.1  mrg static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
     63  1.1  mrg 			     sbitmap *, sbitmap *);
     64  1.1  mrg static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
     65  1.1  mrg 				   sbitmap *, sbitmap *, sbitmap *, sbitmap *);
     66  1.1  mrg 
     67  1.1  mrg /* Edge based LCM routines on a reverse flowgraph.  */
     68  1.1  mrg static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
     69  1.1  mrg 			      sbitmap*, sbitmap *, sbitmap *);
     70  1.1  mrg static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
     71  1.1  mrg 			       sbitmap *, sbitmap *);
     72  1.1  mrg static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
     73  1.1  mrg 				       sbitmap *, sbitmap *, sbitmap *,
     74  1.1  mrg 				       sbitmap *);
     75  1.1  mrg 
     76  1.1  mrg /* Edge based lcm routines.  */
     78  1.1  mrg 
     79  1.1  mrg /* Compute expression anticipatability at entrance and exit of each block.
     80  1.1  mrg    This is done based on the flow graph, and not on the pred-succ lists.
      81  1.1  mrg    Other than that, it's pretty much identical to compute_antinout.  */
     82  1.1  mrg 
/* ANTLOC and TRANSP are the per-block local properties; ANTIN and ANTOUT
   receive the global solution.  All four vectors are indexed by basic
   block number.  */

static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  /* Circular queue of blocks pending recomputation; a block is on the
     queue iff its aux field is non-null.  */
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  Seed the queue in
     postorder, which tends to reduce the number of iterations this
     backward dataflow problem needs to converge.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = post_order_compute (postorder, false, false);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      *qin++ = bb;
      bb->aux = bb;
    }
  free (postorder);

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  Their ANTOUT is simply empty: nothing is anticipated
	   past the end of the function.  */
	bitmap_clear (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_succs (antout[bb->index], antin, bb);
	}

      /* ANTIN = ANTLOC | (TRANSP & ANTOUT).  */
      if (bitmap_or_and (antin[bb->index], antloc[bb->index],
				   transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
    166  1.1  mrg 
    167  1.1  mrg /* Compute the earliest vector for edge based lcm.  */
    168  1.1  mrg 
    169  1.1  mrg static void
    170  1.1  mrg compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
    171  1.1  mrg 		  sbitmap *antout, sbitmap *avout, sbitmap *kill,
    172  1.1  mrg 		  sbitmap *earliest)
    173  1.1  mrg {
    174  1.1  mrg   int x, num_edges;
    175  1.1  mrg   basic_block pred, succ;
    176  1.1  mrg 
    177  1.1  mrg   num_edges = NUM_EDGES (edge_list);
    178  1.1  mrg 
    179  1.1  mrg   auto_sbitmap difference (n_exprs), temp_bitmap (n_exprs);
    180  1.1  mrg   for (x = 0; x < num_edges; x++)
    181  1.1  mrg     {
    182  1.1  mrg       pred = INDEX_EDGE_PRED_BB (edge_list, x);
    183  1.1  mrg       succ = INDEX_EDGE_SUCC_BB (edge_list, x);
    184  1.1  mrg       if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    185  1.1  mrg 	bitmap_copy (earliest[x], antin[succ->index]);
    186  1.1  mrg       else
    187  1.1  mrg 	{
    188  1.1  mrg 	  if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
    189  1.1  mrg 	    bitmap_clear (earliest[x]);
    190  1.1  mrg 	  else
    191  1.1  mrg 	    {
    192  1.1  mrg 	      bitmap_and_compl (difference, antin[succ->index],
    193  1.1  mrg 				  avout[pred->index]);
    194  1.1  mrg 	      bitmap_not (temp_bitmap, antout[pred->index]);
    195  1.1  mrg 	      bitmap_and_or (earliest[x], difference,
    196  1.1  mrg 				    kill[pred->index], temp_bitmap);
    197  1.1  mrg 	    }
    198  1.1  mrg 	}
    199  1.1  mrg     }
    200  1.1  mrg }
    201  1.1  mrg 
    202  1.1  mrg /* later(p,s) is dependent on the calculation of laterin(p).
    203  1.1  mrg    laterin(p) is dependent on the calculation of later(p2,p).
    204  1.1  mrg 
    205  1.1  mrg      laterin(ENTRY) is defined as all 0's
    206  1.1  mrg      later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
    207  1.1  mrg      laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).
    208  1.1  mrg 
    209  1.1  mrg    If we progress in this manner, starting with all basic blocks
    210  1.1  mrg    in the work list, anytime we change later(bb), we need to add
    211  1.1  mrg    succs(bb) to the worklist if they are not already on the worklist.
    212  1.1  mrg 
    213  1.1  mrg    Boundary conditions:
    214  1.1  mrg 
    215  1.1  mrg      We prime the worklist all the normal basic blocks.   The ENTRY block can
    216  1.1  mrg      never be added to the worklist since it is never the successor of any
    217  1.1  mrg      block.  We explicitly prevent the EXIT block from being added to the
    218  1.1  mrg      worklist.
    219  1.1  mrg 
    220  1.1  mrg      We optimistically initialize LATER.  That is the only time this routine
    221  1.1  mrg      will compute LATER for an edge out of the entry block since the entry
    222  1.1  mrg      block is never on the worklist.  Thus, LATERIN is neither used nor
    223  1.1  mrg      computed for the ENTRY block.
    224  1.1  mrg 
    225  1.1  mrg      Since the EXIT block is never added to the worklist, we will neither
    226  1.1  mrg      use nor compute LATERIN for the exit block.  Edges which reach the
    227  1.1  mrg      EXIT block are handled in the normal fashion inside the loop.  However,
    228  1.1  mrg      the insertion/deletion computation needs LATERIN(EXIT), so we have
    229  1.1  mrg      to compute it.  */
    230  1.1  mrg 
static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  /* Circular queue of blocks pending recomputation; a block is on the
     queue iff its aux field is non-null.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Initialize a mapping from each edge to its index.  The index is
     stashed in the edge's aux field and used below to address the
     LATER and EARLIEST vectors.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  bitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);
  for (unsigned int i = 0; i < postorder.length (); ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      bb->aux = bb;
    }

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it. */
  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      bitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
	bitmap_and (laterin[bb->index], laterin[bb->index],
		    later[(size_t)e->aux]);

      /* Calculate LATER for all outgoing edges:
	 LATER(e) = EARLIEST(e) | (LATERIN(bb) & ~ANTLOC(bb)).  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (bitmap_ior_and_compl (later[(size_t) e->aux],
				  earliest[(size_t) e->aux],
				  laterin[bb->index],
				  antloc[bb->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_and (laterin[last_basic_block_for_fn (cfun)],
		laterin[last_basic_block_for_fn (cfun)],
		later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
    338  1.1  mrg 
    339  1.1  mrg /* Compute the insertion and deletion points for edge based LCM.  */
    340  1.1  mrg 
    341  1.1  mrg static void
    342  1.1  mrg compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
    343  1.1  mrg 		       sbitmap *later, sbitmap *laterin, sbitmap *insert,
    344  1.1  mrg 		       sbitmap *del)
    345  1.1  mrg {
    346  1.1  mrg   int x;
    347  1.1  mrg   basic_block bb;
    348  1.1  mrg 
    349  1.1  mrg   FOR_EACH_BB_FN (bb, cfun)
    350  1.1  mrg     bitmap_and_compl (del[bb->index], antloc[bb->index],
    351  1.1  mrg 			laterin[bb->index]);
    352  1.1  mrg 
    353  1.1  mrg   for (x = 0; x < NUM_EDGES (edge_list); x++)
    354  1.1  mrg     {
    355  1.1  mrg       basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
    356  1.1  mrg 
    357  1.1  mrg       if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    358  1.1  mrg 	bitmap_and_compl (insert[x], later[x],
    359  1.1  mrg 			  laterin[last_basic_block_for_fn (cfun)]);
    360  1.1  mrg       else
    361  1.1  mrg 	bitmap_and_compl (insert[x], later[x], laterin[b->index]);
    362  1.1  mrg     }
    363  1.1  mrg }
    364  1.1  mrg 
    365  1.1  mrg /* Given local properties TRANSP, ANTLOC, AVLOC, KILL return the insert and
    366  1.1  mrg    delete vectors for edge based LCM  and return the AVIN, AVOUT bitmap.
    367  1.1  mrg    map the insert vector to what edge an expression should be inserted on.  */
    368  1.1  mrg 
    369  1.1  mrg struct edge_list *
    370  1.1  mrg pre_edge_lcm_avs (int n_exprs, sbitmap *transp,
    371  1.1  mrg 	      sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
    372  1.1  mrg 	      sbitmap *avin, sbitmap *avout,
    373  1.1  mrg 	      sbitmap **insert, sbitmap **del)
    374  1.1  mrg {
    375  1.1  mrg   sbitmap *antin, *antout, *earliest;
    376  1.1  mrg   sbitmap *later, *laterin;
    377  1.1  mrg   struct edge_list *edge_list;
    378  1.1  mrg   int num_edges;
    379  1.1  mrg 
    380  1.1  mrg   edge_list = create_edge_list ();
    381  1.1  mrg   num_edges = NUM_EDGES (edge_list);
    382  1.1  mrg 
    383  1.1  mrg #ifdef LCM_DEBUG_INFO
    384  1.1  mrg   if (dump_file)
    385  1.1  mrg     {
    386  1.1  mrg       fprintf (dump_file, "Edge List:\n");
    387  1.1  mrg       verify_edge_list (dump_file, edge_list);
    388  1.1  mrg       print_edge_list (dump_file, edge_list);
    389  1.1  mrg       dump_bitmap_vector (dump_file, "transp", "", transp,
    390  1.1  mrg 			  last_basic_block_for_fn (cfun));
    391  1.1  mrg       dump_bitmap_vector (dump_file, "antloc", "", antloc,
    392  1.1  mrg 			  last_basic_block_for_fn (cfun));
    393  1.1  mrg       dump_bitmap_vector (dump_file, "avloc", "", avloc,
    394  1.1  mrg 			  last_basic_block_for_fn (cfun));
    395  1.1  mrg       dump_bitmap_vector (dump_file, "kill", "", kill,
    396  1.1  mrg 			  last_basic_block_for_fn (cfun));
    397  1.1  mrg     }
    398  1.1  mrg #endif
    399  1.1  mrg 
    400  1.1  mrg   /* Compute global availability.  */
    401  1.1  mrg   compute_available (avloc, kill, avout, avin);
    402  1.1  mrg 
    403  1.1  mrg   /* Compute global anticipatability.  */
    404  1.1  mrg   antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    405  1.1  mrg   antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    406  1.1  mrg   compute_antinout_edge (antloc, transp, antin, antout);
    407  1.1  mrg 
    408  1.1  mrg #ifdef LCM_DEBUG_INFO
    409  1.1  mrg   if (dump_file)
    410  1.1  mrg     {
    411  1.1  mrg       dump_bitmap_vector (dump_file, "antin", "", antin,
    412  1.1  mrg 			  last_basic_block_for_fn (cfun));
    413  1.1  mrg       dump_bitmap_vector (dump_file, "antout", "", antout,
    414  1.1  mrg 			  last_basic_block_for_fn (cfun));
    415  1.1  mrg     }
    416  1.1  mrg #endif
    417  1.1  mrg 
    418  1.1  mrg   /* Compute earliestness.  */
    419  1.1  mrg   earliest = sbitmap_vector_alloc (num_edges, n_exprs);
    420  1.1  mrg   compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);
    421  1.1  mrg 
    422  1.1  mrg #ifdef LCM_DEBUG_INFO
    423  1.1  mrg   if (dump_file)
    424  1.1  mrg     dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
    425  1.1  mrg #endif
    426  1.1  mrg 
    427  1.1  mrg   sbitmap_vector_free (antout);
    428  1.1  mrg   sbitmap_vector_free (antin);
    429  1.1  mrg 
    430  1.1  mrg   later = sbitmap_vector_alloc (num_edges, n_exprs);
    431  1.1  mrg 
    432  1.1  mrg   /* Allocate an extra element for the exit block in the laterin vector.  */
    433  1.1  mrg   laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
    434  1.1  mrg 				  n_exprs);
    435  1.1  mrg   compute_laterin (edge_list, earliest, antloc, later, laterin);
    436  1.1  mrg 
    437  1.1  mrg #ifdef LCM_DEBUG_INFO
    438  1.1  mrg   if (dump_file)
    439  1.1  mrg     {
    440  1.1  mrg       dump_bitmap_vector (dump_file, "laterin", "", laterin,
    441  1.1  mrg 			  last_basic_block_for_fn (cfun) + 1);
    442  1.1  mrg       dump_bitmap_vector (dump_file, "later", "", later, num_edges);
    443  1.1  mrg     }
    444  1.1  mrg #endif
    445  1.1  mrg 
    446  1.1  mrg   sbitmap_vector_free (earliest);
    447  1.1  mrg 
    448  1.1  mrg   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
    449  1.1  mrg   *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    450  1.1  mrg   bitmap_vector_clear (*insert, num_edges);
    451  1.1  mrg   bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
    452  1.1  mrg   compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);
    453  1.1  mrg 
    454  1.1  mrg   sbitmap_vector_free (laterin);
    455  1.1  mrg   sbitmap_vector_free (later);
    456  1.1  mrg 
    457  1.1  mrg #ifdef LCM_DEBUG_INFO
    458  1.1  mrg   if (dump_file)
    459  1.1  mrg     {
    460  1.1  mrg       dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
    461  1.1  mrg       dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
    462  1.1  mrg 			  last_basic_block_for_fn (cfun));
    463  1.1  mrg     }
    464  1.1  mrg #endif
    465  1.1  mrg 
    466  1.1  mrg   return edge_list;
    467  1.1  mrg }
    468  1.1  mrg 
    469  1.1  mrg /* Wrapper to allocate avin/avout and call pre_edge_lcm_avs.  */
    470  1.1  mrg 
    471  1.1  mrg struct edge_list *
    472  1.1  mrg pre_edge_lcm (int n_exprs, sbitmap *transp,
    473  1.1  mrg 	      sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
    474  1.1  mrg 	      sbitmap **insert, sbitmap **del)
    475  1.1  mrg {
    476  1.1  mrg   struct edge_list *edge_list;
    477  1.1  mrg   sbitmap *avin, *avout;
    478  1.1  mrg 
    479  1.1  mrg   avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    480  1.1  mrg   avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    481  1.1  mrg 
    482  1.1  mrg   edge_list = pre_edge_lcm_avs (n_exprs, transp, avloc, antloc, kill,
    483  1.1  mrg 				 avin, avout, insert, del);
    484  1.1  mrg 
    485  1.1  mrg   sbitmap_vector_free (avout);
    486  1.1  mrg   sbitmap_vector_free (avin);
    487  1.1  mrg 
    488  1.1  mrg   return edge_list;
    489  1.1  mrg }
    490  1.1  mrg 
     491  1.1  mrg /* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors,
     492  1.1  mrg    iterating the dataflow equations until a fixed point is reached.  */
    493  1.1  mrg 
void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  /* Circular queue of blocks pending recomputation; a block is on the
     queue iff its aux field is non-null.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist =
    XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);

  /* We want a maximal solution.  */
  bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  Use inverted postorder
     to make the dataflow problem require less iterations.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);
  for (unsigned int i = 0; i < postorder.length (); ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	bitmap_clear (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_preds (avin[bb->index], avout, bb);
	}

      /* AVOUT = AVLOC | (AVIN & ~KILL).  */
      if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
				    avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
    582  1.1  mrg 
    583  1.1  mrg /* Compute the farthest vector for edge based lcm.  */
    584  1.1  mrg 
    585  1.1  mrg static void
    586  1.1  mrg compute_farthest (struct edge_list *edge_list, int n_exprs,
    587  1.1  mrg 		  sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
    588  1.1  mrg 		  sbitmap *kill, sbitmap *farthest)
    589  1.1  mrg {
    590  1.1  mrg   int x, num_edges;
    591  1.1  mrg   basic_block pred, succ;
    592  1.1  mrg 
    593  1.1  mrg   num_edges = NUM_EDGES (edge_list);
    594  1.1  mrg 
    595  1.1  mrg   auto_sbitmap difference (n_exprs), temp_bitmap (n_exprs);
    596  1.1  mrg   for (x = 0; x < num_edges; x++)
    597  1.1  mrg     {
    598  1.1  mrg       pred = INDEX_EDGE_PRED_BB (edge_list, x);
    599  1.1  mrg       succ = INDEX_EDGE_SUCC_BB (edge_list, x);
    600  1.1  mrg       if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
    601  1.1  mrg 	bitmap_copy (farthest[x], st_avout[pred->index]);
    602  1.1  mrg       else
    603  1.1  mrg 	{
    604  1.1  mrg 	  if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    605  1.1  mrg 	    bitmap_clear (farthest[x]);
    606  1.1  mrg 	  else
    607  1.1  mrg 	    {
    608  1.1  mrg 	      bitmap_and_compl (difference, st_avout[pred->index],
    609  1.1  mrg 				  st_antin[succ->index]);
    610  1.1  mrg 	      bitmap_not (temp_bitmap, st_avin[succ->index]);
    611  1.1  mrg 	      bitmap_and_or (farthest[x], difference,
    612  1.1  mrg 				    kill[succ->index], temp_bitmap);
    613  1.1  mrg 	    }
    614  1.1  mrg 	}
    615  1.1  mrg     }
    616  1.1  mrg }
    617  1.1  mrg 
    618  1.1  mrg /* Compute nearer and nearerout vectors for edge based lcm.
    619  1.1  mrg 
    620  1.1  mrg    This is the mirror of compute_laterin, additional comments on the
    621  1.1  mrg    implementation can be found before compute_laterin.  */
    622  1.1  mrg 
    623  1.1  mrg static void
    624  1.1  mrg compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
    625  1.1  mrg 		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
    626  1.1  mrg {
    627  1.1  mrg   int num_edges, i;
    628  1.1  mrg   edge e;
    629  1.1  mrg   basic_block *worklist, *tos, bb;
    630  1.1  mrg   edge_iterator ei;
    631  1.1  mrg 
    632  1.1  mrg   num_edges = NUM_EDGES (edge_list);
    633  1.1  mrg 
    634  1.1  mrg   /* Allocate a worklist array/queue.  Entries are only added to the
    635  1.1  mrg      list if they were not already on the list.  So the size is
    636  1.1  mrg      bounded by the number of basic blocks.  */
    637  1.1  mrg   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
    638  1.1  mrg 
    639  1.1  mrg   /* Initialize NEARER for each edge and build a mapping from an edge to
    640  1.1  mrg      its index.  */
    641  1.1  mrg   for (i = 0; i < num_edges; i++)
    642  1.1  mrg     INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
    643  1.1  mrg 
    644  1.1  mrg   /* We want a maximal solution.  */
    645  1.1  mrg   bitmap_vector_ones (nearer, num_edges);
    646  1.1  mrg 
    647  1.1  mrg   /* Note that even though we want an optimistic setting of NEARER, we
    648  1.1  mrg      do not want to be overly optimistic.  Consider an incoming edge to
    649  1.1  mrg      the exit block.  That edge should always have a NEARER value the
    650  1.1  mrg      same as FARTHEST for that edge.  */
    651  1.1  mrg   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    652  1.1  mrg     bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
    653  1.1  mrg 
    654  1.1  mrg   /* Add all the blocks to the worklist.  This prevents an early exit
    655  1.1  mrg      from the loop given our optimistic initialization of NEARER.  */
    656  1.1  mrg   FOR_EACH_BB_FN (bb, cfun)
    657  1.1  mrg     {
    658  1.1  mrg       *tos++ = bb;
    659  1.1  mrg       bb->aux = bb;
    660  1.1  mrg     }
    661  1.1  mrg 
    662  1.1  mrg   /* Iterate until the worklist is empty.  */
    663  1.1  mrg   while (tos != worklist)
    664  1.1  mrg     {
    665  1.1  mrg       /* Take the first entry off the worklist.  */
    666  1.1  mrg       bb = *--tos;
    667  1.1  mrg       bb->aux = NULL;
    668  1.1  mrg 
    669  1.1  mrg       /* Compute the intersection of NEARER for each outgoing edge from B.  */
    670  1.1  mrg       bitmap_ones (nearerout[bb->index]);
    671  1.1  mrg       FOR_EACH_EDGE (e, ei, bb->succs)
    672  1.1  mrg 	bitmap_and (nearerout[bb->index], nearerout[bb->index],
    673  1.1  mrg 			 nearer[(size_t) e->aux]);
    674  1.1  mrg 
    675  1.1  mrg       /* Calculate NEARER for all incoming edges.  */
    676  1.1  mrg       FOR_EACH_EDGE (e, ei, bb->preds)
    677  1.1  mrg 	if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
    678  1.1  mrg 				      farthest[(size_t) e->aux],
    679  1.1  mrg 				      nearerout[e->dest->index],
    680  1.1  mrg 				      st_avloc[e->dest->index])
    681  1.1  mrg 	    /* If NEARER for an incoming edge was changed, then we need
    682  1.1  mrg 	       to add the source of the incoming edge to the worklist.  */
    683  1.1  mrg 	    && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
    684  1.1  mrg 	  {
    685  1.1  mrg 	    *tos++ = e->src;
    686  1.1  mrg 	    e->src->aux = e;
    687  1.1  mrg 	  }
    688  1.1  mrg     }
    689  1.1  mrg 
    690  1.1  mrg   /* Computation of insertion and deletion points requires computing NEAREROUT
    691  1.1  mrg      for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
    692  1.1  mrg      for just this purpose.  */
    693  1.1  mrg   bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
    694  1.1  mrg   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    695  1.1  mrg     bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
    696  1.1  mrg 		     nearerout[last_basic_block_for_fn (cfun)],
    697  1.1  mrg 		     nearer[(size_t) e->aux]);
    698  1.1  mrg 
    699  1.1  mrg   clear_aux_for_edges ();
    700  1.1  mrg   free (tos);
    701  1.1  mrg }
    702  1.1  mrg 
    703  1.1  mrg /* Compute the insertion and deletion points for edge based LCM.  */
    704  1.1  mrg 
    705  1.1  mrg static void
    706  1.1  mrg compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
    707  1.1  mrg 			   sbitmap *nearer, sbitmap *nearerout,
    708  1.1  mrg 			   sbitmap *insert, sbitmap *del)
    709  1.1  mrg {
    710  1.1  mrg   int x;
    711  1.1  mrg   basic_block bb;
    712  1.1  mrg 
    713  1.1  mrg   FOR_EACH_BB_FN (bb, cfun)
    714  1.1  mrg     bitmap_and_compl (del[bb->index], st_avloc[bb->index],
    715  1.1  mrg 			nearerout[bb->index]);
    716  1.1  mrg 
    717  1.1  mrg   for (x = 0; x < NUM_EDGES (edge_list); x++)
    718  1.1  mrg     {
    719  1.1  mrg       basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
    720  1.1  mrg       if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    721  1.1  mrg 	bitmap_and_compl (insert[x], nearer[x],
    722  1.1  mrg 			  nearerout[last_basic_block_for_fn (cfun)]);
    723  1.1  mrg       else
    724  1.1  mrg 	bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
    725  1.1  mrg     }
    726  1.1  mrg }
    727  1.1  mrg 
    728  1.1  mrg /* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
    729  1.1  mrg    insert and delete vectors for edge based reverse LCM.  Returns an
    730  1.1  mrg    edgelist which is used to map the insert vector to what edge
    731  1.1  mrg    an expression should be inserted on.  */
    732  1.1  mrg 
    733  1.1  mrg struct edge_list *
    734  1.1  mrg pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
    735  1.1  mrg 		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
    736  1.1  mrg 		  sbitmap **insert, sbitmap **del)
    737  1.1  mrg {
    738  1.1  mrg   sbitmap *st_antin, *st_antout;
    739  1.1  mrg   sbitmap *st_avout, *st_avin, *farthest;
    740  1.1  mrg   sbitmap *nearer, *nearerout;
    741  1.1  mrg   struct edge_list *edge_list;
    742  1.1  mrg   int num_edges;
    743  1.1  mrg 
    744  1.1  mrg   edge_list = create_edge_list ();
    745  1.1  mrg   num_edges = NUM_EDGES (edge_list);
    746  1.1  mrg 
    747  1.1  mrg   st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    748  1.1  mrg   st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    749  1.1  mrg   bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
    750  1.1  mrg   bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
    751  1.1  mrg   compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
    752  1.1  mrg 
    753  1.1  mrg   /* Compute global anticipatability.  */
    754  1.1  mrg   st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    755  1.1  mrg   st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    756  1.1  mrg   compute_available (st_avloc, kill, st_avout, st_avin);
    757  1.1  mrg 
    758  1.1  mrg #ifdef LCM_DEBUG_INFO
    759  1.1  mrg   if (dump_file)
    760  1.1  mrg     {
    761  1.1  mrg       fprintf (dump_file, "Edge List:\n");
    762  1.1  mrg       verify_edge_list (dump_file, edge_list);
    763  1.1  mrg       print_edge_list (dump_file, edge_list);
    764  1.1  mrg       dump_bitmap_vector (dump_file, "transp", "", transp,
    765  1.1  mrg 			  last_basic_block_for_fn (cfun));
    766  1.1  mrg       dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
    767  1.1  mrg 			  last_basic_block_for_fn (cfun));
    768  1.1  mrg       dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
    769  1.1  mrg 			  last_basic_block_for_fn (cfun));
    770  1.1  mrg       dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
    771  1.1  mrg 			  last_basic_block_for_fn (cfun));
    772  1.1  mrg       dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
    773  1.1  mrg 			  last_basic_block_for_fn (cfun));
    774  1.1  mrg       dump_bitmap_vector (dump_file, "st_kill", "", kill,
    775  1.1  mrg 			  last_basic_block_for_fn (cfun));
    776  1.1  mrg     }
    777  1.1  mrg #endif
    778  1.1  mrg 
    779  1.1  mrg #ifdef LCM_DEBUG_INFO
    780  1.1  mrg   if (dump_file)
    781  1.1  mrg     {
    782  1.1  mrg       dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block_for_fn (cfun));
    783  1.1  mrg       dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block_for_fn (cfun));
    784  1.1  mrg     }
    785  1.1  mrg #endif
    786  1.1  mrg 
    787  1.1  mrg   /* Compute farthestness.  */
    788  1.1  mrg   farthest = sbitmap_vector_alloc (num_edges, n_exprs);
    789  1.1  mrg   compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
    790  1.1  mrg 		    kill, farthest);
    791  1.1  mrg 
    792  1.1  mrg #ifdef LCM_DEBUG_INFO
    793  1.1  mrg   if (dump_file)
    794  1.1  mrg     dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
    795  1.1  mrg #endif
    796  1.1  mrg 
    797  1.1  mrg   sbitmap_vector_free (st_antin);
    798  1.1  mrg   sbitmap_vector_free (st_antout);
    799  1.1  mrg 
    800  1.1  mrg   sbitmap_vector_free (st_avin);
    801  1.1  mrg   sbitmap_vector_free (st_avout);
    802  1.1  mrg 
    803  1.1  mrg   nearer = sbitmap_vector_alloc (num_edges, n_exprs);
    804  1.1  mrg 
    805  1.1  mrg   /* Allocate an extra element for the entry block.  */
    806  1.1  mrg   nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
    807  1.1  mrg 				    n_exprs);
    808  1.1  mrg   compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
    809  1.1  mrg 
    810  1.1  mrg #ifdef LCM_DEBUG_INFO
    811  1.1  mrg   if (dump_file)
    812  1.1  mrg     {
    813  1.1  mrg       dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
    814  1.1  mrg 			   last_basic_block_for_fn (cfun) + 1);
    815  1.1  mrg       dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
    816  1.1  mrg     }
    817  1.1  mrg #endif
    818  1.1  mrg 
    819  1.1  mrg   sbitmap_vector_free (farthest);
    820  1.1  mrg 
    821  1.1  mrg   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
    822  1.1  mrg   *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
    823  1.1  mrg   compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
    824  1.1  mrg 			     *insert, *del);
    825  1.1  mrg 
    826  1.1  mrg   sbitmap_vector_free (nearerout);
    827  1.1  mrg   sbitmap_vector_free (nearer);
    828  1.1  mrg 
    829  1.1  mrg #ifdef LCM_DEBUG_INFO
    830  1.1  mrg   if (dump_file)
    831  1.1  mrg     {
    832  1.1  mrg       dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
    833  1.1  mrg       dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
    834  1.1  mrg 			   last_basic_block_for_fn (cfun));
    835  1.1  mrg     }
    836  1.1  mrg #endif
    837  1.1  mrg   return edge_list;
    838  1.1  mrg }
    839           
    840