Home | History | Annotate | Line # | Download | only in gcc
gimple-harden-conditionals.cc revision 1.1
      1 /* Harden conditionals.
      2    Copyright (C) 2021-2022 Free Software Foundation, Inc.
      3    Contributed by Alexandre Oliva <oliva (at) adacore.com>.
      4 
      5 This file is part of GCC.
      6 
      7 GCC is free software; you can redistribute it and/or modify it under
      8 the terms of the GNU General Public License as published by the Free
      9 Software Foundation; either version 3, or (at your option) any later
     10 version.
     11 
     12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15 for more details.
     16 
     17 You should have received a copy of the GNU General Public License
     18 along with GCC; see the file COPYING3.  If not see
     19 <http://www.gnu.org/licenses/>.  */
     20 
     21 #include "config.h"
     22 #include "system.h"
     23 #include "coretypes.h"
     24 #include "backend.h"
     25 #include "target.h"
     26 #include "rtl.h"
     27 #include "tree.h"
     28 #include "fold-const.h"
     29 #include "gimple.h"
     30 #include "gimplify.h"
     31 #include "tree-pass.h"
     32 #include "ssa.h"
     33 #include "gimple-iterator.h"
     34 #include "tree-cfg.h"
     35 #include "basic-block.h"
     36 #include "cfghooks.h"
     37 #include "cfgloop.h"
     38 #include "tree-eh.h"
     39 #include "diagnostic.h"
     40 #include "intl.h"
     41 
     42 namespace {
     43 
/* These passes introduce redundant, but reversed conditionals at
     45    compares, such as those used in conditional branches, and those
     46    that compute boolean results.  This doesn't make much sense for
     47    abstract CPUs, but this kind of hardening may avoid undesirable
     48    execution paths on actual CPUs under such attacks as of power
     49    deprivation.  */
     50 
/* Define a pass to harden conditionals other than branches.  */

const pass_data pass_data_harden_compares = {
  GIMPLE_PASS,		// type
  "hardcmp",		// name
  OPTGROUP_NONE,	// optinfo_flags
  TV_NONE,		// tv_id
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};
     66 
class pass_harden_compares : public gimple_opt_pass
{
public:
  pass_harden_compares (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_compares, ctxt)
  {}
  /* Allow multiple instances of this pass in the pass pipeline.  */
  opt_pass *clone () { return new pass_harden_compares (m_ctxt); }
  /* Run only when compare hardening was requested on the command
     line.  */
  virtual bool gate (function *) {
    return flag_harden_compares;
  }
  virtual unsigned int execute (function *);
};
     79 
/* Define a pass to harden conditionals in branches.  This pass must
   run after the above, otherwise it will re-harden the checks
   introduced by the above.  */

const pass_data pass_data_harden_conditional_branches = {
  GIMPLE_PASS,		// type
  "hardcbr",		// name
  OPTGROUP_NONE,	// optinfo_flags
  TV_NONE,		// tv_id
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};
     97 
class pass_harden_conditional_branches : public gimple_opt_pass
{
public:
  pass_harden_conditional_branches (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt)
  {}
  /* Allow multiple instances of this pass in the pass pipeline.  */
  opt_pass *clone () { return new pass_harden_conditional_branches (m_ctxt); }
  /* Run only when conditional-branch hardening was requested on the
     command line.  */
  virtual bool gate (function *) {
    return flag_harden_conditional_branches;
  }
  virtual unsigned int execute (function *);
};
    110 
    111 }
    112 
/* If VAL is an SSA name, return an SSA name holding the same value,
   but without the compiler's knowing that it holds the same value, so
   that uses thereof can't be optimized the way VAL might.  Insert
   stmts that initialize it before *GSIP, with LOC.

   Otherwise, VAL must be an invariant, returned unchanged.  */

static inline tree
detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val)
{
  /* Constants and other invariants need no detaching: nothing is
     gained by hiding their (fixed) value from the optimizers.  */
  if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME)
    {
      gcc_checking_assert (is_gimple_min_invariant (val));
      return val;
    }

  /* Create a SSA "copy" of VAL.  It would be nice to have it named
     after the corresponding variable, but sharing the same decl is
     problematic when VAL is a DECL_BY_REFERENCE RESULT_DECL, and
     copying just the identifier hits -fcompare-debug failures.  */
  tree ret = make_ssa_name (TREE_TYPE (val));

  /* Some modes won't fit in general regs, so we fall back to memory
     for them.  ??? It would be ideal to try to identify an alternate,
     wider or more suitable register class, and use the corresponding
     constraint, but there's no logic to go from register class to
     constraint, even if there is a corresponding constraint, and even
     if we could enumerate constraints, we can't get to their string
     either.  So this will do for now.  */
  bool need_memory = true;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (val));
  if (mode != BLKmode)
    /* If any general-purpose hard register can hold MODE, the asm
       below can use register constraints rather than memory.  */
    for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i)
	  && targetm.hard_regno_mode_ok (i, mode))
	{
	  need_memory = false;
	  break;
	}

  tree asminput = val;
  tree asmoutput = ret;
  /* "=g"/"0" ties the asm output to the input in a general operand;
     the memory fallback uses a shared addressable temporary.  */
  const char *constraint_out = need_memory ? "=m" : "=g";
  const char *constraint_in = need_memory ? "m" : "0";

  if (need_memory)
    {
      /* Copy VAL into an addressable temporary that serves as both
	 asm input and output.  */
      tree temp = create_tmp_var (TREE_TYPE (val), "dtch");
      mark_addressable (temp);

      gassign *copyin = gimple_build_assign (temp, asminput);
      gimple_set_location (copyin, loc);
      gsi_insert_before (gsip, copyin, GSI_SAME_STMT);

      asminput = asmoutput = temp;
    }

  /* Output an asm statement with matching input and output.  It does
     nothing, but after it the compiler no longer knows the output
     still holds the same value as the input.  */
  vec<tree, va_gc> *inputs = NULL;
  vec<tree, va_gc> *outputs = NULL;
  vec_safe_push (outputs,
		 build_tree_list
		 (build_tree_list
		  (NULL_TREE, build_string (strlen (constraint_out),
					    constraint_out)),
		  asmoutput));
  vec_safe_push (inputs,
		 build_tree_list
		 (build_tree_list
		  (NULL_TREE, build_string (strlen (constraint_in),
					    constraint_in)),
		  asminput));
  gasm *detach = gimple_build_asm_vec ("", inputs, outputs,
				       NULL, NULL);
  gimple_set_location (detach, loc);
  gsi_insert_before (gsip, detach, GSI_SAME_STMT);

  if (need_memory)
    {
      /* Copy the detached value out of the temporary into RET, and
	 make that copy RET's defining statement.  */
      gassign *copyout = gimple_build_assign (ret, asmoutput);
      gimple_set_location (copyout, loc);
      gsi_insert_before (gsip, copyout, GSI_SAME_STMT);
      SSA_NAME_DEF_STMT (ret) = copyout;

      /* Mark the temporary as dead past this point.  */
      gassign *clobber = gimple_build_assign (asmoutput,
					      build_clobber
					      (TREE_TYPE (asmoutput)));
      gimple_set_location (clobber, loc);
      gsi_insert_before (gsip, clobber, GSI_SAME_STMT);
    }
  else
    /* In the register case the asm itself defines RET.  */
    SSA_NAME_DEF_STMT (ret) = detach;

  return ret;
}
    210 
/* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with
   location LOC.  *GSIP must be at the end of a basic block.  The succ
   edge out of the block becomes the true or false edge opposite to
   that in FLAGS.  Create a new block with a single trap stmt, in the
   cold partition if the function is partitioned, and a new edge to
   it as the other edge for the cond.  */

static inline void
insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip,
		       int flags, enum tree_code cop, tree lhs, tree rhs)
{
  basic_block chk = gsi_bb (*gsip);

  /* Insert the reversed compare at the end of the checking block.  */
  gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL);
  gimple_set_location (cond, loc);
  gsi_insert_before (gsip, cond, GSI_SAME_STMT);

  /* Create the trap block, holding only a __builtin_trap call.  */
  basic_block trp = create_empty_bb (chk);

  gimple_stmt_iterator gsit = gsi_after_labels (trp);
  gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_set_location (trap, loc);
  gsi_insert_before (&gsit, trap, GSI_SAME_STMT);

  if (dump_file)
    fprintf (dump_file,
	     "Adding reversed compare to block %i, and trap to block %i\n",
	     chk->index, trp->index);

  /* Keep the (never-taken) trap block out of the hot partition.  */
  if (BB_PARTITION (chk))
    BB_SET_PARTITION (trp, BB_COLD_PARTITION);

  int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
  gcc_assert (true_false_flag);
  int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  /* Remove the fallthru bit, and set the truth value for the
     preexisting edge and for the newly-created one.  In hardcbr,
     FLAGS is taken from the edge of the original cond expr that we're
     dealing with, so the reversed compare is expected to yield the
     negated result, and the same result calls for a trap.  In
     hardcmp, we're comparing the boolean results of the original and
     of the reversed compare, so we're passed FLAGS to trap on
     equality.  */
  single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU;
  single_succ_edge (chk)->flags |= neg_true_false_flag;
  single_succ_edge (chk)->probability = profile_probability::always ();
  edge e = make_edge (chk, trp, true_false_flag);
  e->goto_locus = loc;
  e->probability = profile_probability::never ();

  /* Keep dominance and loop information up to date, if present.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, trp, chk);
  if (current_loops)
    add_bb_to_loop (trp, current_loops->tree_root);
}
    267 
/* Split edge E, and insert_check_and_trap (see above) in the
   newly-created block, using detached copies of LHS's and RHS's
   values (see detach_value above) for the COP compare.  */

static inline void
insert_edge_check_and_trap (location_t loc, edge e,
			    enum tree_code cop, tree lhs, tree rhs)
{
  /* Save the edge's properties before split_edge invalidates it.  */
  int flags = e->flags;
  basic_block src = e->src;
  basic_block dest = e->dest;
  location_t eloc = e->goto_locus;

  basic_block chk = split_edge (e);
  e = NULL; /* E is stale after the split; don't touch it again.  */

  /* The branch into the check block gets the cond's location, while
     the branch out of it inherits the original edge's location.  */
  single_pred_edge (chk)->goto_locus = loc;
  single_succ_edge (chk)->goto_locus = eloc;

  if (dump_file)
    fprintf (dump_file,
	     "Splitting edge %i->%i into block %i\n",
	     src->index, dest->index, chk->index);

  gimple_stmt_iterator gsik = gsi_after_labels (chk);

  /* Detach the compared values; when both operands are the very same
     tree, a single detached copy suffices.  */
  bool same_p = (lhs == rhs);
  lhs = detach_value (loc, &gsik, lhs);
  rhs = same_p ? lhs : detach_value (loc, &gsik, rhs);

  insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs);
}
    300 
/* Harden cond stmts at the end of FUN's blocks.  */

unsigned int
pass_harden_conditional_branches::execute (function *fun)
{
  basic_block bb;
  /* Iterate in reverse so that blocks created by the hardening below
     (appended after the current ones) are not themselves visited.  */
  FOR_EACH_BB_REVERSE_FN (bb, fun)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      if (gsi_end_p (gsi))
	continue;

      /* Only blocks ending in a conditional branch are of
	 interest.  */
      gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
      if (!cond)
	continue;

      /* Turn:

	 if (x op y) goto l1; else goto l2;

	 into:

	 if (x op y) goto l1'; else goto l2';
	 l1': if (x' cop y') goto l1'trap; else goto l1;
	 l1'trap: __builtin_trap ();
	 l2': if (x' cop y') goto l2; else goto l2'trap;
	 l2'trap: __builtin_trap ();

	 where cop is a complementary boolean operation to op; l1', l1'trap,
	 l2' and l2'trap are newly-created labels; and x' and y' hold the same
	 value as x and y, but in a way that does not enable the compiler to
	 optimize the redundant compare away.
      */

      enum tree_code op = gimple_cond_code (cond);
      tree lhs = gimple_cond_lhs (cond);
      tree rhs = gimple_cond_rhs (cond);
      location_t loc = gimple_location (cond);

      /* Compute the complementary compare, honoring NaN semantics for
	 floating-point operands.  */
      enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));

      if (cop == ERROR_MARK)
	/* ??? Can we do better?  */
	continue;

      /* Add a reversed check-and-trap on each outgoing edge.  */
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
    }

  return 0;
}
    353 
    354 /* Instantiate a hardcbr pass.  */
    355 
    356 gimple_opt_pass *
    357 make_pass_harden_conditional_branches (gcc::context *ctxt)
    358 {
    359   return new pass_harden_conditional_branches (ctxt);
    360 }
    361 
/* Return the fallthru edge of a block whose other edge is an EH
   edge.  If EHP is not NULL, store the EH edge in it.  */
static inline edge
non_eh_succ_edge (basic_block bb, edge *ehp = NULL)
{
  /* Precondition: BB has exactly two successors, the fallthru edge
     and the EH edge.  */
  gcc_checking_assert (EDGE_COUNT (bb->succs) == 2);

  edge ret = find_fallthru_edge (bb->succs);

  /* The EH edge is whichever of the two successors the fallthru edge
     is not: index 1 if RET is successor 0, else index 0.  */
  int eh_idx = EDGE_SUCC (bb, 0) == ret;
  edge eh = EDGE_SUCC (bb, eh_idx);

  gcc_checking_assert (!(ret->flags & EDGE_EH)
		       && (eh->flags & EDGE_EH));

  if (ehp)
    *ehp = eh;

  return ret;
}
    382 
/* Harden boolean-yielding compares in FUN.  */

unsigned int
pass_harden_compares::execute (function *fun)
{
  basic_block bb;
  /* Go backwards over BBs and stmts, so that, even if we split the
     block multiple times to insert a cond_expr after each compare we
     find, we remain in the same block, visiting every preexisting
     stmt exactly once, and not visiting newly-added blocks or
     stmts.  */
  FOR_EACH_BB_REVERSE_FN (bb, fun)
    for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
	 !gsi_end_p (gsi); gsi_prev (&gsi))
      {
	gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi));
	if (!asgn)
	  continue;

	/* Turn:

	   z = x op y;

	   into:

	   z = x op y;
	   z' = x' cop y';
	   if (z == z') __builtin_trap ();

	   where cop is a complementary boolean operation to op; and x'
	   and y' hold the same value as x and y, but in a way that does
	   not enable the compiler to optimize the redundant compare
	   away.
	*/

	enum tree_code op = gimple_assign_rhs_code (asgn);

	enum tree_code cop;

	switch (op)
	  {
	  case EQ_EXPR:
	  case NE_EXPR:
	  case GT_EXPR:
	  case GE_EXPR:
	  case LT_EXPR:
	  case LE_EXPR:
	  case LTGT_EXPR:
	  case UNEQ_EXPR:
	  case UNGT_EXPR:
	  case UNGE_EXPR:
	  case UNLT_EXPR:
	  case UNLE_EXPR:
	  case ORDERED_EXPR:
	  case UNORDERED_EXPR:
	    /* Complementary compare, honoring NaN semantics for
	       floating-point operands.  */
	    cop = invert_tree_comparison (op,
					  HONOR_NANS
					  (gimple_assign_rhs1 (asgn)));

	    if (cop == ERROR_MARK)
	      /* ??? Can we do better?  */
	      continue;

	    break;

	    /* ??? Maybe handle these too?  */
	  case TRUTH_NOT_EXPR:
	    /* ??? The code below assumes binary ops, it would have to
	       be adjusted for TRUTH_NOT_EXPR, since it's unary.  */
	  case TRUTH_ANDIF_EXPR:
	  case TRUTH_ORIF_EXPR:
	  case TRUTH_AND_EXPR:
	  case TRUTH_OR_EXPR:
	  case TRUTH_XOR_EXPR:
	  default:
	    continue;
	  }

	/* These are the operands for the verification.  */
	tree lhs = gimple_assign_lhs (asgn);
	tree op1 = gimple_assign_rhs1 (asgn);
	tree op2 = gimple_assign_rhs2 (asgn);
	location_t loc = gimple_location (asgn);

	/* Vector booleans can't be used in conditional branches.  ???
	   Can we do better?  How to reduce compare and
	   reversed-compare result vectors to a single boolean?  */
	if (VECTOR_TYPE_P (TREE_TYPE (op1)))
	  continue;

	/* useless_type_conversion_p enables conversions from 1-bit
	   integer types to boolean to be discarded.  */
	gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
			     || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
				 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1));

	/* RHS will hold the result of the reversed compare (z' in the
	   sketch above).  */
	tree rhs = copy_ssa_name (lhs);

	gimple_stmt_iterator gsi_split = gsi;
	/* Don't separate the original assignment from debug stmts
	   that might be associated with it, and arrange to split the
	   block after debug stmts, so as to make sure the split block
	   won't be debug stmts only.  */
	gsi_next_nondebug (&gsi_split);

	bool throwing_compare_p = stmt_ends_bb_p (asgn);
	if (throwing_compare_p)
	  {
	    /* A throwing compare must end its block, so insert the
	       verification on the non-EH path out of it.  */
	    basic_block nbb = split_edge (non_eh_succ_edge
					  (gimple_bb (asgn)));
	    gsi_split = gsi_start_bb (nbb);

	    if (dump_file)
	      fprintf (dump_file,
		       "Splitting non-EH edge from block %i into %i"
		       " after a throwing compare\n",
		       gimple_bb (asgn)->index, nbb->index);
	  }

	/* Detach the compared values; when both operands are the very
	   same tree, one detached copy suffices.  */
	bool same_p = (op1 == op2);
	op1 = detach_value (loc, &gsi_split, op1);
	op2 = same_p ? op1 : detach_value (loc, &gsi_split, op2);

	/* Insert the reversed compare z' = x' cop y'.  */
	gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2);
	gimple_set_location (asgnck, loc);
	gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT);

	/* We wish to insert a cond_expr after the compare, so arrange
	   for it to be at the end of a block if it isn't, and for it
	   to have a single successor in case there's more than
	   one, as in PR104975.  */
	if (!gsi_end_p (gsi_split)
	    || !single_succ_p (gsi_bb (gsi_split)))
	  {
	    if (!gsi_end_p (gsi_split))
	      gsi_prev (&gsi_split);
	    else
	      gsi_split = gsi_last_bb (gsi_bb (gsi_split));
	    basic_block obb = gsi_bb (gsi_split);
	    basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest;
	    gsi_next (&gsi_split);
	    gcc_checking_assert (gsi_end_p (gsi_split));

	    single_succ_edge (bb)->goto_locus = loc;

	    if (dump_file)
	      fprintf (dump_file,
		       "Splitting block %i into %i"
		       " before the conditional trap branch\n",
		       obb->index, nbb->index);
	  }

	/* If the check assignment must end a basic block, we can't
	   insert the conditional branch in the same block, so split
	   the block again, and prepare to insert the conditional
	   branch in the new block.

	   Also assign an EH region to the compare.  Even though it's
	   unlikely that the hardening compare will throw after the
	   original compare didn't, the compiler won't even know that
	   it's the same compare operands, so add the EH edge anyway.  */
	if (throwing_compare_p)
	  {
	    add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn));
	    make_eh_edges (asgnck);

	    edge ckeh;
	    basic_block nbb = split_edge (non_eh_succ_edge
					  (gimple_bb (asgnck), &ckeh));
	    gsi_split = gsi_start_bb (nbb);

	    if (dump_file)
	      fprintf (dump_file,
		       "Splitting non-EH edge from block %i into %i after"
		       " the newly-inserted reversed throwing compare\n",
		       gimple_bb (asgnck)->index, nbb->index);

	    /* The landing pad now has two incoming EH edges; give the
	       new one PHI arguments matching the original's.  */
	    if (!gimple_seq_empty_p (phi_nodes (ckeh->dest)))
	      {
		edge aseh;
		non_eh_succ_edge (gimple_bb (asgn), &aseh);

		gcc_checking_assert (aseh->dest == ckeh->dest);

		for (gphi_iterator psi = gsi_start_phis (ckeh->dest);
		     !gsi_end_p (psi); gsi_next (&psi))
		  {
		    gphi *phi = psi.phi ();
		    add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh), ckeh,
				 gimple_phi_arg_location_from_edge (phi, aseh));
		  }

		if (dump_file)
		  fprintf (dump_file,
			   "Copying PHI args in EH block %i from %i to %i\n",
			   aseh->dest->index, aseh->src->index, ckeh->src->index);
	      }
	  }

	gcc_checking_assert (single_succ_p (gsi_bb (gsi_split)));

	/* Trap if the original and reversed compares agree: FLAGS
	   here selects the true (equality) edge for the trap.  */
	insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE,
			       EQ_EXPR, lhs, rhs);
      }

  return 0;
}
    590 
    591 /* Instantiate a hardcmp pass.  */
    592 
    593 gimple_opt_pass *
    594 make_pass_harden_compares (gcc::context *ctxt)
    595 {
    596   return new pass_harden_compares (ctxt);
    597 }
    598