/* Harden conditionals.
   Copyright (C) 2021-2022 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <oliva@adacore.com>.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "fold-const.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "basic-block.h"
#include "cfghooks.h"
#include "cfgloop.h"
#include "tree-eh.h"
#include "diagnostic.h"
#include "intl.h"

namespace {

/* These passes introduce redundant, but reversed conditionals at
   compares, such as those used in conditional branches, and those
   that compute boolean results.
This doesn't make much sense for 47 1.1 mrg abstract CPUs, but this kind of hardening may avoid undesirable 48 1.1 mrg execution paths on actual CPUs under such attacks as of power 49 1.1 mrg deprivation. */ 50 1.1 mrg 51 1.1 mrg /* Define a pass to harden conditionals other than branches. */ 52 1.1 mrg 53 1.1 mrg const pass_data pass_data_harden_compares = { 54 1.1 mrg GIMPLE_PASS, 55 1.1 mrg "hardcmp", 56 1.1 mrg OPTGROUP_NONE, 57 1.1 mrg TV_NONE, 58 1.1 mrg PROP_cfg | PROP_ssa, // properties_required 59 1.1 mrg 0, // properties_provided 60 1.1 mrg 0, // properties_destroyed 61 1.1 mrg 0, // properties_start 62 1.1 mrg TODO_update_ssa 63 1.1 mrg | TODO_cleanup_cfg 64 1.1 mrg | TODO_verify_il, // properties_finish 65 1.1 mrg }; 66 1.1 mrg 67 1.1 mrg class pass_harden_compares : public gimple_opt_pass 68 1.1 mrg { 69 1.1 mrg public: 70 1.1 mrg pass_harden_compares (gcc::context *ctxt) 71 1.1 mrg : gimple_opt_pass (pass_data_harden_compares, ctxt) 72 1.1 mrg {} 73 1.1 mrg opt_pass *clone () { return new pass_harden_compares (m_ctxt); } 74 1.1 mrg virtual bool gate (function *) { 75 1.1 mrg return flag_harden_compares; 76 1.1 mrg } 77 1.1 mrg virtual unsigned int execute (function *); 78 1.1 mrg }; 79 1.1 mrg 80 1.1 mrg /* Define a pass to harden conditionals in branches. This pass must 81 1.1 mrg run after the above, otherwise it will re-harden the checks 82 1.1 mrg introduced by the above. 
*/ 83 1.1 mrg 84 1.1 mrg const pass_data pass_data_harden_conditional_branches = { 85 1.1 mrg GIMPLE_PASS, 86 1.1 mrg "hardcbr", 87 1.1 mrg OPTGROUP_NONE, 88 1.1 mrg TV_NONE, 89 1.1 mrg PROP_cfg | PROP_ssa, // properties_required 90 1.1 mrg 0, // properties_provided 91 1.1 mrg 0, // properties_destroyed 92 1.1 mrg 0, // properties_start 93 1.1 mrg TODO_update_ssa 94 1.1 mrg | TODO_cleanup_cfg 95 1.1 mrg | TODO_verify_il, // properties_finish 96 1.1 mrg }; 97 1.1 mrg 98 1.1 mrg class pass_harden_conditional_branches : public gimple_opt_pass 99 1.1 mrg { 100 1.1 mrg public: 101 1.1 mrg pass_harden_conditional_branches (gcc::context *ctxt) 102 1.1 mrg : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt) 103 1.1 mrg {} 104 1.1 mrg opt_pass *clone () { return new pass_harden_conditional_branches (m_ctxt); } 105 1.1 mrg virtual bool gate (function *) { 106 1.1 mrg return flag_harden_conditional_branches; 107 1.1 mrg } 108 1.1 mrg virtual unsigned int execute (function *); 109 1.1 mrg }; 110 1.1 mrg 111 1.1 mrg } 112 1.1 mrg 113 1.1 mrg /* If VAL is an SSA name, return an SSA name holding the same value, 114 1.1 mrg but without the compiler's knowing that it holds the same value, so 115 1.1 mrg that uses thereof can't be optimized the way VAL might. Insert 116 1.1 mrg stmts that initialize it before *GSIP, with LOC. 117 1.1 mrg 118 1.1 mrg Otherwise, VAL must be an invariant, returned unchanged. */ 119 1.1 mrg 120 1.1 mrg static inline tree 121 1.1 mrg detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val) 122 1.1 mrg { 123 1.1 mrg if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME) 124 1.1 mrg { 125 1.1 mrg gcc_checking_assert (is_gimple_min_invariant (val)); 126 1.1 mrg return val; 127 1.1 mrg } 128 1.1 mrg 129 1.1 mrg /* Create a SSA "copy" of VAL. 
It would be nice to have it named 130 1.1 mrg after the corresponding variable, but sharing the same decl is 131 1.1 mrg problematic when VAL is a DECL_BY_REFERENCE RESULT_DECL, and 132 1.1 mrg copying just the identifier hits -fcompare-debug failures. */ 133 1.1 mrg tree ret = make_ssa_name (TREE_TYPE (val)); 134 1.1 mrg 135 1.1 mrg /* Some modes won't fit in general regs, so we fall back to memory 136 1.1 mrg for them. ??? It would be ideal to try to identify an alternate, 137 1.1 mrg wider or more suitable register class, and use the corresponding 138 1.1 mrg constraint, but there's no logic to go from register class to 139 1.1 mrg constraint, even if there is a corresponding constraint, and even 140 1.1 mrg if we could enumerate constraints, we can't get to their string 141 1.1 mrg either. So this will do for now. */ 142 1.1 mrg bool need_memory = true; 143 1.1 mrg enum machine_mode mode = TYPE_MODE (TREE_TYPE (val)); 144 1.1 mrg if (mode != BLKmode) 145 1.1 mrg for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++) 146 1.1 mrg if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i) 147 1.1 mrg && targetm.hard_regno_mode_ok (i, mode)) 148 1.1 mrg { 149 1.1 mrg need_memory = false; 150 1.1 mrg break; 151 1.1 mrg } 152 1.1 mrg 153 1.1 mrg tree asminput = val; 154 1.1 mrg tree asmoutput = ret; 155 1.1 mrg const char *constraint_out = need_memory ? "=m" : "=g"; 156 1.1 mrg const char *constraint_in = need_memory ? "m" : "0"; 157 1.1 mrg 158 1.1 mrg if (need_memory) 159 1.1 mrg { 160 1.1 mrg tree temp = create_tmp_var (TREE_TYPE (val), "dtch"); 161 1.1 mrg mark_addressable (temp); 162 1.1 mrg 163 1.1 mrg gassign *copyin = gimple_build_assign (temp, asminput); 164 1.1 mrg gimple_set_location (copyin, loc); 165 1.1 mrg gsi_insert_before (gsip, copyin, GSI_SAME_STMT); 166 1.1 mrg 167 1.1 mrg asminput = asmoutput = temp; 168 1.1 mrg } 169 1.1 mrg 170 1.1 mrg /* Output an asm statement with matching input and output. 
It does 171 1.1 mrg nothing, but after it the compiler no longer knows the output 172 1.1 mrg still holds the same value as the input. */ 173 1.1 mrg vec<tree, va_gc> *inputs = NULL; 174 1.1 mrg vec<tree, va_gc> *outputs = NULL; 175 1.1 mrg vec_safe_push (outputs, 176 1.1 mrg build_tree_list 177 1.1 mrg (build_tree_list 178 1.1 mrg (NULL_TREE, build_string (strlen (constraint_out), 179 1.1 mrg constraint_out)), 180 1.1 mrg asmoutput)); 181 1.1 mrg vec_safe_push (inputs, 182 1.1 mrg build_tree_list 183 1.1 mrg (build_tree_list 184 1.1 mrg (NULL_TREE, build_string (strlen (constraint_in), 185 1.1 mrg constraint_in)), 186 1.1 mrg asminput)); 187 1.1 mrg gasm *detach = gimple_build_asm_vec ("", inputs, outputs, 188 1.1 mrg NULL, NULL); 189 1.1 mrg gimple_set_location (detach, loc); 190 1.1 mrg gsi_insert_before (gsip, detach, GSI_SAME_STMT); 191 1.1 mrg 192 1.1 mrg if (need_memory) 193 1.1 mrg { 194 1.1 mrg gassign *copyout = gimple_build_assign (ret, asmoutput); 195 1.1 mrg gimple_set_location (copyout, loc); 196 1.1 mrg gsi_insert_before (gsip, copyout, GSI_SAME_STMT); 197 1.1 mrg SSA_NAME_DEF_STMT (ret) = copyout; 198 1.1 mrg 199 1.1 mrg gassign *clobber = gimple_build_assign (asmoutput, 200 1.1 mrg build_clobber 201 1.1 mrg (TREE_TYPE (asmoutput))); 202 1.1 mrg gimple_set_location (clobber, loc); 203 1.1 mrg gsi_insert_before (gsip, clobber, GSI_SAME_STMT); 204 1.1 mrg } 205 1.1 mrg else 206 1.1 mrg SSA_NAME_DEF_STMT (ret) = detach; 207 1.1 mrg 208 1.1 mrg return ret; 209 1.1 mrg } 210 1.1 mrg 211 1.1 mrg /* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with 212 1.1 mrg location LOC. *GSIP must be at the end of a basic block. The succ 213 1.1 mrg edge out of the block becomes the true or false edge opposite to 214 1.1 mrg that in FLAGS. Create a new block with a single trap stmt, in the 215 1.1 mrg cold partition if the function is partitioned,, and a new edge to 216 1.1 mrg it as the other edge for the cond. 
*/ 217 1.1 mrg 218 1.1 mrg static inline void 219 1.1 mrg insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip, 220 1.1 mrg int flags, enum tree_code cop, tree lhs, tree rhs) 221 1.1 mrg { 222 1.1 mrg basic_block chk = gsi_bb (*gsip); 223 1.1 mrg 224 1.1 mrg gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL); 225 1.1 mrg gimple_set_location (cond, loc); 226 1.1 mrg gsi_insert_before (gsip, cond, GSI_SAME_STMT); 227 1.1 mrg 228 1.1 mrg basic_block trp = create_empty_bb (chk); 229 1.1 mrg 230 1.1 mrg gimple_stmt_iterator gsit = gsi_after_labels (trp); 231 1.1 mrg gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0); 232 1.1 mrg gimple_set_location (trap, loc); 233 1.1 mrg gsi_insert_before (&gsit, trap, GSI_SAME_STMT); 234 1.1 mrg 235 1.1 mrg if (dump_file) 236 1.1 mrg fprintf (dump_file, 237 1.1 mrg "Adding reversed compare to block %i, and trap to block %i\n", 238 1.1 mrg chk->index, trp->index); 239 1.1 mrg 240 1.1 mrg if (BB_PARTITION (chk)) 241 1.1 mrg BB_SET_PARTITION (trp, BB_COLD_PARTITION); 242 1.1 mrg 243 1.1 mrg int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE); 244 1.1 mrg gcc_assert (true_false_flag); 245 1.1 mrg int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE); 246 1.1 mrg 247 1.1 mrg /* Remove the fallthru bit, and set the truth value for the 248 1.1 mrg preexisting edge and for the newly-created one. In hardcbr, 249 1.1 mrg FLAGS is taken from the edge of the original cond expr that we're 250 1.1 mrg dealing with, so the reversed compare is expected to yield the 251 1.1 mrg negated result, and the same result calls for a trap. In 252 1.1 mrg hardcmp, we're comparing the boolean results of the original and 253 1.1 mrg of the reversed compare, so we're passed FLAGS to trap on 254 1.1 mrg equality. 
*/ 255 1.1 mrg single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU; 256 1.1 mrg single_succ_edge (chk)->flags |= neg_true_false_flag; 257 1.1 mrg single_succ_edge (chk)->probability = profile_probability::always (); 258 1.1 mrg edge e = make_edge (chk, trp, true_false_flag); 259 1.1 mrg e->goto_locus = loc; 260 1.1 mrg e->probability = profile_probability::never (); 261 1.1 mrg 262 1.1 mrg if (dom_info_available_p (CDI_DOMINATORS)) 263 1.1 mrg set_immediate_dominator (CDI_DOMINATORS, trp, chk); 264 1.1 mrg if (current_loops) 265 1.1 mrg add_bb_to_loop (trp, current_loops->tree_root); 266 1.1 mrg } 267 1.1 mrg 268 1.1 mrg /* Split edge E, and insert_check_and_trap (see above) in the 269 1.1 mrg newly-created block, using detached copies of LHS's and RHS's 270 1.1 mrg values (see detach_value above) for the COP compare. */ 271 1.1 mrg 272 1.1 mrg static inline void 273 1.1 mrg insert_edge_check_and_trap (location_t loc, edge e, 274 1.1 mrg enum tree_code cop, tree lhs, tree rhs) 275 1.1 mrg { 276 1.1 mrg int flags = e->flags; 277 1.1 mrg basic_block src = e->src; 278 1.1 mrg basic_block dest = e->dest; 279 1.1 mrg location_t eloc = e->goto_locus; 280 1.1 mrg 281 1.1 mrg basic_block chk = split_edge (e); 282 1.1 mrg e = NULL; 283 1.1 mrg 284 1.1 mrg single_pred_edge (chk)->goto_locus = loc; 285 1.1 mrg single_succ_edge (chk)->goto_locus = eloc; 286 1.1 mrg 287 1.1 mrg if (dump_file) 288 1.1 mrg fprintf (dump_file, 289 1.1 mrg "Splitting edge %i->%i into block %i\n", 290 1.1 mrg src->index, dest->index, chk->index); 291 1.1 mrg 292 1.1 mrg gimple_stmt_iterator gsik = gsi_after_labels (chk); 293 1.1 mrg 294 1.1 mrg bool same_p = (lhs == rhs); 295 1.1 mrg lhs = detach_value (loc, &gsik, lhs); 296 1.1 mrg rhs = same_p ? lhs : detach_value (loc, &gsik, rhs); 297 1.1 mrg 298 1.1 mrg insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs); 299 1.1 mrg } 300 1.1 mrg 301 1.1 mrg /* Harden cond stmts at the end of FUN's blocks. 
*/

unsigned int
pass_harden_conditional_branches::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_REVERSE_FN (bb, fun)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      if (gsi_end_p (gsi))
	continue;

      /* Only blocks ending in a conditional branch are of
	 interest.  */
      gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
      if (!cond)
	continue;

      /* Turn:

	 if (x op y) goto l1; else goto l2;

	 into:

	 if (x op y) goto l1'; else goto l2';
	 l1': if (x' cop y') goto l1'trap; else goto l1;
	 l1'trap: __builtin_trap ();
	 l2': if (x' cop y') goto l2; else goto l2'trap;
	 l2'trap: __builtin_trap ();

	 where cop is a complementary boolean operation to op; l1',
	 l1'trap, l2' and l2'trap are newly-created labels; and x' and
	 y' hold the same value as x and y, but in a way that does not
	 enable the compiler to optimize the redundant compare
	 away.  */

      enum tree_code op = gimple_cond_code (cond);
      tree lhs = gimple_cond_lhs (cond);
      tree rhs = gimple_cond_rhs (cond);
      location_t loc = gimple_location (cond);

      enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));

      if (cop == ERROR_MARK)
	/* ??? Can we do better?  */
	continue;

      /* Guard both successor edges with the reversed compare.  */
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
    }

  return 0;
}

/* Instantiate a hardcbr pass.
*/ 355 1.1 mrg 356 1.1 mrg gimple_opt_pass * 357 1.1 mrg make_pass_harden_conditional_branches (gcc::context *ctxt) 358 1.1 mrg { 359 1.1 mrg return new pass_harden_conditional_branches (ctxt); 360 1.1 mrg } 361 1.1 mrg 362 1.1 mrg /* Return the fallthru edge of a block whose other edge is an EH 363 1.1 mrg edge. If EHP is not NULL, store the EH edge in it. */ 364 1.1 mrg static inline edge 365 1.1 mrg non_eh_succ_edge (basic_block bb, edge *ehp = NULL) 366 1.1 mrg { 367 1.1 mrg gcc_checking_assert (EDGE_COUNT (bb->succs) == 2); 368 1.1 mrg 369 1.1 mrg edge ret = find_fallthru_edge (bb->succs); 370 1.1 mrg 371 1.1 mrg int eh_idx = EDGE_SUCC (bb, 0) == ret; 372 1.1 mrg edge eh = EDGE_SUCC (bb, eh_idx); 373 1.1 mrg 374 1.1 mrg gcc_checking_assert (!(ret->flags & EDGE_EH) 375 1.1 mrg && (eh->flags & EDGE_EH)); 376 1.1 mrg 377 1.1 mrg if (ehp) 378 1.1 mrg *ehp = eh; 379 1.1 mrg 380 1.1 mrg return ret; 381 1.1 mrg } 382 1.1 mrg 383 1.1 mrg /* Harden boolean-yielding compares in FUN. */ 384 1.1 mrg 385 1.1 mrg unsigned int 386 1.1 mrg pass_harden_compares::execute (function *fun) 387 1.1 mrg { 388 1.1 mrg basic_block bb; 389 1.1 mrg /* Go backwards over BBs and stmts, so that, even if we split the 390 1.1 mrg block multiple times to insert a cond_expr after each compare we 391 1.1 mrg find, we remain in the same block, visiting every preexisting 392 1.1 mrg stmt exactly once, and not visiting newly-added blocks or 393 1.1 mrg stmts. 
*/ 394 1.1 mrg FOR_EACH_BB_REVERSE_FN (bb, fun) 395 1.1 mrg for (gimple_stmt_iterator gsi = gsi_last_bb (bb); 396 1.1 mrg !gsi_end_p (gsi); gsi_prev (&gsi)) 397 1.1 mrg { 398 1.1 mrg gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi)); 399 1.1 mrg if (!asgn) 400 1.1 mrg continue; 401 1.1 mrg 402 1.1 mrg /* Turn: 403 1.1 mrg 404 1.1 mrg z = x op y; 405 1.1 mrg 406 1.1 mrg into: 407 1.1 mrg 408 1.1 mrg z = x op y; 409 1.1 mrg z' = x' cop y'; 410 1.1 mrg if (z == z') __builtin_trap (); 411 1.1 mrg 412 1.1 mrg where cop is a complementary boolean operation to op; and x' 413 1.1 mrg and y' hold the same value as x and y, but in a way that does 414 1.1 mrg not enable the compiler to optimize the redundant compare 415 1.1 mrg away. 416 1.1 mrg */ 417 1.1 mrg 418 1.1 mrg enum tree_code op = gimple_assign_rhs_code (asgn); 419 1.1 mrg 420 1.1 mrg enum tree_code cop; 421 1.1 mrg 422 1.1 mrg switch (op) 423 1.1 mrg { 424 1.1 mrg case EQ_EXPR: 425 1.1 mrg case NE_EXPR: 426 1.1 mrg case GT_EXPR: 427 1.1 mrg case GE_EXPR: 428 1.1 mrg case LT_EXPR: 429 1.1 mrg case LE_EXPR: 430 1.1 mrg case LTGT_EXPR: 431 1.1 mrg case UNEQ_EXPR: 432 1.1 mrg case UNGT_EXPR: 433 1.1 mrg case UNGE_EXPR: 434 1.1 mrg case UNLT_EXPR: 435 1.1 mrg case UNLE_EXPR: 436 1.1 mrg case ORDERED_EXPR: 437 1.1 mrg case UNORDERED_EXPR: 438 1.1 mrg cop = invert_tree_comparison (op, 439 1.1 mrg HONOR_NANS 440 1.1 mrg (gimple_assign_rhs1 (asgn))); 441 1.1 mrg 442 1.1 mrg if (cop == ERROR_MARK) 443 1.1 mrg /* ??? Can we do better? */ 444 1.1 mrg continue; 445 1.1 mrg 446 1.1 mrg break; 447 1.1 mrg 448 1.1 mrg /* ??? Maybe handle these too? */ 449 1.1 mrg case TRUTH_NOT_EXPR: 450 1.1 mrg /* ??? The code below assumes binary ops, it would have to 451 1.1 mrg be adjusted for TRUTH_NOT_EXPR, since it's unary. 
*/ 452 1.1 mrg case TRUTH_ANDIF_EXPR: 453 1.1 mrg case TRUTH_ORIF_EXPR: 454 1.1 mrg case TRUTH_AND_EXPR: 455 1.1 mrg case TRUTH_OR_EXPR: 456 1.1 mrg case TRUTH_XOR_EXPR: 457 1.1 mrg default: 458 1.1 mrg continue; 459 1.1 mrg } 460 1.1 mrg 461 1.1 mrg /* These are the operands for the verification. */ 462 1.1 mrg tree lhs = gimple_assign_lhs (asgn); 463 1.1 mrg tree op1 = gimple_assign_rhs1 (asgn); 464 1.1 mrg tree op2 = gimple_assign_rhs2 (asgn); 465 1.1 mrg location_t loc = gimple_location (asgn); 466 1.1 mrg 467 1.1 mrg /* Vector booleans can't be used in conditional branches. ??? 468 1.1 mrg Can we do better? How to reduce compare and 469 1.1 mrg reversed-compare result vectors to a single boolean? */ 470 1.1 mrg if (VECTOR_TYPE_P (TREE_TYPE (op1))) 471 1.1 mrg continue; 472 1.1 mrg 473 1.1 mrg /* useless_type_conversion_p enables conversions from 1-bit 474 1.1 mrg integer types to boolean to be discarded. */ 475 1.1 mrg gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE 476 1.1 mrg || (INTEGRAL_TYPE_P (TREE_TYPE (lhs)) 477 1.1 mrg && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)); 478 1.1 mrg 479 1.1 mrg tree rhs = copy_ssa_name (lhs); 480 1.1 mrg 481 1.1 mrg gimple_stmt_iterator gsi_split = gsi; 482 1.1 mrg /* Don't separate the original assignment from debug stmts 483 1.1 mrg that might be associated with it, and arrange to split the 484 1.1 mrg block after debug stmts, so as to make sure the split block 485 1.1 mrg won't be debug stmts only. 
*/ 486 1.1 mrg gsi_next_nondebug (&gsi_split); 487 1.1 mrg 488 1.1 mrg bool throwing_compare_p = stmt_ends_bb_p (asgn); 489 1.1 mrg if (throwing_compare_p) 490 1.1 mrg { 491 1.1 mrg basic_block nbb = split_edge (non_eh_succ_edge 492 1.1 mrg (gimple_bb (asgn))); 493 1.1 mrg gsi_split = gsi_start_bb (nbb); 494 1.1 mrg 495 1.1 mrg if (dump_file) 496 1.1 mrg fprintf (dump_file, 497 1.1 mrg "Splitting non-EH edge from block %i into %i" 498 1.1 mrg " after a throwing compare\n", 499 1.1 mrg gimple_bb (asgn)->index, nbb->index); 500 1.1 mrg } 501 1.1 mrg 502 1.1 mrg bool same_p = (op1 == op2); 503 1.1 mrg op1 = detach_value (loc, &gsi_split, op1); 504 1.1 mrg op2 = same_p ? op1 : detach_value (loc, &gsi_split, op2); 505 1.1 mrg 506 1.1 mrg gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2); 507 1.1 mrg gimple_set_location (asgnck, loc); 508 1.1 mrg gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT); 509 1.1 mrg 510 1.1 mrg /* We wish to insert a cond_expr after the compare, so arrange 511 1.1 mrg for it to be at the end of a block if it isn't, and for it 512 1.1 mrg to have a single successor in case there's more than 513 1.1 mrg one, as in PR104975. 
*/ 514 1.1 mrg if (!gsi_end_p (gsi_split) 515 1.1 mrg || !single_succ_p (gsi_bb (gsi_split))) 516 1.1 mrg { 517 1.1 mrg if (!gsi_end_p (gsi_split)) 518 1.1 mrg gsi_prev (&gsi_split); 519 1.1 mrg else 520 1.1 mrg gsi_split = gsi_last_bb (gsi_bb (gsi_split)); 521 1.1 mrg basic_block obb = gsi_bb (gsi_split); 522 1.1 mrg basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest; 523 1.1 mrg gsi_next (&gsi_split); 524 1.1 mrg gcc_checking_assert (gsi_end_p (gsi_split)); 525 1.1 mrg 526 1.1 mrg single_succ_edge (bb)->goto_locus = loc; 527 1.1 mrg 528 1.1 mrg if (dump_file) 529 1.1 mrg fprintf (dump_file, 530 1.1 mrg "Splitting block %i into %i" 531 1.1 mrg " before the conditional trap branch\n", 532 1.1 mrg obb->index, nbb->index); 533 1.1 mrg } 534 1.1 mrg 535 1.1 mrg /* If the check assignment must end a basic block, we can't 536 1.1 mrg insert the conditional branch in the same block, so split 537 1.1 mrg the block again, and prepare to insert the conditional 538 1.1 mrg branch in the new block. 539 1.1 mrg 540 1.1 mrg Also assign an EH region to the compare. Even though it's 541 1.1 mrg unlikely that the hardening compare will throw after the 542 1.1 mrg original compare didn't, the compiler won't even know that 543 1.1 mrg it's the same compare operands, so add the EH edge anyway. 
*/ 544 1.1 mrg if (throwing_compare_p) 545 1.1 mrg { 546 1.1 mrg add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn)); 547 1.1 mrg make_eh_edges (asgnck); 548 1.1 mrg 549 1.1 mrg edge ckeh; 550 1.1 mrg basic_block nbb = split_edge (non_eh_succ_edge 551 1.1 mrg (gimple_bb (asgnck), &ckeh)); 552 1.1 mrg gsi_split = gsi_start_bb (nbb); 553 1.1 mrg 554 1.1 mrg if (dump_file) 555 1.1 mrg fprintf (dump_file, 556 1.1 mrg "Splitting non-EH edge from block %i into %i after" 557 1.1 mrg " the newly-inserted reversed throwing compare\n", 558 1.1 mrg gimple_bb (asgnck)->index, nbb->index); 559 1.1 mrg 560 1.1 mrg if (!gimple_seq_empty_p (phi_nodes (ckeh->dest))) 561 1.1 mrg { 562 1.1 mrg edge aseh; 563 1.1 mrg non_eh_succ_edge (gimple_bb (asgn), &aseh); 564 1.1 mrg 565 1.1 mrg gcc_checking_assert (aseh->dest == ckeh->dest); 566 1.1 mrg 567 1.1 mrg for (gphi_iterator psi = gsi_start_phis (ckeh->dest); 568 1.1 mrg !gsi_end_p (psi); gsi_next (&psi)) 569 1.1 mrg { 570 1.1 mrg gphi *phi = psi.phi (); 571 1.1 mrg add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh), ckeh, 572 1.1 mrg gimple_phi_arg_location_from_edge (phi, aseh)); 573 1.1 mrg } 574 1.1 mrg 575 1.1 mrg if (dump_file) 576 1.1 mrg fprintf (dump_file, 577 1.1 mrg "Copying PHI args in EH block %i from %i to %i\n", 578 1.1 mrg aseh->dest->index, aseh->src->index, ckeh->src->index); 579 1.1 mrg } 580 1.1 mrg } 581 1.1 mrg 582 1.1 mrg gcc_checking_assert (single_succ_p (gsi_bb (gsi_split))); 583 1.1 mrg 584 1.1 mrg insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE, 585 1.1 mrg EQ_EXPR, lhs, rhs); 586 1.1 mrg } 587 1.1 mrg 588 1.1 mrg return 0; 589 1.1 mrg } 590 1.1 mrg 591 1.1 mrg /* Instantiate a hardcmp pass. */ 592 1.1 mrg 593 1.1 mrg gimple_opt_pass * 594 1.1 mrg make_pass_harden_compares (gcc::context *ctxt) 595 1.1 mrg { 596 1.1 mrg return new pass_harden_compares (ctxt); 597 1.1 mrg } 598