1 1.1 mrg /* Classes for modeling the state of memory. 2 1.1.1.2 mrg Copyright (C) 2019-2022 Free Software Foundation, Inc. 3 1.1 mrg Contributed by David Malcolm <dmalcolm (at) redhat.com>. 4 1.1 mrg 5 1.1 mrg This file is part of GCC. 6 1.1 mrg 7 1.1 mrg GCC is free software; you can redistribute it and/or modify it 8 1.1 mrg under the terms of the GNU General Public License as published by 9 1.1 mrg the Free Software Foundation; either version 3, or (at your option) 10 1.1 mrg any later version. 11 1.1 mrg 12 1.1 mrg GCC is distributed in the hope that it will be useful, but 13 1.1 mrg WITHOUT ANY WARRANTY; without even the implied warranty of 14 1.1 mrg MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 1.1 mrg General Public License for more details. 16 1.1 mrg 17 1.1 mrg You should have received a copy of the GNU General Public License 18 1.1 mrg along with GCC; see the file COPYING3. If not see 19 1.1 mrg <http://www.gnu.org/licenses/>. */ 20 1.1 mrg 21 1.1 mrg #include "config.h" 22 1.1 mrg #include "system.h" 23 1.1 mrg #include "coretypes.h" 24 1.1 mrg #include "tree.h" 25 1.1 mrg #include "function.h" 26 1.1 mrg #include "basic-block.h" 27 1.1 mrg #include "gimple.h" 28 1.1 mrg #include "gimple-iterator.h" 29 1.1 mrg #include "diagnostic-core.h" 30 1.1 mrg #include "graphviz.h" 31 1.1 mrg #include "options.h" 32 1.1 mrg #include "cgraph.h" 33 1.1 mrg #include "tree-dfa.h" 34 1.1 mrg #include "stringpool.h" 35 1.1 mrg #include "convert.h" 36 1.1 mrg #include "target.h" 37 1.1 mrg #include "fold-const.h" 38 1.1 mrg #include "tree-pretty-print.h" 39 1.1 mrg #include "diagnostic-color.h" 40 1.1 mrg #include "diagnostic-metadata.h" 41 1.1 mrg #include "tristate.h" 42 1.1 mrg #include "bitmap.h" 43 1.1 mrg #include "selftest.h" 44 1.1 mrg #include "function.h" 45 1.1.1.2 mrg #include "json.h" 46 1.1 mrg #include "analyzer/analyzer.h" 47 1.1 mrg #include "analyzer/analyzer-logging.h" 48 1.1 mrg #include "ordered-hash-map.h" 49 1.1 mrg #include 
"options.h" 50 1.1 mrg #include "cgraph.h" 51 1.1 mrg #include "cfg.h" 52 1.1 mrg #include "digraph.h" 53 1.1 mrg #include "analyzer/supergraph.h" 54 1.1 mrg #include "sbitmap.h" 55 1.1.1.2 mrg #include "analyzer/call-string.h" 56 1.1.1.2 mrg #include "analyzer/program-point.h" 57 1.1.1.2 mrg #include "analyzer/store.h" 58 1.1 mrg #include "analyzer/region-model.h" 59 1.1 mrg #include "analyzer/constraint-manager.h" 60 1.1 mrg #include "diagnostic-event-id.h" 61 1.1 mrg #include "analyzer/sm.h" 62 1.1 mrg #include "diagnostic-event-id.h" 63 1.1 mrg #include "analyzer/sm.h" 64 1.1 mrg #include "analyzer/pending-diagnostic.h" 65 1.1.1.2 mrg #include "analyzer/region-model-reachability.h" 66 1.1 mrg #include "analyzer/analyzer-selftests.h" 67 1.1.1.2 mrg #include "analyzer/program-state.h" 68 1.1 mrg #include "stor-layout.h" 69 1.1.1.2 mrg #include "attribs.h" 70 1.1.1.2 mrg #include "tree-object-size.h" 71 1.1.1.2 mrg #include "gimple-ssa.h" 72 1.1.1.2 mrg #include "tree-phinodes.h" 73 1.1.1.2 mrg #include "tree-ssa-operands.h" 74 1.1.1.2 mrg #include "ssa-iterators.h" 75 1.1.1.2 mrg #include "calls.h" 76 1.1 mrg 77 1.1 mrg #if ENABLE_ANALYZER 78 1.1 mrg 79 1.1 mrg namespace ana { 80 1.1 mrg 81 1.1 mrg /* Dump T to PP in language-independent form, for debugging/logging/dumping 82 1.1 mrg purposes. */ 83 1.1 mrg 84 1.1.1.2 mrg void 85 1.1 mrg dump_tree (pretty_printer *pp, tree t) 86 1.1 mrg { 87 1.1 mrg dump_generic_node (pp, t, 0, TDF_SLIM, 0); 88 1.1 mrg } 89 1.1 mrg 90 1.1 mrg /* Dump T to PP in language-independent form in quotes, for 91 1.1 mrg debugging/logging/dumping purposes. */ 92 1.1 mrg 93 1.1 mrg void 94 1.1 mrg dump_quoted_tree (pretty_printer *pp, tree t) 95 1.1 mrg { 96 1.1 mrg pp_begin_quote (pp, pp_show_color (pp)); 97 1.1 mrg dump_tree (pp, t); 98 1.1 mrg pp_end_quote (pp, pp_show_color (pp)); 99 1.1 mrg } 100 1.1 mrg 101 1.1 mrg /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf 102 1.1 mrg calls within other pp_printf calls. 
103 1.1 mrg 104 1.1 mrg default_tree_printer handles 'T' and some other codes by calling 105 1.1 mrg dump_generic_node (pp, t, 0, TDF_SLIM, 0); 106 1.1 mrg dump_generic_node calls pp_printf in various places, leading to 107 1.1 mrg garbled output. 108 1.1 mrg 109 1.1 mrg Ideally pp_printf could be made to be reentrant, but in the meantime 110 1.1 mrg this function provides a workaround. */ 111 1.1 mrg 112 1.1.1.2 mrg void 113 1.1 mrg print_quoted_type (pretty_printer *pp, tree t) 114 1.1 mrg { 115 1.1 mrg pp_begin_quote (pp, pp_show_color (pp)); 116 1.1 mrg dump_generic_node (pp, t, 0, TDF_SLIM, 0); 117 1.1 mrg pp_end_quote (pp, pp_show_color (pp)); 118 1.1 mrg } 119 1.1 mrg 120 1.1.1.2 mrg /* class region_to_value_map. */ 121 1.1 mrg 122 1.1.1.2 mrg /* Assignment operator for region_to_value_map. */ 123 1.1 mrg 124 1.1.1.2 mrg region_to_value_map & 125 1.1.1.2 mrg region_to_value_map::operator= (const region_to_value_map &other) 126 1.1 mrg { 127 1.1.1.2 mrg m_hash_map.empty (); 128 1.1.1.2 mrg for (auto iter : other.m_hash_map) 129 1.1.1.2 mrg { 130 1.1.1.2 mrg const region *reg = iter.first; 131 1.1.1.2 mrg const svalue *sval = iter.second; 132 1.1.1.2 mrg m_hash_map.put (reg, sval); 133 1.1.1.2 mrg } 134 1.1.1.2 mrg return *this; 135 1.1 mrg } 136 1.1 mrg 137 1.1.1.2 mrg /* Equality operator for region_to_value_map. 
*/ 138 1.1 mrg 139 1.1.1.2 mrg bool 140 1.1.1.2 mrg region_to_value_map::operator== (const region_to_value_map &other) const 141 1.1 mrg { 142 1.1.1.2 mrg if (m_hash_map.elements () != other.m_hash_map.elements ()) 143 1.1.1.2 mrg return false; 144 1.1 mrg 145 1.1.1.2 mrg for (auto iter : *this) 146 1.1.1.2 mrg { 147 1.1.1.2 mrg const region *reg = iter.first; 148 1.1.1.2 mrg const svalue *sval = iter.second; 149 1.1.1.2 mrg const svalue * const *other_slot = other.get (reg); 150 1.1.1.2 mrg if (other_slot == NULL) 151 1.1.1.2 mrg return false; 152 1.1.1.2 mrg if (sval != *other_slot) 153 1.1.1.2 mrg return false; 154 1.1.1.2 mrg } 155 1.1 mrg 156 1.1.1.2 mrg return true; 157 1.1.1.2 mrg } 158 1.1 mrg 159 1.1.1.2 mrg /* Dump this object to PP. */ 160 1.1 mrg 161 1.1 mrg void 162 1.1.1.2 mrg region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple, 163 1.1.1.2 mrg bool multiline) const 164 1.1 mrg { 165 1.1.1.2 mrg auto_vec<const region *> regs; 166 1.1.1.2 mrg for (iterator iter = begin (); iter != end (); ++iter) 167 1.1.1.2 mrg regs.safe_push ((*iter).first); 168 1.1.1.2 mrg regs.qsort (region::cmp_ptr_ptr); 169 1.1.1.2 mrg if (multiline) 170 1.1.1.2 mrg pp_newline (pp); 171 1.1 mrg else 172 1.1.1.2 mrg pp_string (pp, " {"); 173 1.1.1.2 mrg unsigned i; 174 1.1.1.2 mrg const region *reg; 175 1.1.1.2 mrg FOR_EACH_VEC_ELT (regs, i, reg) 176 1.1.1.2 mrg { 177 1.1.1.2 mrg if (multiline) 178 1.1.1.2 mrg pp_string (pp, " "); 179 1.1.1.2 mrg else if (i > 0) 180 1.1.1.2 mrg pp_string (pp, ", "); 181 1.1.1.2 mrg reg->dump_to_pp (pp, simple); 182 1.1.1.2 mrg pp_string (pp, ": "); 183 1.1.1.2 mrg const svalue *sval = *get (reg); 184 1.1.1.2 mrg sval->dump_to_pp (pp, true); 185 1.1.1.2 mrg if (multiline) 186 1.1.1.2 mrg pp_newline (pp); 187 1.1.1.2 mrg } 188 1.1.1.2 mrg if (!multiline) 189 1.1.1.2 mrg pp_string (pp, "}"); 190 1.1 mrg } 191 1.1 mrg 192 1.1.1.2 mrg /* Dump this object to stderr. 
*/ 193 1.1 mrg 194 1.1.1.2 mrg DEBUG_FUNCTION void 195 1.1.1.2 mrg region_to_value_map::dump (bool simple) const 196 1.1 mrg { 197 1.1.1.2 mrg pretty_printer pp; 198 1.1.1.2 mrg pp_format_decoder (&pp) = default_tree_printer; 199 1.1.1.2 mrg pp_show_color (&pp) = pp_show_color (global_dc->printer); 200 1.1.1.2 mrg pp.buffer->stream = stderr; 201 1.1.1.2 mrg dump_to_pp (&pp, simple, true); 202 1.1.1.2 mrg pp_newline (&pp); 203 1.1.1.2 mrg pp_flush (&pp); 204 1.1 mrg } 205 1.1 mrg 206 1.1 mrg 207 1.1.1.2 mrg /* Attempt to merge THIS with OTHER, writing the result 208 1.1.1.2 mrg to OUT. 209 1.1.1.2 mrg 210 1.1.1.2 mrg For now, write (region, value) mappings that are in common between THIS 211 1.1.1.2 mrg and OTHER to OUT, effectively taking the intersection, rather than 212 1.1.1.2 mrg rejecting differences. */ 213 1.1.1.2 mrg 214 1.1.1.2 mrg bool 215 1.1.1.2 mrg region_to_value_map::can_merge_with_p (const region_to_value_map &other, 216 1.1.1.2 mrg region_to_value_map *out) const 217 1.1 mrg { 218 1.1.1.2 mrg for (auto iter : *this) 219 1.1.1.2 mrg { 220 1.1.1.2 mrg const region *iter_reg = iter.first; 221 1.1.1.2 mrg const svalue *iter_sval = iter.second; 222 1.1.1.2 mrg const svalue * const * other_slot = other.get (iter_reg); 223 1.1.1.2 mrg if (other_slot) 224 1.1.1.2 mrg if (iter_sval == *other_slot) 225 1.1.1.2 mrg out->put (iter_reg, iter_sval); 226 1.1.1.2 mrg } 227 1.1.1.2 mrg return true; 228 1.1 mrg } 229 1.1 mrg 230 1.1.1.2 mrg /* Purge any state involving SVAL. 
*/ 231 1.1 mrg 232 1.1 mrg void 233 1.1.1.2 mrg region_to_value_map::purge_state_involving (const svalue *sval) 234 1.1 mrg { 235 1.1.1.2 mrg auto_vec<const region *> to_purge; 236 1.1.1.2 mrg for (auto iter : *this) 237 1.1.1.2 mrg { 238 1.1.1.2 mrg const region *iter_reg = iter.first; 239 1.1.1.2 mrg const svalue *iter_sval = iter.second; 240 1.1.1.2 mrg if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval)) 241 1.1.1.2 mrg to_purge.safe_push (iter_reg); 242 1.1.1.2 mrg } 243 1.1.1.2 mrg for (auto iter : to_purge) 244 1.1.1.2 mrg m_hash_map.remove (iter); 245 1.1 mrg } 246 1.1 mrg 247 1.1.1.2 mrg /* class region_model. */ 248 1.1.1.2 mrg 249 1.1.1.2 mrg /* Ctor for region_model: construct an "empty" model. */ 250 1.1 mrg 251 1.1.1.2 mrg region_model::region_model (region_model_manager *mgr) 252 1.1.1.2 mrg : m_mgr (mgr), m_store (), m_current_frame (NULL), 253 1.1.1.2 mrg m_dynamic_extents () 254 1.1 mrg { 255 1.1.1.2 mrg m_constraints = new constraint_manager (mgr); 256 1.1 mrg } 257 1.1 mrg 258 1.1.1.2 mrg /* region_model's copy ctor. */ 259 1.1 mrg 260 1.1.1.2 mrg region_model::region_model (const region_model &other) 261 1.1.1.2 mrg : m_mgr (other.m_mgr), m_store (other.m_store), 262 1.1.1.2 mrg m_constraints (new constraint_manager (*other.m_constraints)), 263 1.1.1.2 mrg m_current_frame (other.m_current_frame), 264 1.1.1.2 mrg m_dynamic_extents (other.m_dynamic_extents) 265 1.1 mrg { 266 1.1 mrg } 267 1.1 mrg 268 1.1.1.2 mrg /* region_model's dtor. */ 269 1.1 mrg 270 1.1.1.2 mrg region_model::~region_model () 271 1.1 mrg { 272 1.1.1.2 mrg delete m_constraints; 273 1.1 mrg } 274 1.1 mrg 275 1.1.1.2 mrg /* region_model's assignment operator. */ 276 1.1 mrg 277 1.1.1.2 mrg region_model & 278 1.1.1.2 mrg region_model::operator= (const region_model &other) 279 1.1 mrg { 280 1.1.1.2 mrg /* m_mgr is const. 
*/ 281 1.1.1.2 mrg gcc_assert (m_mgr == other.m_mgr); 282 1.1.1.2 mrg 283 1.1.1.2 mrg m_store = other.m_store; 284 1.1.1.2 mrg 285 1.1.1.2 mrg delete m_constraints; 286 1.1.1.2 mrg m_constraints = new constraint_manager (*other.m_constraints); 287 1.1.1.2 mrg 288 1.1.1.2 mrg m_current_frame = other.m_current_frame; 289 1.1 mrg 290 1.1.1.2 mrg m_dynamic_extents = other.m_dynamic_extents; 291 1.1 mrg 292 1.1.1.2 mrg return *this; 293 1.1.1.2 mrg } 294 1.1.1.2 mrg 295 1.1.1.2 mrg /* Equality operator for region_model. 296 1.1 mrg 297 1.1.1.2 mrg Amongst other things this directly compares the stores and the constraint 298 1.1.1.2 mrg managers, so for this to be meaningful both this and OTHER should 299 1.1.1.2 mrg have been canonicalized. */ 300 1.1 mrg 301 1.1 mrg bool 302 1.1.1.2 mrg region_model::operator== (const region_model &other) const 303 1.1 mrg { 304 1.1.1.2 mrg /* We can only compare instances that use the same manager. */ 305 1.1.1.2 mrg gcc_assert (m_mgr == other.m_mgr); 306 1.1.1.2 mrg 307 1.1.1.2 mrg if (m_store != other.m_store) 308 1.1 mrg return false; 309 1.1 mrg 310 1.1.1.2 mrg if (*m_constraints != *other.m_constraints) 311 1.1 mrg return false; 312 1.1 mrg 313 1.1.1.2 mrg if (m_current_frame != other.m_current_frame) 314 1.1.1.2 mrg return false; 315 1.1.1.2 mrg 316 1.1.1.2 mrg if (m_dynamic_extents != other.m_dynamic_extents) 317 1.1.1.2 mrg return false; 318 1.1.1.2 mrg 319 1.1.1.2 mrg gcc_checking_assert (hash () == other.hash ()); 320 1.1.1.2 mrg 321 1.1.1.2 mrg return true; 322 1.1 mrg } 323 1.1 mrg 324 1.1.1.2 mrg /* Generate a hash value for this region_model. */ 325 1.1 mrg 326 1.1 mrg hashval_t 327 1.1.1.2 mrg region_model::hash () const 328 1.1 mrg { 329 1.1.1.2 mrg hashval_t result = m_store.hash (); 330 1.1.1.2 mrg result ^= m_constraints->hash (); 331 1.1.1.2 mrg return result; 332 1.1 mrg } 333 1.1 mrg 334 1.1.1.2 mrg /* Dump a representation of this model to PP, showing the 335 1.1.1.2 mrg stack, the store, and any constraints. 
336 1.1.1.2 mrg Use SIMPLE to control how svalues and regions are printed. */ 337 1.1 mrg 338 1.1 mrg void 339 1.1.1.2 mrg region_model::dump_to_pp (pretty_printer *pp, bool simple, 340 1.1.1.2 mrg bool multiline) const 341 1.1.1.2 mrg { 342 1.1.1.2 mrg /* Dump stack. */ 343 1.1.1.2 mrg pp_printf (pp, "stack depth: %i", get_stack_depth ()); 344 1.1.1.2 mrg if (multiline) 345 1.1.1.2 mrg pp_newline (pp); 346 1.1.1.2 mrg else 347 1.1.1.2 mrg pp_string (pp, " {"); 348 1.1.1.2 mrg for (const frame_region *iter_frame = m_current_frame; iter_frame; 349 1.1.1.2 mrg iter_frame = iter_frame->get_calling_frame ()) 350 1.1.1.2 mrg { 351 1.1.1.2 mrg if (multiline) 352 1.1.1.2 mrg pp_string (pp, " "); 353 1.1.1.2 mrg else if (iter_frame != m_current_frame) 354 1.1.1.2 mrg pp_string (pp, ", "); 355 1.1.1.2 mrg pp_printf (pp, "frame (index %i): ", iter_frame->get_index ()); 356 1.1.1.2 mrg iter_frame->dump_to_pp (pp, simple); 357 1.1.1.2 mrg if (multiline) 358 1.1.1.2 mrg pp_newline (pp); 359 1.1.1.2 mrg } 360 1.1.1.2 mrg if (!multiline) 361 1.1.1.2 mrg pp_string (pp, "}"); 362 1.1.1.2 mrg 363 1.1.1.2 mrg /* Dump store. */ 364 1.1.1.2 mrg if (!multiline) 365 1.1.1.2 mrg pp_string (pp, ", {"); 366 1.1.1.2 mrg m_store.dump_to_pp (pp, simple, multiline, 367 1.1.1.2 mrg m_mgr->get_store_manager ()); 368 1.1.1.2 mrg if (!multiline) 369 1.1.1.2 mrg pp_string (pp, "}"); 370 1.1.1.2 mrg 371 1.1.1.2 mrg /* Dump constraints. */ 372 1.1.1.2 mrg pp_string (pp, "constraint_manager:"); 373 1.1.1.2 mrg if (multiline) 374 1.1.1.2 mrg pp_newline (pp); 375 1.1.1.2 mrg else 376 1.1.1.2 mrg pp_string (pp, " {"); 377 1.1.1.2 mrg m_constraints->dump_to_pp (pp, multiline); 378 1.1.1.2 mrg if (!multiline) 379 1.1.1.2 mrg pp_string (pp, "}"); 380 1.1 mrg 381 1.1.1.2 mrg /* Dump sizes of dynamic regions, if any are known. 
*/ 382 1.1.1.2 mrg if (!m_dynamic_extents.is_empty ()) 383 1.1 mrg { 384 1.1.1.2 mrg pp_string (pp, "dynamic_extents:"); 385 1.1.1.2 mrg m_dynamic_extents.dump_to_pp (pp, simple, multiline); 386 1.1 mrg } 387 1.1 mrg } 388 1.1 mrg 389 1.1.1.2 mrg /* Dump a representation of this model to FILE. */ 390 1.1 mrg 391 1.1 mrg void 392 1.1.1.2 mrg region_model::dump (FILE *fp, bool simple, bool multiline) const 393 1.1 mrg { 394 1.1.1.2 mrg pretty_printer pp; 395 1.1.1.2 mrg pp_format_decoder (&pp) = default_tree_printer; 396 1.1.1.2 mrg pp_show_color (&pp) = pp_show_color (global_dc->printer); 397 1.1.1.2 mrg pp.buffer->stream = fp; 398 1.1.1.2 mrg dump_to_pp (&pp, simple, multiline); 399 1.1.1.2 mrg pp_newline (&pp); 400 1.1.1.2 mrg pp_flush (&pp); 401 1.1 mrg } 402 1.1 mrg 403 1.1.1.2 mrg /* Dump a multiline representation of this model to stderr. */ 404 1.1 mrg 405 1.1.1.2 mrg DEBUG_FUNCTION void 406 1.1.1.2 mrg region_model::dump (bool simple) const 407 1.1 mrg { 408 1.1.1.2 mrg dump (stderr, simple, true); 409 1.1 mrg } 410 1.1 mrg 411 1.1.1.2 mrg /* Dump a multiline representation of this model to stderr. */ 412 1.1 mrg 413 1.1.1.2 mrg DEBUG_FUNCTION void 414 1.1.1.2 mrg region_model::debug () const 415 1.1 mrg { 416 1.1.1.2 mrg dump (true); 417 1.1 mrg } 418 1.1 mrg 419 1.1.1.2 mrg /* Assert that this object is valid. */ 420 1.1 mrg 421 1.1.1.2 mrg void 422 1.1.1.2 mrg region_model::validate () const 423 1.1 mrg { 424 1.1.1.2 mrg m_store.validate (); 425 1.1 mrg } 426 1.1 mrg 427 1.1.1.2 mrg /* Canonicalize the store and constraints, to maximize the chance of 428 1.1.1.2 mrg equality between region_model instances. */ 429 1.1 mrg 430 1.1.1.2 mrg void 431 1.1.1.2 mrg region_model::canonicalize () 432 1.1 mrg { 433 1.1.1.2 mrg m_store.canonicalize (m_mgr->get_store_manager ()); 434 1.1.1.2 mrg m_constraints->canonicalize (); 435 1.1 mrg } 436 1.1 mrg 437 1.1.1.2 mrg /* Return true if this region_model is in canonical form. 
*/ 438 1.1 mrg 439 1.1 mrg bool 440 1.1.1.2 mrg region_model::canonicalized_p () const 441 1.1 mrg { 442 1.1.1.2 mrg region_model copy (*this); 443 1.1.1.2 mrg copy.canonicalize (); 444 1.1.1.2 mrg return *this == copy; 445 1.1 mrg } 446 1.1 mrg 447 1.1.1.2 mrg /* See the comment for store::loop_replay_fixup. */ 448 1.1 mrg 449 1.1 mrg void 450 1.1.1.2 mrg region_model::loop_replay_fixup (const region_model *dst_state) 451 1.1 mrg { 452 1.1.1.2 mrg m_store.loop_replay_fixup (dst_state->get_store (), m_mgr); 453 1.1 mrg } 454 1.1 mrg 455 1.1.1.2 mrg /* A subclass of pending_diagnostic for complaining about uses of 456 1.1.1.2 mrg poisoned values. */ 457 1.1 mrg 458 1.1.1.2 mrg class poisoned_value_diagnostic 459 1.1.1.2 mrg : public pending_diagnostic_subclass<poisoned_value_diagnostic> 460 1.1 mrg { 461 1.1.1.2 mrg public: 462 1.1.1.2 mrg poisoned_value_diagnostic (tree expr, enum poison_kind pkind, 463 1.1.1.2 mrg const region *src_region) 464 1.1.1.2 mrg : m_expr (expr), m_pkind (pkind), 465 1.1.1.2 mrg m_src_region (src_region) 466 1.1.1.2 mrg {} 467 1.1 mrg 468 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; } 469 1.1 mrg 470 1.1.1.2 mrg bool use_of_uninit_p () const FINAL OVERRIDE 471 1.1.1.2 mrg { 472 1.1.1.2 mrg return m_pkind == POISON_KIND_UNINIT; 473 1.1 mrg } 474 1.1 mrg 475 1.1.1.2 mrg bool operator== (const poisoned_value_diagnostic &other) const 476 1.1.1.2 mrg { 477 1.1.1.2 mrg return (m_expr == other.m_expr 478 1.1.1.2 mrg && m_pkind == other.m_pkind 479 1.1.1.2 mrg && m_src_region == other.m_src_region); 480 1.1.1.2 mrg } 481 1.1 mrg 482 1.1.1.2 mrg int get_controlling_option () const FINAL OVERRIDE 483 1.1.1.2 mrg { 484 1.1.1.2 mrg switch (m_pkind) 485 1.1.1.2 mrg { 486 1.1.1.2 mrg default: 487 1.1.1.2 mrg gcc_unreachable (); 488 1.1.1.2 mrg case POISON_KIND_UNINIT: 489 1.1.1.2 mrg return OPT_Wanalyzer_use_of_uninitialized_value; 490 1.1.1.2 mrg case POISON_KIND_FREED: 491 1.1.1.2 mrg return 
OPT_Wanalyzer_use_after_free; 492 1.1.1.2 mrg case POISON_KIND_POPPED_STACK: 493 1.1.1.2 mrg return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame; 494 1.1.1.2 mrg } 495 1.1.1.2 mrg } 496 1.1 mrg 497 1.1.1.2 mrg bool emit (rich_location *rich_loc) FINAL OVERRIDE 498 1.1.1.2 mrg { 499 1.1.1.2 mrg switch (m_pkind) 500 1.1.1.2 mrg { 501 1.1.1.2 mrg default: 502 1.1.1.2 mrg gcc_unreachable (); 503 1.1.1.2 mrg case POISON_KIND_UNINIT: 504 1.1.1.2 mrg { 505 1.1.1.2 mrg diagnostic_metadata m; 506 1.1.1.2 mrg m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */ 507 1.1.1.2 mrg return warning_meta (rich_loc, m, get_controlling_option (), 508 1.1.1.2 mrg "use of uninitialized value %qE", 509 1.1.1.2 mrg m_expr); 510 1.1.1.2 mrg } 511 1.1.1.2 mrg break; 512 1.1.1.2 mrg case POISON_KIND_FREED: 513 1.1.1.2 mrg { 514 1.1.1.2 mrg diagnostic_metadata m; 515 1.1.1.2 mrg m.add_cwe (416); /* "CWE-416: Use After Free". */ 516 1.1.1.2 mrg return warning_meta (rich_loc, m, get_controlling_option (), 517 1.1.1.2 mrg "use after %<free%> of %qE", 518 1.1.1.2 mrg m_expr); 519 1.1.1.2 mrg } 520 1.1.1.2 mrg break; 521 1.1.1.2 mrg case POISON_KIND_POPPED_STACK: 522 1.1.1.2 mrg { 523 1.1.1.2 mrg /* TODO: which CWE? 
*/ 524 1.1.1.2 mrg return warning_at 525 1.1.1.2 mrg (rich_loc, get_controlling_option (), 526 1.1.1.2 mrg "dereferencing pointer %qE to within stale stack frame", 527 1.1.1.2 mrg m_expr); 528 1.1.1.2 mrg } 529 1.1.1.2 mrg break; 530 1.1.1.2 mrg } 531 1.1.1.2 mrg } 532 1.1 mrg 533 1.1.1.2 mrg label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE 534 1.1.1.2 mrg { 535 1.1.1.2 mrg switch (m_pkind) 536 1.1.1.2 mrg { 537 1.1.1.2 mrg default: 538 1.1.1.2 mrg gcc_unreachable (); 539 1.1.1.2 mrg case POISON_KIND_UNINIT: 540 1.1.1.2 mrg return ev.formatted_print ("use of uninitialized value %qE here", 541 1.1.1.2 mrg m_expr); 542 1.1.1.2 mrg case POISON_KIND_FREED: 543 1.1.1.2 mrg return ev.formatted_print ("use after %<free%> of %qE here", 544 1.1.1.2 mrg m_expr); 545 1.1.1.2 mrg case POISON_KIND_POPPED_STACK: 546 1.1.1.2 mrg return ev.formatted_print 547 1.1.1.2 mrg ("dereferencing pointer %qE to within stale stack frame", 548 1.1.1.2 mrg m_expr); 549 1.1.1.2 mrg } 550 1.1.1.2 mrg } 551 1.1 mrg 552 1.1.1.2 mrg void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE 553 1.1.1.2 mrg { 554 1.1.1.2 mrg if (m_src_region) 555 1.1.1.2 mrg interest->add_region_creation (m_src_region); 556 1.1.1.2 mrg } 557 1.1 mrg 558 1.1.1.2 mrg private: 559 1.1.1.2 mrg tree m_expr; 560 1.1.1.2 mrg enum poison_kind m_pkind; 561 1.1.1.2 mrg const region *m_src_region; 562 1.1.1.2 mrg }; 563 1.1 mrg 564 1.1.1.2 mrg /* A subclass of pending_diagnostic for complaining about shifts 565 1.1.1.2 mrg by negative counts. 
*/ 566 1.1 mrg 567 1.1.1.2 mrg class shift_count_negative_diagnostic 568 1.1.1.2 mrg : public pending_diagnostic_subclass<shift_count_negative_diagnostic> 569 1.1.1.2 mrg { 570 1.1.1.2 mrg public: 571 1.1.1.2 mrg shift_count_negative_diagnostic (const gassign *assign, tree count_cst) 572 1.1.1.2 mrg : m_assign (assign), m_count_cst (count_cst) 573 1.1.1.2 mrg {} 574 1.1 mrg 575 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE 576 1.1.1.2 mrg { 577 1.1.1.2 mrg return "shift_count_negative_diagnostic"; 578 1.1.1.2 mrg } 579 1.1 mrg 580 1.1.1.2 mrg bool operator== (const shift_count_negative_diagnostic &other) const 581 1.1.1.2 mrg { 582 1.1.1.2 mrg return (m_assign == other.m_assign 583 1.1.1.2 mrg && same_tree_p (m_count_cst, other.m_count_cst)); 584 1.1.1.2 mrg } 585 1.1 mrg 586 1.1.1.2 mrg int get_controlling_option () const FINAL OVERRIDE 587 1.1.1.2 mrg { 588 1.1.1.2 mrg return OPT_Wanalyzer_shift_count_negative; 589 1.1.1.2 mrg } 590 1.1 mrg 591 1.1.1.2 mrg bool emit (rich_location *rich_loc) FINAL OVERRIDE 592 1.1.1.2 mrg { 593 1.1.1.2 mrg return warning_at (rich_loc, get_controlling_option (), 594 1.1.1.2 mrg "shift by negative count (%qE)", m_count_cst); 595 1.1.1.2 mrg } 596 1.1 mrg 597 1.1.1.2 mrg label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE 598 1.1 mrg { 599 1.1.1.2 mrg return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst); 600 1.1 mrg } 601 1.1 mrg 602 1.1.1.2 mrg private: 603 1.1.1.2 mrg const gassign *m_assign; 604 1.1.1.2 mrg tree m_count_cst; 605 1.1.1.2 mrg }; 606 1.1 mrg 607 1.1.1.2 mrg /* A subclass of pending_diagnostic for complaining about shifts 608 1.1.1.2 mrg by counts >= the width of the operand type. 
*/ 609 1.1 mrg 610 1.1.1.2 mrg class shift_count_overflow_diagnostic 611 1.1.1.2 mrg : public pending_diagnostic_subclass<shift_count_overflow_diagnostic> 612 1.1 mrg { 613 1.1.1.2 mrg public: 614 1.1.1.2 mrg shift_count_overflow_diagnostic (const gassign *assign, 615 1.1.1.2 mrg int operand_precision, 616 1.1.1.2 mrg tree count_cst) 617 1.1.1.2 mrg : m_assign (assign), m_operand_precision (operand_precision), 618 1.1.1.2 mrg m_count_cst (count_cst) 619 1.1.1.2 mrg {} 620 1.1 mrg 621 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE 622 1.1.1.2 mrg { 623 1.1.1.2 mrg return "shift_count_overflow_diagnostic"; 624 1.1.1.2 mrg } 625 1.1 mrg 626 1.1.1.2 mrg bool operator== (const shift_count_overflow_diagnostic &other) const 627 1.1.1.2 mrg { 628 1.1.1.2 mrg return (m_assign == other.m_assign 629 1.1.1.2 mrg && m_operand_precision == other.m_operand_precision 630 1.1.1.2 mrg && same_tree_p (m_count_cst, other.m_count_cst)); 631 1.1.1.2 mrg } 632 1.1 mrg 633 1.1.1.2 mrg int get_controlling_option () const FINAL OVERRIDE 634 1.1.1.2 mrg { 635 1.1.1.2 mrg return OPT_Wanalyzer_shift_count_overflow; 636 1.1.1.2 mrg } 637 1.1 mrg 638 1.1.1.2 mrg bool emit (rich_location *rich_loc) FINAL OVERRIDE 639 1.1.1.2 mrg { 640 1.1.1.2 mrg return warning_at (rich_loc, get_controlling_option (), 641 1.1.1.2 mrg "shift by count (%qE) >= precision of type (%qi)", 642 1.1.1.2 mrg m_count_cst, m_operand_precision); 643 1.1.1.2 mrg } 644 1.1 mrg 645 1.1.1.2 mrg label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE 646 1.1.1.2 mrg { 647 1.1.1.2 mrg return ev.formatted_print ("shift by count %qE here", m_count_cst); 648 1.1.1.2 mrg } 649 1.1 mrg 650 1.1.1.2 mrg private: 651 1.1.1.2 mrg const gassign *m_assign; 652 1.1.1.2 mrg int m_operand_precision; 653 1.1.1.2 mrg tree m_count_cst; 654 1.1.1.2 mrg }; 655 1.1 mrg 656 1.1.1.2 mrg /* If ASSIGN is a stmt that can be modelled via 657 1.1.1.2 mrg set_value (lhs_reg, SVALUE, CTXT) 658 1.1.1.2 mrg for some SVALUE, get 
the SVALUE. 659 1.1.1.2 mrg Otherwise return NULL. */ 660 1.1 mrg 661 1.1.1.2 mrg const svalue * 662 1.1.1.2 mrg region_model::get_gassign_result (const gassign *assign, 663 1.1.1.2 mrg region_model_context *ctxt) 664 1.1 mrg { 665 1.1.1.2 mrg tree lhs = gimple_assign_lhs (assign); 666 1.1.1.2 mrg tree rhs1 = gimple_assign_rhs1 (assign); 667 1.1.1.2 mrg enum tree_code op = gimple_assign_rhs_code (assign); 668 1.1.1.2 mrg switch (op) 669 1.1.1.2 mrg { 670 1.1.1.2 mrg default: 671 1.1.1.2 mrg return NULL; 672 1.1 mrg 673 1.1.1.2 mrg case POINTER_PLUS_EXPR: 674 1.1.1.2 mrg { 675 1.1.1.2 mrg /* e.g. "_1 = a_10(D) + 12;" */ 676 1.1.1.2 mrg tree ptr = rhs1; 677 1.1.1.2 mrg tree offset = gimple_assign_rhs2 (assign); 678 1.1 mrg 679 1.1.1.2 mrg const svalue *ptr_sval = get_rvalue (ptr, ctxt); 680 1.1.1.2 mrg const svalue *offset_sval = get_rvalue (offset, ctxt); 681 1.1.1.2 mrg /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR] 682 1.1.1.2 mrg is an integer of type sizetype". */ 683 1.1.1.2 mrg offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval); 684 1.1.1.2 mrg 685 1.1.1.2 mrg const svalue *sval_binop 686 1.1.1.2 mrg = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op, 687 1.1.1.2 mrg ptr_sval, offset_sval); 688 1.1.1.2 mrg return sval_binop; 689 1.1.1.2 mrg } 690 1.1.1.2 mrg break; 691 1.1 mrg 692 1.1.1.2 mrg case POINTER_DIFF_EXPR: 693 1.1.1.2 mrg { 694 1.1.1.2 mrg /* e.g. "_1 = p_2(D) - q_3(D);". */ 695 1.1.1.2 mrg tree rhs2 = gimple_assign_rhs2 (assign); 696 1.1.1.2 mrg const svalue *rhs1_sval = get_rvalue (rhs1, ctxt); 697 1.1.1.2 mrg const svalue *rhs2_sval = get_rvalue (rhs2, ctxt); 698 1.1 mrg 699 1.1.1.2 mrg // TODO: perhaps fold to zero if they're known to be equal? 
700 1.1 mrg 701 1.1.1.2 mrg const svalue *sval_binop 702 1.1.1.2 mrg = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op, 703 1.1.1.2 mrg rhs1_sval, rhs2_sval); 704 1.1.1.2 mrg return sval_binop; 705 1.1.1.2 mrg } 706 1.1.1.2 mrg break; 707 1.1 mrg 708 1.1.1.2 mrg /* Assignments of the form 709 1.1.1.2 mrg set_value (lvalue (LHS), rvalue (EXPR)) 710 1.1.1.2 mrg for various EXPR. 711 1.1.1.2 mrg We already have the lvalue for the LHS above, as "lhs_reg". */ 712 1.1.1.2 mrg case ADDR_EXPR: /* LHS = &RHS; */ 713 1.1.1.2 mrg case BIT_FIELD_REF: 714 1.1.1.2 mrg case COMPONENT_REF: /* LHS = op0.op1; */ 715 1.1.1.2 mrg case MEM_REF: 716 1.1.1.2 mrg case REAL_CST: 717 1.1.1.2 mrg case COMPLEX_CST: 718 1.1.1.2 mrg case VECTOR_CST: 719 1.1.1.2 mrg case INTEGER_CST: 720 1.1.1.2 mrg case ARRAY_REF: 721 1.1.1.2 mrg case SSA_NAME: /* LHS = VAR; */ 722 1.1.1.2 mrg case VAR_DECL: /* LHS = VAR; */ 723 1.1.1.2 mrg case PARM_DECL:/* LHS = VAR; */ 724 1.1.1.2 mrg case REALPART_EXPR: 725 1.1.1.2 mrg case IMAGPART_EXPR: 726 1.1.1.2 mrg return get_rvalue (rhs1, ctxt); 727 1.1.1.2 mrg 728 1.1.1.2 mrg case ABS_EXPR: 729 1.1.1.2 mrg case ABSU_EXPR: 730 1.1.1.2 mrg case CONJ_EXPR: 731 1.1.1.2 mrg case BIT_NOT_EXPR: 732 1.1.1.2 mrg case FIX_TRUNC_EXPR: 733 1.1.1.2 mrg case FLOAT_EXPR: 734 1.1.1.2 mrg case NEGATE_EXPR: 735 1.1.1.2 mrg case NOP_EXPR: 736 1.1.1.2 mrg case VIEW_CONVERT_EXPR: 737 1.1.1.2 mrg { 738 1.1.1.2 mrg /* Unary ops. 
*/ 739 1.1.1.2 mrg const svalue *rhs_sval = get_rvalue (rhs1, ctxt); 740 1.1.1.2 mrg const svalue *sval_unaryop 741 1.1.1.2 mrg = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval); 742 1.1.1.2 mrg return sval_unaryop; 743 1.1.1.2 mrg } 744 1.1 mrg 745 1.1.1.2 mrg case EQ_EXPR: 746 1.1.1.2 mrg case GE_EXPR: 747 1.1.1.2 mrg case LE_EXPR: 748 1.1.1.2 mrg case NE_EXPR: 749 1.1.1.2 mrg case GT_EXPR: 750 1.1.1.2 mrg case LT_EXPR: 751 1.1.1.2 mrg case UNORDERED_EXPR: 752 1.1.1.2 mrg case ORDERED_EXPR: 753 1.1.1.2 mrg { 754 1.1.1.2 mrg tree rhs2 = gimple_assign_rhs2 (assign); 755 1.1 mrg 756 1.1.1.2 mrg const svalue *rhs1_sval = get_rvalue (rhs1, ctxt); 757 1.1.1.2 mrg const svalue *rhs2_sval = get_rvalue (rhs2, ctxt); 758 1.1 mrg 759 1.1.1.2 mrg if (TREE_TYPE (lhs) == boolean_type_node) 760 1.1 mrg { 761 1.1.1.2 mrg /* Consider constraints between svalues. */ 762 1.1.1.2 mrg tristate t = eval_condition (rhs1_sval, op, rhs2_sval); 763 1.1.1.2 mrg if (t.is_known ()) 764 1.1.1.2 mrg return m_mgr->get_or_create_constant_svalue 765 1.1.1.2 mrg (t.is_true () ? boolean_true_node : boolean_false_node); 766 1.1 mrg } 767 1.1 mrg 768 1.1.1.2 mrg /* Otherwise, generate a symbolic binary op. 
*/ 769 1.1.1.2 mrg const svalue *sval_binop 770 1.1.1.2 mrg = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op, 771 1.1.1.2 mrg rhs1_sval, rhs2_sval); 772 1.1.1.2 mrg return sval_binop; 773 1.1.1.2 mrg } 774 1.1.1.2 mrg break; 775 1.1 mrg 776 1.1.1.2 mrg case PLUS_EXPR: 777 1.1.1.2 mrg case MINUS_EXPR: 778 1.1.1.2 mrg case MULT_EXPR: 779 1.1.1.2 mrg case MULT_HIGHPART_EXPR: 780 1.1.1.2 mrg case TRUNC_DIV_EXPR: 781 1.1.1.2 mrg case CEIL_DIV_EXPR: 782 1.1.1.2 mrg case FLOOR_DIV_EXPR: 783 1.1.1.2 mrg case ROUND_DIV_EXPR: 784 1.1.1.2 mrg case TRUNC_MOD_EXPR: 785 1.1.1.2 mrg case CEIL_MOD_EXPR: 786 1.1.1.2 mrg case FLOOR_MOD_EXPR: 787 1.1.1.2 mrg case ROUND_MOD_EXPR: 788 1.1.1.2 mrg case RDIV_EXPR: 789 1.1.1.2 mrg case EXACT_DIV_EXPR: 790 1.1.1.2 mrg case LSHIFT_EXPR: 791 1.1.1.2 mrg case RSHIFT_EXPR: 792 1.1.1.2 mrg case LROTATE_EXPR: 793 1.1.1.2 mrg case RROTATE_EXPR: 794 1.1.1.2 mrg case BIT_IOR_EXPR: 795 1.1.1.2 mrg case BIT_XOR_EXPR: 796 1.1.1.2 mrg case BIT_AND_EXPR: 797 1.1.1.2 mrg case MIN_EXPR: 798 1.1.1.2 mrg case MAX_EXPR: 799 1.1.1.2 mrg case COMPLEX_EXPR: 800 1.1.1.2 mrg { 801 1.1.1.2 mrg /* Binary ops. */ 802 1.1.1.2 mrg tree rhs2 = gimple_assign_rhs2 (assign); 803 1.1 mrg 804 1.1.1.2 mrg const svalue *rhs1_sval = get_rvalue (rhs1, ctxt); 805 1.1.1.2 mrg const svalue *rhs2_sval = get_rvalue (rhs2, ctxt); 806 1.1 mrg 807 1.1.1.2 mrg if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR)) 808 1.1.1.2 mrg { 809 1.1.1.2 mrg /* "INT34-C. Do not shift an expression by a negative number of bits 810 1.1.1.2 mrg or by greater than or equal to the number of bits that exist in 811 1.1.1.2 mrg the operand." 
*/ 812 1.1.1.2 mrg if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ()) 813 1.1.1.2 mrg if (TREE_CODE (rhs2_cst) == INTEGER_CST) 814 1.1.1.2 mrg { 815 1.1.1.2 mrg if (tree_int_cst_sgn (rhs2_cst) < 0) 816 1.1.1.2 mrg ctxt->warn (new shift_count_negative_diagnostic 817 1.1.1.2 mrg (assign, rhs2_cst)); 818 1.1.1.2 mrg else if (compare_tree_int (rhs2_cst, 819 1.1.1.2 mrg TYPE_PRECISION (TREE_TYPE (rhs1))) 820 1.1.1.2 mrg >= 0) 821 1.1.1.2 mrg ctxt->warn (new shift_count_overflow_diagnostic 822 1.1.1.2 mrg (assign, TYPE_PRECISION (TREE_TYPE (rhs1)), 823 1.1.1.2 mrg rhs2_cst)); 824 1.1.1.2 mrg } 825 1.1.1.2 mrg } 826 1.1 mrg 827 1.1.1.2 mrg const svalue *sval_binop 828 1.1.1.2 mrg = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op, 829 1.1.1.2 mrg rhs1_sval, rhs2_sval); 830 1.1.1.2 mrg return sval_binop; 831 1.1.1.2 mrg } 832 1.1.1.2 mrg 833 1.1.1.2 mrg /* Vector expressions. In theory we could implement these elementwise, 834 1.1.1.2 mrg but for now, simply return unknown values. */ 835 1.1.1.2 mrg case VEC_DUPLICATE_EXPR: 836 1.1.1.2 mrg case VEC_SERIES_EXPR: 837 1.1.1.2 mrg case VEC_COND_EXPR: 838 1.1.1.2 mrg case VEC_PERM_EXPR: 839 1.1.1.2 mrg case VEC_WIDEN_MULT_HI_EXPR: 840 1.1.1.2 mrg case VEC_WIDEN_MULT_LO_EXPR: 841 1.1.1.2 mrg case VEC_WIDEN_MULT_EVEN_EXPR: 842 1.1.1.2 mrg case VEC_WIDEN_MULT_ODD_EXPR: 843 1.1.1.2 mrg case VEC_UNPACK_HI_EXPR: 844 1.1.1.2 mrg case VEC_UNPACK_LO_EXPR: 845 1.1.1.2 mrg case VEC_UNPACK_FLOAT_HI_EXPR: 846 1.1.1.2 mrg case VEC_UNPACK_FLOAT_LO_EXPR: 847 1.1.1.2 mrg case VEC_UNPACK_FIX_TRUNC_HI_EXPR: 848 1.1.1.2 mrg case VEC_UNPACK_FIX_TRUNC_LO_EXPR: 849 1.1.1.2 mrg case VEC_PACK_TRUNC_EXPR: 850 1.1.1.2 mrg case VEC_PACK_SAT_EXPR: 851 1.1.1.2 mrg case VEC_PACK_FIX_TRUNC_EXPR: 852 1.1.1.2 mrg case VEC_PACK_FLOAT_EXPR: 853 1.1.1.2 mrg case VEC_WIDEN_LSHIFT_HI_EXPR: 854 1.1.1.2 mrg case VEC_WIDEN_LSHIFT_LO_EXPR: 855 1.1.1.2 mrg return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs)); 856 1.1.1.2 mrg } 857 1.1.1.2 mrg } 858 
1.1.1.2 mrg 859 1.1.1.2 mrg /* Workaround for discarding certain false positives from 860 1.1.1.2 mrg -Wanalyzer-use-of-uninitialized-value 861 1.1.1.2 mrg of the form: 862 1.1.1.2 mrg ((A OR-IF B) OR-IF C) 863 1.1.1.2 mrg and: 864 1.1.1.2 mrg ((A AND-IF B) AND-IF C) 865 1.1.1.2 mrg where evaluating B is redundant, but could involve simple accesses of 866 1.1.1.2 mrg uninitialized locals. 867 1.1.1.2 mrg 868 1.1.1.2 mrg When optimization is turned on the FE can immediately fold compound 869 1.1.1.2 mrg conditionals. Specifically, c_parser_condition parses this condition: 870 1.1.1.2 mrg ((A OR-IF B) OR-IF C) 871 1.1.1.2 mrg and calls c_fully_fold on the condition. 872 1.1.1.2 mrg Within c_fully_fold, fold_truth_andor is called, which bails when 873 1.1.1.2 mrg optimization is off, but if any optimization is turned on can convert the 874 1.1.1.2 mrg ((A OR-IF B) OR-IF C) 875 1.1.1.2 mrg into: 876 1.1.1.2 mrg ((A OR B) OR_IF C) 877 1.1.1.2 mrg for sufficiently simple B 878 1.1.1.2 mrg i.e. the inner OR-IF becomes an OR. 879 1.1.1.2 mrg At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr), 880 1.1.1.2 mrg giving this for the inner condition: 881 1.1.1.2 mrg tmp = A | B; 882 1.1.1.2 mrg if (tmp) 883 1.1.1.2 mrg thus effectively synthesizing a redundant access of B when optimization 884 1.1.1.2 mrg is turned on, when compared to: 885 1.1.1.2 mrg if (A) goto L1; else goto L4; 886 1.1.1.2 mrg L1: if (B) goto L2; else goto L4; 887 1.1.1.2 mrg L2: if (C) goto L3; else goto L4; 888 1.1.1.2 mrg for the unoptimized case. 889 1.1.1.2 mrg 890 1.1.1.2 mrg Return true if CTXT appears to be handling such a short-circuitable stmt, 891 1.1.1.2 mrg such as the def-stmt for B for the: 892 1.1.1.2 mrg tmp = A | B; 893 1.1.1.2 mrg case above, for the case where A is true and thus B would have been 894 1.1.1.2 mrg short-circuited without optimization, using MODEL for the value of A. 
*/ 895 1.1.1.2 mrg 896 1.1.1.2 mrg static bool 897 1.1.1.2 mrg within_short_circuited_stmt_p (const region_model *model, 898 1.1.1.2 mrg const gassign *assign_stmt) 899 1.1.1.2 mrg { 900 1.1.1.2 mrg /* We must have an assignment to a temporary of _Bool type. */ 901 1.1.1.2 mrg tree lhs = gimple_assign_lhs (assign_stmt); 902 1.1.1.2 mrg if (TREE_TYPE (lhs) != boolean_type_node) 903 1.1.1.2 mrg return false; 904 1.1.1.2 mrg if (TREE_CODE (lhs) != SSA_NAME) 905 1.1.1.2 mrg return false; 906 1.1.1.2 mrg if (SSA_NAME_VAR (lhs) != NULL_TREE) 907 1.1.1.2 mrg return false; 908 1.1 mrg 909 1.1.1.2 mrg /* The temporary bool must be used exactly once: as the second arg of 910 1.1.1.2 mrg a BIT_IOR_EXPR or BIT_AND_EXPR. */ 911 1.1.1.2 mrg use_operand_p use_op; 912 1.1.1.2 mrg gimple *use_stmt; 913 1.1.1.2 mrg if (!single_imm_use (lhs, &use_op, &use_stmt)) 914 1.1.1.2 mrg return false; 915 1.1.1.2 mrg const gassign *use_assign = dyn_cast <const gassign *> (use_stmt); 916 1.1.1.2 mrg if (!use_assign) 917 1.1.1.2 mrg return false; 918 1.1.1.2 mrg enum tree_code op = gimple_assign_rhs_code (use_assign); 919 1.1.1.2 mrg if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR)) 920 1.1.1.2 mrg return false; 921 1.1.1.2 mrg if (!(gimple_assign_rhs1 (use_assign) != lhs 922 1.1.1.2 mrg && gimple_assign_rhs2 (use_assign) == lhs)) 923 1.1.1.2 mrg return false; 924 1.1 mrg 925 1.1.1.2 mrg /* The first arg of the bitwise stmt must have a known value in MODEL 926 1.1.1.2 mrg that implies that the value of the second arg doesn't matter, i.e. 927 1.1.1.2 mrg 1 for bitwise or, 0 for bitwise and. */ 928 1.1.1.2 mrg tree other_arg = gimple_assign_rhs1 (use_assign); 929 1.1.1.2 mrg /* Use a NULL ctxt here to avoid generating warnings. 
*/ 930 1.1.1.2 mrg const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL); 931 1.1.1.2 mrg tree other_arg_cst = other_arg_sval->maybe_get_constant (); 932 1.1.1.2 mrg if (!other_arg_cst) 933 1.1.1.2 mrg return false; 934 1.1.1.2 mrg switch (op) 935 1.1.1.2 mrg { 936 1.1.1.2 mrg default: 937 1.1.1.2 mrg gcc_unreachable (); 938 1.1.1.2 mrg case BIT_IOR_EXPR: 939 1.1.1.2 mrg if (zerop (other_arg_cst)) 940 1.1.1.2 mrg return false; 941 1.1.1.2 mrg break; 942 1.1.1.2 mrg case BIT_AND_EXPR: 943 1.1.1.2 mrg if (!zerop (other_arg_cst)) 944 1.1.1.2 mrg return false; 945 1.1.1.2 mrg break; 946 1.1.1.2 mrg } 947 1.1 mrg 948 1.1.1.2 mrg /* All tests passed. We appear to be in a stmt that generates a boolean 949 1.1.1.2 mrg temporary with a value that won't matter. */ 950 1.1.1.2 mrg return true; 951 1.1 mrg } 952 1.1 mrg 953 1.1.1.2 mrg /* Workaround for discarding certain false positives from 954 1.1.1.2 mrg -Wanalyzer-use-of-uninitialized-value 955 1.1.1.2 mrg seen with -ftrivial-auto-var-init=. 956 1.1 mrg 957 1.1.1.2 mrg -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT. 958 1.1 mrg 959 1.1.1.2 mrg If the address of the var is taken, gimplification will give us 960 1.1.1.2 mrg something like: 961 1.1 mrg 962 1.1.1.2 mrg _1 = .DEFERRED_INIT (4, 2, &"len"[0]); 963 1.1.1.2 mrg len = _1; 964 1.1 mrg 965 1.1.1.2 mrg The result of DEFERRED_INIT will be an uninit value; we don't 966 1.1.1.2 mrg want to emit a false positive for "len = _1;" 967 1.1 mrg 968 1.1.1.2 mrg Return true if ASSIGN_STMT is such a stmt. */ 969 1.1 mrg 970 1.1.1.2 mrg static bool 971 1.1.1.2 mrg due_to_ifn_deferred_init_p (const gassign *assign_stmt) 972 1.1 mrg 973 1.1 mrg { 974 1.1.1.2 mrg /* We must have an assignment to a decl from an SSA name that's the 975 1.1.1.2 mrg result of a IFN_DEFERRED_INIT call. 
*/ 976 1.1.1.2 mrg if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME) 977 1.1.1.2 mrg return false; 978 1.1.1.2 mrg tree lhs = gimple_assign_lhs (assign_stmt); 979 1.1.1.2 mrg if (TREE_CODE (lhs) != VAR_DECL) 980 1.1.1.2 mrg return false; 981 1.1.1.2 mrg tree rhs = gimple_assign_rhs1 (assign_stmt); 982 1.1.1.2 mrg if (TREE_CODE (rhs) != SSA_NAME) 983 1.1.1.2 mrg return false; 984 1.1.1.2 mrg const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs); 985 1.1.1.2 mrg const gcall *call = dyn_cast <const gcall *> (def_stmt); 986 1.1.1.2 mrg if (!call) 987 1.1.1.2 mrg return false; 988 1.1.1.2 mrg if (gimple_call_internal_p (call) 989 1.1.1.2 mrg && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT) 990 1.1.1.2 mrg return true; 991 1.1.1.2 mrg return false; 992 1.1 mrg } 993 1.1 mrg 994 1.1.1.2 mrg /* Check for SVAL being poisoned, adding a warning to CTXT. 995 1.1.1.2 mrg Return SVAL, or, if a warning is added, another value, to avoid 996 1.1.1.2 mrg repeatedly complaining about the same poisoned value in followup code. */ 997 1.1.1.2 mrg 998 1.1.1.2 mrg const svalue * 999 1.1.1.2 mrg region_model::check_for_poison (const svalue *sval, 1000 1.1.1.2 mrg tree expr, 1001 1.1.1.2 mrg region_model_context *ctxt) const 1002 1.1 mrg { 1003 1.1.1.2 mrg if (!ctxt) 1004 1.1.1.2 mrg return sval; 1005 1.1 mrg 1006 1.1.1.2 mrg if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ()) 1007 1.1.1.2 mrg { 1008 1.1.1.2 mrg enum poison_kind pkind = poisoned_sval->get_poison_kind (); 1009 1.1 mrg 1010 1.1.1.2 mrg /* Ignore uninitialized uses of empty types; there's nothing 1011 1.1.1.2 mrg to initialize. 
*/ 1012 1.1.1.2 mrg if (pkind == POISON_KIND_UNINIT 1013 1.1.1.2 mrg && sval->get_type () 1014 1.1.1.2 mrg && is_empty_type (sval->get_type ())) 1015 1.1.1.2 mrg return sval; 1016 1.1.1.2 mrg 1017 1.1.1.2 mrg if (pkind == POISON_KIND_UNINIT) 1018 1.1.1.2 mrg if (const gimple *curr_stmt = ctxt->get_stmt ()) 1019 1.1.1.2 mrg if (const gassign *assign_stmt 1020 1.1.1.2 mrg = dyn_cast <const gassign *> (curr_stmt)) 1021 1.1.1.2 mrg { 1022 1.1.1.2 mrg /* Special case to avoid certain false positives. */ 1023 1.1.1.2 mrg if (within_short_circuited_stmt_p (this, assign_stmt)) 1024 1.1.1.2 mrg return sval; 1025 1.1.1.2 mrg 1026 1.1.1.2 mrg /* Special case to avoid false positive on 1027 1.1.1.2 mrg -ftrivial-auto-var-init=. */ 1028 1.1.1.2 mrg if (due_to_ifn_deferred_init_p (assign_stmt)) 1029 1.1.1.2 mrg return sval; 1030 1.1.1.2 mrg } 1031 1.1 mrg 1032 1.1.1.2 mrg /* If we have an SSA name for a temporary, we don't want to print 1033 1.1.1.2 mrg '<unknown>'. 1034 1.1.1.2 mrg Poisoned values are shared by type, and so we can't reconstruct 1035 1.1.1.2 mrg the tree other than via the def stmts, using 1036 1.1.1.2 mrg fixup_tree_for_diagnostic. */ 1037 1.1.1.2 mrg tree diag_arg = fixup_tree_for_diagnostic (expr); 1038 1.1.1.2 mrg const region *src_region = NULL; 1039 1.1.1.2 mrg if (pkind == POISON_KIND_UNINIT) 1040 1.1.1.2 mrg src_region = get_region_for_poisoned_expr (expr); 1041 1.1.1.2 mrg if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind, 1042 1.1.1.2 mrg src_region))) 1043 1.1 mrg { 1044 1.1.1.2 mrg /* We only want to report use of a poisoned value at the first 1045 1.1.1.2 mrg place it gets used; return an unknown value to avoid generating 1046 1.1.1.2 mrg a chain of followup warnings. 
*/ 1047 1.1.1.2 mrg sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ()); 1048 1.1 mrg } 1049 1.1 mrg 1050 1.1.1.2 mrg return sval; 1051 1.1.1.2 mrg } 1052 1.1 mrg 1053 1.1.1.2 mrg return sval; 1054 1.1.1.2 mrg } 1055 1.1 mrg 1056 1.1.1.2 mrg /* Attempt to get a region for describing EXPR, the source of region of 1057 1.1.1.2 mrg a poisoned_svalue for use in a poisoned_value_diagnostic. 1058 1.1.1.2 mrg Return NULL if there is no good region to use. */ 1059 1.1 mrg 1060 1.1.1.2 mrg const region * 1061 1.1.1.2 mrg region_model::get_region_for_poisoned_expr (tree expr) const 1062 1.1 mrg { 1063 1.1.1.2 mrg if (TREE_CODE (expr) == SSA_NAME) 1064 1.1 mrg { 1065 1.1.1.2 mrg tree decl = SSA_NAME_VAR (expr); 1066 1.1.1.2 mrg if (decl && DECL_P (decl)) 1067 1.1.1.2 mrg expr = decl; 1068 1.1.1.2 mrg else 1069 1.1.1.2 mrg return NULL; 1070 1.1 mrg } 1071 1.1.1.2 mrg return get_lvalue (expr, NULL); 1072 1.1 mrg } 1073 1.1 mrg 1074 1.1 mrg /* Update this model for the ASSIGN stmt, using CTXT to report any 1075 1.1 mrg diagnostics. */ 1076 1.1 mrg 1077 1.1 mrg void 1078 1.1 mrg region_model::on_assignment (const gassign *assign, region_model_context *ctxt) 1079 1.1 mrg { 1080 1.1 mrg tree lhs = gimple_assign_lhs (assign); 1081 1.1 mrg tree rhs1 = gimple_assign_rhs1 (assign); 1082 1.1 mrg 1083 1.1.1.2 mrg const region *lhs_reg = get_lvalue (lhs, ctxt); 1084 1.1 mrg 1085 1.1.1.2 mrg /* Most assignments are handled by: 1086 1.1.1.2 mrg set_value (lhs_reg, SVALUE, CTXT) 1087 1.1.1.2 mrg for some SVALUE. 
*/ 1088 1.1.1.2 mrg if (const svalue *sval = get_gassign_result (assign, ctxt)) 1089 1.1.1.2 mrg { 1090 1.1.1.2 mrg tree expr = get_diagnostic_tree_for_gassign (assign); 1091 1.1.1.2 mrg check_for_poison (sval, expr, ctxt); 1092 1.1.1.2 mrg set_value (lhs_reg, sval, ctxt); 1093 1.1.1.2 mrg return; 1094 1.1 mrg } 1095 1.1 mrg 1096 1.1 mrg enum tree_code op = gimple_assign_rhs_code (assign); 1097 1.1 mrg switch (op) 1098 1.1 mrg { 1099 1.1 mrg default: 1100 1.1 mrg { 1101 1.1 mrg if (0) 1102 1.1 mrg sorry_at (assign->location, "unhandled assignment op: %qs", 1103 1.1 mrg get_tree_code_name (op)); 1104 1.1.1.2 mrg const svalue *unknown_sval 1105 1.1.1.2 mrg = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs)); 1106 1.1.1.2 mrg set_value (lhs_reg, unknown_sval, ctxt); 1107 1.1 mrg } 1108 1.1 mrg break; 1109 1.1 mrg 1110 1.1.1.2 mrg case CONSTRUCTOR: 1111 1.1 mrg { 1112 1.1.1.2 mrg if (TREE_CLOBBER_P (rhs1)) 1113 1.1.1.2 mrg { 1114 1.1.1.2 mrg /* e.g. "x ={v} {CLOBBER};" */ 1115 1.1.1.2 mrg clobber_region (lhs_reg); 1116 1.1.1.2 mrg } 1117 1.1.1.2 mrg else 1118 1.1.1.2 mrg { 1119 1.1.1.2 mrg /* Any CONSTRUCTOR that survives to this point is either 1120 1.1.1.2 mrg just a zero-init of everything, or a vector. 
*/ 1121 1.1.1.2 mrg if (!CONSTRUCTOR_NO_CLEARING (rhs1)) 1122 1.1.1.2 mrg zero_fill_region (lhs_reg); 1123 1.1.1.2 mrg unsigned ix; 1124 1.1.1.2 mrg tree index; 1125 1.1.1.2 mrg tree val; 1126 1.1.1.2 mrg FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val) 1127 1.1.1.2 mrg { 1128 1.1.1.2 mrg gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE); 1129 1.1.1.2 mrg if (!index) 1130 1.1.1.2 mrg index = build_int_cst (integer_type_node, ix); 1131 1.1.1.2 mrg gcc_assert (TREE_CODE (index) == INTEGER_CST); 1132 1.1.1.2 mrg const svalue *index_sval 1133 1.1.1.2 mrg = m_mgr->get_or_create_constant_svalue (index); 1134 1.1.1.2 mrg gcc_assert (index_sval); 1135 1.1.1.2 mrg const region *sub_reg 1136 1.1.1.2 mrg = m_mgr->get_element_region (lhs_reg, 1137 1.1.1.2 mrg TREE_TYPE (val), 1138 1.1.1.2 mrg index_sval); 1139 1.1.1.2 mrg const svalue *val_sval = get_rvalue (val, ctxt); 1140 1.1.1.2 mrg set_value (sub_reg, val_sval, ctxt); 1141 1.1.1.2 mrg } 1142 1.1.1.2 mrg } 1143 1.1 mrg } 1144 1.1 mrg break; 1145 1.1 mrg 1146 1.1.1.2 mrg case STRING_CST: 1147 1.1 mrg { 1148 1.1.1.2 mrg /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */ 1149 1.1.1.2 mrg const svalue *rhs_sval = get_rvalue (rhs1, ctxt); 1150 1.1.1.2 mrg m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval, 1151 1.1.1.2 mrg ctxt ? ctxt->get_uncertainty () : NULL); 1152 1.1 mrg } 1153 1.1 mrg break; 1154 1.1.1.2 mrg } 1155 1.1.1.2 mrg } 1156 1.1 mrg 1157 1.1.1.2 mrg /* A pending_diagnostic subclass for implementing "__analyzer_dump_path". 
*/ 1158 1.1.1.2 mrg 1159 1.1.1.2 mrg class dump_path_diagnostic 1160 1.1.1.2 mrg : public pending_diagnostic_subclass<dump_path_diagnostic> 1161 1.1.1.2 mrg { 1162 1.1.1.2 mrg public: 1163 1.1.1.2 mrg int get_controlling_option () const FINAL OVERRIDE 1164 1.1.1.2 mrg { 1165 1.1.1.2 mrg return 0; 1166 1.1.1.2 mrg } 1167 1.1.1.2 mrg 1168 1.1.1.2 mrg bool emit (rich_location *richloc) FINAL OVERRIDE 1169 1.1.1.2 mrg { 1170 1.1.1.2 mrg inform (richloc, "path"); 1171 1.1.1.2 mrg return true; 1172 1.1.1.2 mrg } 1173 1.1 mrg 1174 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; } 1175 1.1 mrg 1176 1.1.1.2 mrg bool operator== (const dump_path_diagnostic &) const 1177 1.1.1.2 mrg { 1178 1.1.1.2 mrg return true; 1179 1.1.1.2 mrg } 1180 1.1.1.2 mrg }; 1181 1.1 mrg 1182 1.1.1.2 mrg /* Handle the pre-sm-state part of STMT, modifying this object in-place. 1183 1.1.1.2 mrg Write true to *OUT_TERMINATE_PATH if the path should be terminated. 1184 1.1.1.2 mrg Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown 1185 1.1.1.2 mrg side effects. */ 1186 1.1 mrg 1187 1.1.1.2 mrg void 1188 1.1.1.2 mrg region_model::on_stmt_pre (const gimple *stmt, 1189 1.1.1.2 mrg bool *out_terminate_path, 1190 1.1.1.2 mrg bool *out_unknown_side_effects, 1191 1.1.1.2 mrg region_model_context *ctxt) 1192 1.1.1.2 mrg { 1193 1.1.1.2 mrg switch (gimple_code (stmt)) 1194 1.1.1.2 mrg { 1195 1.1.1.2 mrg default: 1196 1.1.1.2 mrg /* No-op for now. 
*/ 1197 1.1 mrg break; 1198 1.1 mrg 1199 1.1.1.2 mrg case GIMPLE_ASSIGN: 1200 1.1 mrg { 1201 1.1.1.2 mrg const gassign *assign = as_a <const gassign *> (stmt); 1202 1.1.1.2 mrg on_assignment (assign, ctxt); 1203 1.1 mrg } 1204 1.1 mrg break; 1205 1.1 mrg 1206 1.1.1.2 mrg case GIMPLE_ASM: 1207 1.1 mrg { 1208 1.1.1.2 mrg const gasm *asm_stmt = as_a <const gasm *> (stmt); 1209 1.1.1.2 mrg on_asm_stmt (asm_stmt, ctxt); 1210 1.1 mrg } 1211 1.1 mrg break; 1212 1.1 mrg 1213 1.1.1.2 mrg case GIMPLE_CALL: 1214 1.1 mrg { 1215 1.1.1.2 mrg /* Track whether we have a gcall to a function that's not recognized by 1216 1.1.1.2 mrg anything, for which we don't have a function body, or for which we 1217 1.1.1.2 mrg don't know the fndecl. */ 1218 1.1.1.2 mrg const gcall *call = as_a <const gcall *> (stmt); 1219 1.1 mrg 1220 1.1.1.2 mrg /* Debugging/test support. */ 1221 1.1.1.2 mrg if (is_special_named_call_p (call, "__analyzer_describe", 2)) 1222 1.1.1.2 mrg impl_call_analyzer_describe (call, ctxt); 1223 1.1.1.2 mrg else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1)) 1224 1.1.1.2 mrg impl_call_analyzer_dump_capacity (call, ctxt); 1225 1.1.1.2 mrg else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0)) 1226 1.1.1.2 mrg impl_call_analyzer_dump_escaped (call); 1227 1.1.1.2 mrg else if (is_special_named_call_p (call, "__analyzer_dump_path", 0)) 1228 1.1.1.2 mrg { 1229 1.1.1.2 mrg /* Handle the builtin "__analyzer_dump_path" by queuing a 1230 1.1.1.2 mrg diagnostic at this exploded_node. */ 1231 1.1.1.2 mrg ctxt->warn (new dump_path_diagnostic ()); 1232 1.1.1.2 mrg } 1233 1.1.1.2 mrg else if (is_special_named_call_p (call, "__analyzer_dump_region_model", 1234 1.1.1.2 mrg 0)) 1235 1.1.1.2 mrg { 1236 1.1.1.2 mrg /* Handle the builtin "__analyzer_dump_region_model" by dumping 1237 1.1.1.2 mrg the region model's state to stderr. 
*/ 1238 1.1.1.2 mrg dump (false); 1239 1.1.1.2 mrg } 1240 1.1.1.2 mrg else if (is_special_named_call_p (call, "__analyzer_eval", 1)) 1241 1.1.1.2 mrg impl_call_analyzer_eval (call, ctxt); 1242 1.1.1.2 mrg else if (is_special_named_call_p (call, "__analyzer_break", 0)) 1243 1.1.1.2 mrg { 1244 1.1.1.2 mrg /* Handle the builtin "__analyzer_break" by triggering a 1245 1.1.1.2 mrg breakpoint. */ 1246 1.1.1.2 mrg /* TODO: is there a good cross-platform way to do this? */ 1247 1.1.1.2 mrg raise (SIGINT); 1248 1.1.1.2 mrg } 1249 1.1.1.2 mrg else if (is_special_named_call_p (call, 1250 1.1.1.2 mrg "__analyzer_dump_exploded_nodes", 1251 1.1.1.2 mrg 1)) 1252 1.1.1.2 mrg { 1253 1.1.1.2 mrg /* This is handled elsewhere. */ 1254 1.1.1.2 mrg } 1255 1.1.1.2 mrg else 1256 1.1.1.2 mrg *out_unknown_side_effects = on_call_pre (call, ctxt, 1257 1.1.1.2 mrg out_terminate_path); 1258 1.1 mrg } 1259 1.1 mrg break; 1260 1.1 mrg 1261 1.1.1.2 mrg case GIMPLE_RETURN: 1262 1.1 mrg { 1263 1.1.1.2 mrg const greturn *return_ = as_a <const greturn *> (stmt); 1264 1.1.1.2 mrg on_return (return_, ctxt); 1265 1.1 mrg } 1266 1.1 mrg break; 1267 1.1.1.2 mrg } 1268 1.1.1.2 mrg } 1269 1.1 mrg 1270 1.1.1.2 mrg /* Ensure that all arguments at the call described by CD are checked 1271 1.1.1.2 mrg for poisoned values, by calling get_rvalue on each argument. */ 1272 1.1 mrg 1273 1.1.1.2 mrg void 1274 1.1.1.2 mrg region_model::check_call_args (const call_details &cd) const 1275 1.1.1.2 mrg { 1276 1.1.1.2 mrg for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++) 1277 1.1.1.2 mrg cd.get_arg_svalue (arg_idx); 1278 1.1.1.2 mrg } 1279 1.1 mrg 1280 1.1.1.2 mrg /* Return true if CD is known to be a call to a function with 1281 1.1.1.2 mrg __attribute__((const)). 
*/ 1282 1.1 mrg 1283 1.1.1.2 mrg static bool 1284 1.1.1.2 mrg const_fn_p (const call_details &cd) 1285 1.1.1.2 mrg { 1286 1.1.1.2 mrg tree fndecl = cd.get_fndecl_for_call (); 1287 1.1.1.2 mrg if (!fndecl) 1288 1.1.1.2 mrg return false; 1289 1.1.1.2 mrg gcc_assert (DECL_P (fndecl)); 1290 1.1.1.2 mrg return TREE_READONLY (fndecl); 1291 1.1.1.2 mrg } 1292 1.1 mrg 1293 1.1.1.2 mrg /* If this CD is known to be a call to a function with 1294 1.1.1.2 mrg __attribute__((const)), attempt to get a const_fn_result_svalue 1295 1.1.1.2 mrg based on the arguments, or return NULL otherwise. */ 1296 1.1 mrg 1297 1.1.1.2 mrg static const svalue * 1298 1.1.1.2 mrg maybe_get_const_fn_result (const call_details &cd) 1299 1.1.1.2 mrg { 1300 1.1.1.2 mrg if (!const_fn_p (cd)) 1301 1.1.1.2 mrg return NULL; 1302 1.1 mrg 1303 1.1.1.2 mrg unsigned num_args = cd.num_args (); 1304 1.1.1.2 mrg if (num_args > const_fn_result_svalue::MAX_INPUTS) 1305 1.1.1.2 mrg /* Too many arguments. */ 1306 1.1.1.2 mrg return NULL; 1307 1.1.1.2 mrg 1308 1.1.1.2 mrg auto_vec<const svalue *> inputs (num_args); 1309 1.1.1.2 mrg for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++) 1310 1.1.1.2 mrg { 1311 1.1.1.2 mrg const svalue *arg_sval = cd.get_arg_svalue (arg_idx); 1312 1.1.1.2 mrg if (!arg_sval->can_have_associated_state_p ()) 1313 1.1.1.2 mrg return NULL; 1314 1.1.1.2 mrg inputs.quick_push (arg_sval); 1315 1.1 mrg } 1316 1.1.1.2 mrg 1317 1.1.1.2 mrg region_model_manager *mgr = cd.get_manager (); 1318 1.1.1.2 mrg const svalue *sval 1319 1.1.1.2 mrg = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (), 1320 1.1.1.2 mrg cd.get_fndecl_for_call (), 1321 1.1.1.2 mrg inputs); 1322 1.1.1.2 mrg return sval; 1323 1.1 mrg } 1324 1.1 mrg 1325 1.1 mrg /* Update this model for the CALL stmt, using CTXT to report any 1326 1.1 mrg diagnostics - the first half. 
1327 1.1 mrg 1328 1.1 mrg Updates to the region_model that should be made *before* sm-states 1329 1.1 mrg are updated are done here; other updates to the region_model are done 1330 1.1 mrg in region_model::on_call_post. 1331 1.1 mrg 1332 1.1 mrg Return true if the function call has unknown side effects (it wasn't 1333 1.1 mrg recognized and we don't have a body for it, or are unable to tell which 1334 1.1.1.2 mrg fndecl it is). 1335 1.1.1.2 mrg 1336 1.1.1.2 mrg Write true to *OUT_TERMINATE_PATH if this execution path should be 1337 1.1.1.2 mrg terminated (e.g. the function call terminates the process). */ 1338 1.1 mrg 1339 1.1 mrg bool 1340 1.1.1.2 mrg region_model::on_call_pre (const gcall *call, region_model_context *ctxt, 1341 1.1.1.2 mrg bool *out_terminate_path) 1342 1.1 mrg { 1343 1.1.1.2 mrg call_details cd (call, this, ctxt); 1344 1.1.1.2 mrg 1345 1.1.1.2 mrg bool unknown_side_effects = false; 1346 1.1.1.2 mrg 1347 1.1.1.2 mrg /* Special-case for IFN_DEFERRED_INIT. 1348 1.1.1.2 mrg We want to report uninitialized variables with -fanalyzer (treating 1349 1.1.1.2 mrg -ftrivial-auto-var-init= as purely a mitigation feature). 1350 1.1.1.2 mrg Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the 1351 1.1.1.2 mrg lhs of the call, so that it is still uninitialized from the point of 1352 1.1.1.2 mrg view of the analyzer. */ 1353 1.1.1.2 mrg if (gimple_call_internal_p (call) 1354 1.1.1.2 mrg && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT) 1355 1.1.1.2 mrg return false; 1356 1.1.1.2 mrg 1357 1.1.1.2 mrg /* Get svalues for all of the arguments at the callsite, to ensure that we 1358 1.1.1.2 mrg complain about any uninitialized arguments. This might lead to 1359 1.1.1.2 mrg duplicates if any of the handling below also looks up the svalues, 1360 1.1.1.2 mrg but the deduplication code should deal with that. 
*/ 1361 1.1.1.2 mrg if (ctxt) 1362 1.1.1.2 mrg check_call_args (cd); 1363 1.1.1.2 mrg 1364 1.1.1.2 mrg /* Some of the cases below update the lhs of the call based on the 1365 1.1.1.2 mrg return value, but not all. Provide a default value, which may 1366 1.1.1.2 mrg get overwritten below. */ 1367 1.1 mrg if (tree lhs = gimple_call_lhs (call)) 1368 1.1 mrg { 1369 1.1.1.2 mrg const region *lhs_region = get_lvalue (lhs, ctxt); 1370 1.1.1.2 mrg const svalue *sval = maybe_get_const_fn_result (cd); 1371 1.1.1.2 mrg if (!sval) 1372 1.1.1.2 mrg { 1373 1.1.1.2 mrg /* For the common case of functions without __attribute__((const)), 1374 1.1.1.2 mrg use a conjured value, and purge any prior state involving that 1375 1.1.1.2 mrg value (in case this is in a loop). */ 1376 1.1.1.2 mrg sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call, 1377 1.1.1.2 mrg lhs_region, 1378 1.1.1.2 mrg conjured_purge (this, 1379 1.1.1.2 mrg ctxt)); 1380 1.1.1.2 mrg } 1381 1.1.1.2 mrg set_value (lhs_region, sval, ctxt); 1382 1.1.1.2 mrg } 1383 1.1.1.2 mrg 1384 1.1.1.2 mrg if (gimple_call_internal_p (call)) 1385 1.1.1.2 mrg { 1386 1.1.1.2 mrg switch (gimple_call_internal_fn (call)) 1387 1.1.1.2 mrg { 1388 1.1.1.2 mrg default: 1389 1.1.1.2 mrg break; 1390 1.1.1.2 mrg case IFN_BUILTIN_EXPECT: 1391 1.1.1.2 mrg impl_call_builtin_expect (cd); 1392 1.1.1.2 mrg return false; 1393 1.1.1.2 mrg case IFN_UBSAN_BOUNDS: 1394 1.1.1.2 mrg return false; 1395 1.1.1.2 mrg } 1396 1.1 mrg } 1397 1.1 mrg 1398 1.1 mrg if (tree callee_fndecl = get_fndecl_for_call (call, ctxt)) 1399 1.1 mrg { 1400 1.1.1.2 mrg /* The various impl_call_* member functions are implemented 1401 1.1.1.2 mrg in region-model-impl-calls.cc. 1402 1.1.1.2 mrg Having them split out into separate functions makes it easier 1403 1.1.1.2 mrg to put breakpoints on the handling of specific functions. 
*/ 1404 1.1.1.2 mrg int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl); 1405 1.1.1.2 mrg 1406 1.1.1.2 mrg if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL) 1407 1.1.1.2 mrg && gimple_builtin_call_types_compatible_p (call, callee_fndecl)) 1408 1.1.1.2 mrg switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl)) 1409 1.1.1.2 mrg { 1410 1.1.1.2 mrg default: 1411 1.1.1.2 mrg if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE))) 1412 1.1.1.2 mrg unknown_side_effects = true; 1413 1.1.1.2 mrg break; 1414 1.1.1.2 mrg case BUILT_IN_ALLOCA: 1415 1.1.1.2 mrg case BUILT_IN_ALLOCA_WITH_ALIGN: 1416 1.1.1.2 mrg impl_call_alloca (cd); 1417 1.1.1.2 mrg return false; 1418 1.1.1.2 mrg case BUILT_IN_CALLOC: 1419 1.1.1.2 mrg impl_call_calloc (cd); 1420 1.1.1.2 mrg return false; 1421 1.1.1.2 mrg case BUILT_IN_EXPECT: 1422 1.1.1.2 mrg case BUILT_IN_EXPECT_WITH_PROBABILITY: 1423 1.1.1.2 mrg impl_call_builtin_expect (cd); 1424 1.1.1.2 mrg return false; 1425 1.1.1.2 mrg case BUILT_IN_FREE: 1426 1.1.1.2 mrg /* Handle in "on_call_post". 
*/ 1427 1.1.1.2 mrg break; 1428 1.1.1.2 mrg case BUILT_IN_MALLOC: 1429 1.1.1.2 mrg impl_call_malloc (cd); 1430 1.1.1.2 mrg return false; 1431 1.1.1.2 mrg case BUILT_IN_MEMCPY: 1432 1.1.1.2 mrg case BUILT_IN_MEMCPY_CHK: 1433 1.1.1.2 mrg impl_call_memcpy (cd); 1434 1.1.1.2 mrg return false; 1435 1.1.1.2 mrg case BUILT_IN_MEMSET: 1436 1.1.1.2 mrg case BUILT_IN_MEMSET_CHK: 1437 1.1.1.2 mrg impl_call_memset (cd); 1438 1.1.1.2 mrg return false; 1439 1.1.1.2 mrg break; 1440 1.1.1.2 mrg case BUILT_IN_REALLOC: 1441 1.1.1.2 mrg return false; 1442 1.1.1.2 mrg case BUILT_IN_STRCHR: 1443 1.1.1.2 mrg impl_call_strchr (cd); 1444 1.1.1.2 mrg return false; 1445 1.1.1.2 mrg case BUILT_IN_STRCPY: 1446 1.1.1.2 mrg case BUILT_IN_STRCPY_CHK: 1447 1.1.1.2 mrg impl_call_strcpy (cd); 1448 1.1.1.2 mrg return false; 1449 1.1.1.2 mrg case BUILT_IN_STRLEN: 1450 1.1.1.2 mrg impl_call_strlen (cd); 1451 1.1.1.2 mrg return false; 1452 1.1.1.2 mrg 1453 1.1.1.2 mrg case BUILT_IN_STACK_SAVE: 1454 1.1.1.2 mrg case BUILT_IN_STACK_RESTORE: 1455 1.1.1.2 mrg return false; 1456 1.1.1.2 mrg 1457 1.1.1.2 mrg /* Stdio builtins. 
*/ 1458 1.1.1.2 mrg case BUILT_IN_FPRINTF: 1459 1.1.1.2 mrg case BUILT_IN_FPRINTF_UNLOCKED: 1460 1.1.1.2 mrg case BUILT_IN_PUTC: 1461 1.1.1.2 mrg case BUILT_IN_PUTC_UNLOCKED: 1462 1.1.1.2 mrg case BUILT_IN_FPUTC: 1463 1.1.1.2 mrg case BUILT_IN_FPUTC_UNLOCKED: 1464 1.1.1.2 mrg case BUILT_IN_FPUTS: 1465 1.1.1.2 mrg case BUILT_IN_FPUTS_UNLOCKED: 1466 1.1.1.2 mrg case BUILT_IN_FWRITE: 1467 1.1.1.2 mrg case BUILT_IN_FWRITE_UNLOCKED: 1468 1.1.1.2 mrg case BUILT_IN_PRINTF: 1469 1.1.1.2 mrg case BUILT_IN_PRINTF_UNLOCKED: 1470 1.1.1.2 mrg case BUILT_IN_PUTCHAR: 1471 1.1.1.2 mrg case BUILT_IN_PUTCHAR_UNLOCKED: 1472 1.1.1.2 mrg case BUILT_IN_PUTS: 1473 1.1.1.2 mrg case BUILT_IN_PUTS_UNLOCKED: 1474 1.1.1.2 mrg case BUILT_IN_VFPRINTF: 1475 1.1.1.2 mrg case BUILT_IN_VPRINTF: 1476 1.1.1.2 mrg /* These stdio builtins have external effects that are out 1477 1.1.1.2 mrg of scope for the analyzer: we only want to model the effects 1478 1.1.1.2 mrg on the return value. */ 1479 1.1.1.2 mrg break; 1480 1.1.1.2 mrg } 1481 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "malloc", call, 1)) 1482 1.1 mrg { 1483 1.1.1.2 mrg impl_call_malloc (cd); 1484 1.1 mrg return false; 1485 1.1 mrg } 1486 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "calloc", call, 2)) 1487 1.1 mrg { 1488 1.1.1.2 mrg impl_call_calloc (cd); 1489 1.1 mrg return false; 1490 1.1 mrg } 1491 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "alloca", call, 1)) 1492 1.1 mrg { 1493 1.1.1.2 mrg impl_call_alloca (cd); 1494 1.1 mrg return false; 1495 1.1 mrg } 1496 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "realloc", call, 2)) 1497 1.1 mrg { 1498 1.1.1.2 mrg impl_call_realloc (cd); 1499 1.1.1.2 mrg return false; 1500 1.1.1.2 mrg } 1501 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "error")) 1502 1.1.1.2 mrg { 1503 1.1.1.2 mrg if (impl_call_error (cd, 3, out_terminate_path)) 1504 1.1.1.2 mrg return false; 1505 1.1.1.2 mrg else 1506 1.1.1.2 mrg unknown_side_effects = true; 1507 1.1.1.2 mrg } 1508 
1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "error_at_line")) 1509 1.1.1.2 mrg { 1510 1.1.1.2 mrg if (impl_call_error (cd, 5, out_terminate_path)) 1511 1.1.1.2 mrg return false; 1512 1.1.1.2 mrg else 1513 1.1.1.2 mrg unknown_side_effects = true; 1514 1.1.1.2 mrg } 1515 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "fgets", call, 3) 1516 1.1.1.2 mrg || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3)) 1517 1.1.1.2 mrg { 1518 1.1.1.2 mrg impl_call_fgets (cd); 1519 1.1.1.2 mrg return false; 1520 1.1.1.2 mrg } 1521 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "fread", call, 4)) 1522 1.1.1.2 mrg { 1523 1.1.1.2 mrg impl_call_fread (cd); 1524 1.1.1.2 mrg return false; 1525 1.1.1.2 mrg } 1526 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "getchar", call, 0)) 1527 1.1.1.2 mrg { 1528 1.1.1.2 mrg /* No side-effects (tracking stream state is out-of-scope 1529 1.1.1.2 mrg for the analyzer). */ 1530 1.1.1.2 mrg } 1531 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "memset", call, 3) 1532 1.1.1.2 mrg && POINTER_TYPE_P (cd.get_arg_type (0))) 1533 1.1.1.2 mrg { 1534 1.1.1.2 mrg impl_call_memset (cd); 1535 1.1.1.2 mrg return false; 1536 1.1.1.2 mrg } 1537 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "strchr", call, 2) 1538 1.1.1.2 mrg && POINTER_TYPE_P (cd.get_arg_type (0))) 1539 1.1.1.2 mrg { 1540 1.1.1.2 mrg impl_call_strchr (cd); 1541 1.1.1.2 mrg return false; 1542 1.1.1.2 mrg } 1543 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "strlen", call, 1) 1544 1.1.1.2 mrg && POINTER_TYPE_P (cd.get_arg_type (0))) 1545 1.1.1.2 mrg { 1546 1.1.1.2 mrg impl_call_strlen (cd); 1547 1.1.1.2 mrg return false; 1548 1.1 mrg } 1549 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "operator new", call, 1)) 1550 1.1 mrg { 1551 1.1.1.2 mrg impl_call_operator_new (cd); 1552 1.1 mrg return false; 1553 1.1 mrg } 1554 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "operator new []", call, 1)) 1555 1.1.1.2 mrg { 1556 1.1.1.2 mrg 
impl_call_operator_new (cd); 1557 1.1.1.2 mrg return false; 1558 1.1.1.2 mrg } 1559 1.1.1.2 mrg else if (is_named_call_p (callee_fndecl, "operator delete", call, 1) 1560 1.1.1.2 mrg || is_named_call_p (callee_fndecl, "operator delete", call, 2) 1561 1.1.1.2 mrg || is_named_call_p (callee_fndecl, "operator delete []", call, 1)) 1562 1.1.1.2 mrg { 1563 1.1.1.2 mrg /* Handle in "on_call_post". */ 1564 1.1.1.2 mrg } 1565 1.1 mrg else if (!fndecl_has_gimple_body_p (callee_fndecl) 1566 1.1.1.2 mrg && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE))) 1567 1.1.1.2 mrg && !fndecl_built_in_p (callee_fndecl)) 1568 1.1 mrg unknown_side_effects = true; 1569 1.1 mrg } 1570 1.1 mrg else 1571 1.1 mrg unknown_side_effects = true; 1572 1.1 mrg 1573 1.1 mrg return unknown_side_effects; 1574 1.1 mrg } 1575 1.1 mrg 1576 1.1 mrg /* Update this model for the CALL stmt, using CTXT to report any 1577 1.1 mrg diagnostics - the second half. 1578 1.1 mrg 1579 1.1 mrg Updates to the region_model that should be made *after* sm-states 1580 1.1 mrg are updated are done here; other updates to the region_model are done 1581 1.1 mrg in region_model::on_call_pre. 1582 1.1 mrg 1583 1.1 mrg If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call 1584 1.1 mrg to purge state. 
*/ 1585 1.1 mrg 1586 1.1 mrg void 1587 1.1 mrg region_model::on_call_post (const gcall *call, 1588 1.1 mrg bool unknown_side_effects, 1589 1.1 mrg region_model_context *ctxt) 1590 1.1 mrg { 1591 1.1 mrg if (tree callee_fndecl = get_fndecl_for_call (call, ctxt)) 1592 1.1.1.2 mrg { 1593 1.1.1.2 mrg call_details cd (call, this, ctxt); 1594 1.1.1.2 mrg if (is_named_call_p (callee_fndecl, "free", call, 1)) 1595 1.1.1.2 mrg { 1596 1.1.1.2 mrg impl_call_free (cd); 1597 1.1.1.2 mrg return; 1598 1.1.1.2 mrg } 1599 1.1.1.2 mrg if (is_named_call_p (callee_fndecl, "operator delete", call, 1) 1600 1.1.1.2 mrg || is_named_call_p (callee_fndecl, "operator delete", call, 2) 1601 1.1.1.2 mrg || is_named_call_p (callee_fndecl, "operator delete []", call, 1)) 1602 1.1.1.2 mrg { 1603 1.1.1.2 mrg impl_call_operator_delete (cd); 1604 1.1.1.2 mrg return; 1605 1.1.1.2 mrg } 1606 1.1.1.2 mrg /* Was this fndecl referenced by 1607 1.1.1.2 mrg __attribute__((malloc(FOO)))? */ 1608 1.1.1.2 mrg if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl))) 1609 1.1.1.2 mrg { 1610 1.1.1.2 mrg impl_deallocation_call (cd); 1611 1.1.1.2 mrg return; 1612 1.1.1.2 mrg } 1613 1.1.1.2 mrg if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL) 1614 1.1.1.2 mrg && gimple_builtin_call_types_compatible_p (call, callee_fndecl)) 1615 1.1.1.2 mrg switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl)) 1616 1.1 mrg { 1617 1.1.1.2 mrg default: 1618 1.1.1.2 mrg break; 1619 1.1.1.2 mrg case BUILT_IN_REALLOC: 1620 1.1.1.2 mrg impl_call_realloc (cd); 1621 1.1.1.2 mrg return; 1622 1.1 mrg } 1623 1.1.1.2 mrg } 1624 1.1 mrg 1625 1.1 mrg if (unknown_side_effects) 1626 1.1 mrg handle_unrecognized_call (call, ctxt); 1627 1.1 mrg } 1628 1.1 mrg 1629 1.1.1.2 mrg /* Purge state involving SVAL from this region_model, using CTXT 1630 1.1.1.2 mrg (if non-NULL) to purge other state in a program_state. 
1631 1.1.1.2 mrg 1632 1.1.1.2 mrg For example, if we're at the def-stmt of an SSA name, then we need to 1633 1.1.1.2 mrg purge any state for svalues that involve that SSA name. This avoids 1634 1.1.1.2 mrg false positives in loops, since a symbolic value referring to the 1635 1.1.1.2 mrg SSA name will be referring to the previous value of that SSA name. 1636 1.1.1.2 mrg 1637 1.1.1.2 mrg For example, in: 1638 1.1.1.2 mrg while ((e = hashmap_iter_next(&iter))) { 1639 1.1.1.2 mrg struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e; 1640 1.1.1.2 mrg free (e_strbuf->value); 1641 1.1.1.2 mrg } 1642 1.1.1.2 mrg at the def-stmt of e_8: 1643 1.1.1.2 mrg e_8 = hashmap_iter_next (&iter); 1644 1.1.1.2 mrg we should purge the "freed" state of: 1645 1.1.1.2 mrg INIT_VAL(CAST_REG(struct oid2strbuf, (*INIT_VAL(e_8))).value) 1646 1.1.1.2 mrg which is the "e_strbuf->value" value from the previous iteration, 1647 1.1.1.2 mrg or we will erroneously report a double-free - the "e_8" within it 1648 1.1.1.2 mrg refers to the previous value. */ 1649 1.1 mrg 1650 1.1.1.2 mrg void 1651 1.1.1.2 mrg region_model::purge_state_involving (const svalue *sval, 1652 1.1.1.2 mrg region_model_context *ctxt) 1653 1.1 mrg { 1654 1.1.1.2 mrg if (!sval->can_have_associated_state_p ()) 1655 1.1.1.2 mrg return; 1656 1.1.1.2 mrg m_store.purge_state_involving (sval, m_mgr); 1657 1.1.1.2 mrg m_constraints->purge_state_involving (sval); 1658 1.1.1.2 mrg m_dynamic_extents.purge_state_involving (sval); 1659 1.1.1.2 mrg if (ctxt) 1660 1.1.1.2 mrg ctxt->purge_state_involving (sval); 1661 1.1.1.2 mrg } 1662 1.1 mrg 1663 1.1.1.2 mrg /* A pending_note subclass for adding a note about an 1664 1.1.1.2 mrg __attribute__((access, ...)) to a diagnostic. 
*/ 1665 1.1 mrg 1666 1.1.1.2 mrg class reason_attr_access : public pending_note_subclass<reason_attr_access> 1667 1.1.1.2 mrg { 1668 1.1.1.2 mrg public: 1669 1.1.1.2 mrg reason_attr_access (tree callee_fndecl, const attr_access &access) 1670 1.1.1.2 mrg : m_callee_fndecl (callee_fndecl), 1671 1.1.1.2 mrg m_ptr_argno (access.ptrarg), 1672 1.1.1.2 mrg m_access_str (TREE_STRING_POINTER (access.to_external_string ())) 1673 1.1.1.2 mrg { 1674 1.1.1.2 mrg } 1675 1.1 mrg 1676 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE { return "reason_attr_access"; } 1677 1.1 mrg 1678 1.1.1.2 mrg void emit () const 1679 1.1.1.2 mrg { 1680 1.1.1.2 mrg inform (DECL_SOURCE_LOCATION (m_callee_fndecl), 1681 1.1.1.2 mrg "parameter %i of %qD marked with attribute %qs", 1682 1.1.1.2 mrg m_ptr_argno + 1, m_callee_fndecl, m_access_str); 1683 1.1 mrg } 1684 1.1 mrg 1685 1.1.1.2 mrg bool operator== (const reason_attr_access &other) const 1686 1.1 mrg { 1687 1.1.1.2 mrg return (m_callee_fndecl == other.m_callee_fndecl 1688 1.1.1.2 mrg && m_ptr_argno == other.m_ptr_argno 1689 1.1.1.2 mrg && !strcmp (m_access_str, other.m_access_str)); 1690 1.1 mrg } 1691 1.1 mrg 1692 1.1 mrg private: 1693 1.1.1.2 mrg tree m_callee_fndecl; 1694 1.1.1.2 mrg unsigned m_ptr_argno; 1695 1.1.1.2 mrg const char *m_access_str; 1696 1.1.1.2 mrg }; 1697 1.1 mrg 1698 1.1.1.2 mrg /* Check CALL a call to external function CALLEE_FNDECL based on 1699 1.1.1.2 mrg any __attribute__ ((access, ....) on the latter, complaining to 1700 1.1.1.2 mrg CTXT about any issues. 1701 1.1.1.2 mrg 1702 1.1.1.2 mrg Currently we merely call check_region_for_write on any regions 1703 1.1.1.2 mrg pointed to by arguments marked with a "write_only" or "read_write" 1704 1.1.1.2 mrg attribute. 
*/ 1705 1.1.1.2 mrg 1706 1.1.1.2 mrg void 1707 1.1.1.2 mrg region_model:: 1708 1.1.1.2 mrg check_external_function_for_access_attr (const gcall *call, 1709 1.1.1.2 mrg tree callee_fndecl, 1710 1.1.1.2 mrg region_model_context *ctxt) const 1711 1.1.1.2 mrg { 1712 1.1.1.2 mrg gcc_assert (call); 1713 1.1.1.2 mrg gcc_assert (callee_fndecl); 1714 1.1.1.2 mrg gcc_assert (ctxt); 1715 1.1 mrg 1716 1.1.1.2 mrg tree fntype = TREE_TYPE (callee_fndecl); 1717 1.1.1.2 mrg if (!fntype) 1718 1.1.1.2 mrg return; 1719 1.1.1.2 mrg 1720 1.1.1.2 mrg if (!TYPE_ATTRIBUTES (fntype)) 1721 1.1.1.2 mrg return; 1722 1.1.1.2 mrg 1723 1.1.1.2 mrg /* Initialize a map of attribute access specifications for arguments 1724 1.1.1.2 mrg to the function call. */ 1725 1.1.1.2 mrg rdwr_map rdwr_idx; 1726 1.1.1.2 mrg init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype)); 1727 1.1.1.2 mrg 1728 1.1.1.2 mrg unsigned argno = 0; 1729 1.1.1.2 mrg 1730 1.1.1.2 mrg for (tree iter = TYPE_ARG_TYPES (fntype); iter; 1731 1.1.1.2 mrg iter = TREE_CHAIN (iter), ++argno) 1732 1.1.1.2 mrg { 1733 1.1.1.2 mrg const attr_access* access = rdwr_idx.get (argno); 1734 1.1.1.2 mrg if (!access) 1735 1.1.1.2 mrg continue; 1736 1.1.1.2 mrg 1737 1.1.1.2 mrg /* Ignore any duplicate entry in the map for the size argument. */ 1738 1.1.1.2 mrg if (access->ptrarg != argno) 1739 1.1.1.2 mrg continue; 1740 1.1.1.2 mrg 1741 1.1.1.2 mrg if (access->mode == access_write_only 1742 1.1.1.2 mrg || access->mode == access_read_write) 1743 1.1.1.2 mrg { 1744 1.1.1.2 mrg /* Subclass of decorated_region_model_context that 1745 1.1.1.2 mrg adds a note about the attr access to any saved diagnostics. 
*/ 1746 1.1.1.2 mrg class annotating_ctxt : public note_adding_context 1747 1.1.1.2 mrg { 1748 1.1.1.2 mrg public: 1749 1.1.1.2 mrg annotating_ctxt (tree callee_fndecl, 1750 1.1.1.2 mrg const attr_access &access, 1751 1.1.1.2 mrg region_model_context *ctxt) 1752 1.1.1.2 mrg : note_adding_context (ctxt), 1753 1.1.1.2 mrg m_callee_fndecl (callee_fndecl), 1754 1.1.1.2 mrg m_access (access) 1755 1.1.1.2 mrg { 1756 1.1.1.2 mrg } 1757 1.1.1.2 mrg pending_note *make_note () FINAL OVERRIDE 1758 1.1.1.2 mrg { 1759 1.1.1.2 mrg return new reason_attr_access (m_callee_fndecl, m_access); 1760 1.1.1.2 mrg } 1761 1.1.1.2 mrg private: 1762 1.1.1.2 mrg tree m_callee_fndecl; 1763 1.1.1.2 mrg const attr_access &m_access; 1764 1.1.1.2 mrg }; 1765 1.1.1.2 mrg 1766 1.1.1.2 mrg /* Use this ctxt below so that any diagnostics get the 1767 1.1.1.2 mrg note added to them. */ 1768 1.1.1.2 mrg annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt); 1769 1.1.1.2 mrg 1770 1.1.1.2 mrg tree ptr_tree = gimple_call_arg (call, access->ptrarg); 1771 1.1.1.2 mrg const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt); 1772 1.1.1.2 mrg const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt); 1773 1.1.1.2 mrg check_region_for_write (reg, &my_ctxt); 1774 1.1.1.2 mrg /* We don't use the size arg for now. */ 1775 1.1.1.2 mrg } 1776 1.1.1.2 mrg } 1777 1.1.1.2 mrg } 1778 1.1 mrg 1779 1.1 mrg /* Handle a call CALL to a function with unknown behavior. 1780 1.1 mrg 1781 1.1 mrg Traverse the regions in this model, determining what regions are 1782 1.1 mrg reachable from pointer arguments to CALL and from global variables, 1783 1.1 mrg recursively. 1784 1.1 mrg 1785 1.1 mrg Set all reachable regions to new unknown values and purge sm-state 1786 1.1 mrg from their values, and from values that point to them. 
*/ 1787 1.1 mrg 1788 1.1 mrg void 1789 1.1 mrg region_model::handle_unrecognized_call (const gcall *call, 1790 1.1 mrg region_model_context *ctxt) 1791 1.1 mrg { 1792 1.1 mrg tree fndecl = get_fndecl_for_call (call, ctxt); 1793 1.1 mrg 1794 1.1.1.2 mrg if (fndecl && ctxt) 1795 1.1.1.2 mrg check_external_function_for_access_attr (call, fndecl, ctxt); 1796 1.1.1.2 mrg 1797 1.1.1.2 mrg reachable_regions reachable_regs (this); 1798 1.1 mrg 1799 1.1 mrg /* Determine the reachable regions and their mutability. */ 1800 1.1 mrg { 1801 1.1.1.2 mrg /* Add globals and regions that already escaped in previous 1802 1.1.1.2 mrg unknown calls. */ 1803 1.1.1.2 mrg m_store.for_each_cluster (reachable_regions::init_cluster_cb, 1804 1.1.1.2 mrg &reachable_regs); 1805 1.1 mrg 1806 1.1 mrg /* Params that are pointers. */ 1807 1.1 mrg tree iter_param_types = NULL_TREE; 1808 1.1 mrg if (fndecl) 1809 1.1 mrg iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 1810 1.1 mrg for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++) 1811 1.1 mrg { 1812 1.1 mrg /* Track expected param type, where available. */ 1813 1.1 mrg tree param_type = NULL_TREE; 1814 1.1 mrg if (iter_param_types) 1815 1.1 mrg { 1816 1.1 mrg param_type = TREE_VALUE (iter_param_types); 1817 1.1 mrg gcc_assert (param_type); 1818 1.1 mrg iter_param_types = TREE_CHAIN (iter_param_types); 1819 1.1 mrg } 1820 1.1 mrg 1821 1.1 mrg tree parm = gimple_call_arg (call, arg_idx); 1822 1.1.1.2 mrg const svalue *parm_sval = get_rvalue (parm, ctxt); 1823 1.1.1.2 mrg reachable_regs.handle_parm (parm_sval, param_type); 1824 1.1 mrg } 1825 1.1 mrg } 1826 1.1 mrg 1827 1.1.1.2 mrg uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL; 1828 1.1.1.2 mrg 1829 1.1.1.2 mrg /* Purge sm-state for the svalues that were reachable, 1830 1.1.1.2 mrg both in non-mutable and mutable form. 
*/ 1831 1.1.1.2 mrg for (svalue_set::iterator iter 1832 1.1.1.2 mrg = reachable_regs.begin_reachable_svals (); 1833 1.1.1.2 mrg iter != reachable_regs.end_reachable_svals (); ++iter) 1834 1.1 mrg { 1835 1.1.1.2 mrg const svalue *sval = (*iter); 1836 1.1.1.2 mrg if (ctxt) 1837 1.1.1.2 mrg ctxt->on_unknown_change (sval, false); 1838 1.1.1.2 mrg } 1839 1.1.1.2 mrg for (svalue_set::iterator iter 1840 1.1.1.2 mrg = reachable_regs.begin_mutable_svals (); 1841 1.1.1.2 mrg iter != reachable_regs.end_mutable_svals (); ++iter) 1842 1.1.1.2 mrg { 1843 1.1.1.2 mrg const svalue *sval = (*iter); 1844 1.1.1.2 mrg if (ctxt) 1845 1.1.1.2 mrg ctxt->on_unknown_change (sval, true); 1846 1.1.1.2 mrg if (uncertainty) 1847 1.1.1.2 mrg uncertainty->on_mutable_sval_at_unknown_call (sval); 1848 1.1.1.2 mrg } 1849 1.1.1.2 mrg 1850 1.1.1.2 mrg /* Mark any clusters that have escaped. */ 1851 1.1.1.2 mrg reachable_regs.mark_escaped_clusters (ctxt); 1852 1.1.1.2 mrg 1853 1.1.1.2 mrg /* Update bindings for all clusters that have escaped, whether above, 1854 1.1.1.2 mrg or previously. */ 1855 1.1.1.2 mrg m_store.on_unknown_fncall (call, m_mgr->get_store_manager (), 1856 1.1.1.2 mrg conjured_purge (this, ctxt)); 1857 1.1.1.2 mrg 1858 1.1.1.2 mrg /* Purge dynamic extents from any regions that have escaped mutably: 1859 1.1.1.2 mrg realloc could have been called on them. */ 1860 1.1.1.2 mrg for (hash_set<const region *>::iterator 1861 1.1.1.2 mrg iter = reachable_regs.begin_mutable_base_regs (); 1862 1.1.1.2 mrg iter != reachable_regs.end_mutable_base_regs (); 1863 1.1.1.2 mrg ++iter) 1864 1.1.1.2 mrg { 1865 1.1.1.2 mrg const region *base_reg = (*iter); 1866 1.1.1.2 mrg unset_dynamic_extents (base_reg); 1867 1.1.1.2 mrg } 1868 1.1.1.2 mrg } 1869 1.1 mrg 1870 1.1.1.2 mrg /* Traverse the regions in this model, determining what regions are 1871 1.1.1.2 mrg reachable from the store and populating *OUT. 
1872 1.1 mrg 1873 1.1.1.2 mrg If EXTRA_SVAL is non-NULL, treat it as an additional "root" 1874 1.1.1.2 mrg for reachability (for handling return values from functions when 1875 1.1.1.2 mrg analyzing return of the only function on the stack). 1876 1.1 mrg 1877 1.1.1.2 mrg If UNCERTAINTY is non-NULL, treat any svalues that were recorded 1878 1.1.1.2 mrg within it as being maybe-bound as additional "roots" for reachability. 1879 1.1 mrg 1880 1.1.1.2 mrg Find svalues that haven't leaked. */ 1881 1.1.1.2 mrg 1882 1.1.1.2 mrg void 1883 1.1.1.2 mrg region_model::get_reachable_svalues (svalue_set *out, 1884 1.1.1.2 mrg const svalue *extra_sval, 1885 1.1.1.2 mrg const uncertainty_t *uncertainty) 1886 1.1.1.2 mrg { 1887 1.1.1.2 mrg reachable_regions reachable_regs (this); 1888 1.1.1.2 mrg 1889 1.1.1.2 mrg /* Add globals and regions that already escaped in previous 1890 1.1.1.2 mrg unknown calls. */ 1891 1.1.1.2 mrg m_store.for_each_cluster (reachable_regions::init_cluster_cb, 1892 1.1.1.2 mrg &reachable_regs); 1893 1.1.1.2 mrg 1894 1.1.1.2 mrg if (extra_sval) 1895 1.1.1.2 mrg reachable_regs.handle_sval (extra_sval); 1896 1.1 mrg 1897 1.1.1.2 mrg if (uncertainty) 1898 1.1.1.2 mrg for (uncertainty_t::iterator iter 1899 1.1.1.2 mrg = uncertainty->begin_maybe_bound_svals (); 1900 1.1.1.2 mrg iter != uncertainty->end_maybe_bound_svals (); ++iter) 1901 1.1.1.2 mrg reachable_regs.handle_sval (*iter); 1902 1.1.1.2 mrg 1903 1.1.1.2 mrg /* Get regions for locals that have explicitly bound values. 
*/ 1904 1.1.1.2 mrg for (store::cluster_map_t::iterator iter = m_store.begin (); 1905 1.1.1.2 mrg iter != m_store.end (); ++iter) 1906 1.1.1.2 mrg { 1907 1.1.1.2 mrg const region *base_reg = (*iter).first; 1908 1.1.1.2 mrg if (const region *parent = base_reg->get_parent_region ()) 1909 1.1.1.2 mrg if (parent->get_kind () == RK_FRAME) 1910 1.1.1.2 mrg reachable_regs.add (base_reg, false); 1911 1.1.1.2 mrg } 1912 1.1.1.2 mrg 1913 1.1.1.2 mrg /* Populate *OUT based on the values that were reachable. */ 1914 1.1.1.2 mrg for (svalue_set::iterator iter 1915 1.1.1.2 mrg = reachable_regs.begin_reachable_svals (); 1916 1.1.1.2 mrg iter != reachable_regs.end_reachable_svals (); ++iter) 1917 1.1.1.2 mrg out->add (*iter); 1918 1.1 mrg } 1919 1.1 mrg 1920 1.1 mrg /* Update this model for the RETURN_STMT, using CTXT to report any 1921 1.1 mrg diagnostics. */ 1922 1.1 mrg 1923 1.1 mrg void 1924 1.1 mrg region_model::on_return (const greturn *return_stmt, region_model_context *ctxt) 1925 1.1 mrg { 1926 1.1 mrg tree callee = get_current_function ()->decl; 1927 1.1 mrg tree lhs = DECL_RESULT (callee); 1928 1.1 mrg tree rhs = gimple_return_retval (return_stmt); 1929 1.1 mrg 1930 1.1 mrg if (lhs && rhs) 1931 1.1.1.2 mrg { 1932 1.1.1.2 mrg const svalue *sval = get_rvalue (rhs, ctxt); 1933 1.1.1.2 mrg const region *ret_reg = get_lvalue (lhs, ctxt); 1934 1.1.1.2 mrg set_value (ret_reg, sval, ctxt); 1935 1.1.1.2 mrg } 1936 1.1 mrg } 1937 1.1 mrg 1938 1.1 mrg /* Update this model for a call and return of setjmp/sigsetjmp at CALL within 1939 1.1 mrg ENODE, using CTXT to report any diagnostics. 1940 1.1 mrg 1941 1.1 mrg This is for the initial direct invocation of setjmp/sigsetjmp (which returns 1942 1.1 mrg 0), as opposed to any second return due to longjmp/sigsetjmp. 
*/ 1943 1.1 mrg 1944 1.1 mrg void 1945 1.1 mrg region_model::on_setjmp (const gcall *call, const exploded_node *enode, 1946 1.1 mrg region_model_context *ctxt) 1947 1.1 mrg { 1948 1.1.1.2 mrg const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt); 1949 1.1.1.2 mrg const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0), 1950 1.1.1.2 mrg ctxt); 1951 1.1 mrg 1952 1.1.1.2 mrg /* Create a setjmp_svalue for this call and store it in BUF_REG's 1953 1.1.1.2 mrg region. */ 1954 1.1.1.2 mrg if (buf_reg) 1955 1.1 mrg { 1956 1.1 mrg setjmp_record r (enode, call); 1957 1.1.1.2 mrg const svalue *sval 1958 1.1.1.2 mrg = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ()); 1959 1.1.1.2 mrg set_value (buf_reg, sval, ctxt); 1960 1.1 mrg } 1961 1.1 mrg 1962 1.1 mrg /* Direct calls to setjmp return 0. */ 1963 1.1 mrg if (tree lhs = gimple_call_lhs (call)) 1964 1.1 mrg { 1965 1.1.1.2 mrg const svalue *new_sval 1966 1.1.1.2 mrg = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0); 1967 1.1.1.2 mrg const region *lhs_reg = get_lvalue (lhs, ctxt); 1968 1.1.1.2 mrg set_value (lhs_reg, new_sval, ctxt); 1969 1.1 mrg } 1970 1.1 mrg } 1971 1.1 mrg 1972 1.1 mrg /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL 1973 1.1 mrg to a "setjmp" at SETJMP_CALL where the final stack depth should be 1974 1.1.1.2 mrg SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not* 1975 1.1.1.2 mrg done, and should be done by the caller. */ 1976 1.1 mrg 1977 1.1 mrg void 1978 1.1 mrg region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call, 1979 1.1.1.2 mrg int setjmp_stack_depth, region_model_context *ctxt) 1980 1.1 mrg { 1981 1.1 mrg /* Evaluate the val, using the frame of the "longjmp". 
*/ 1982 1.1 mrg tree fake_retval = gimple_call_arg (longjmp_call, 1); 1983 1.1.1.2 mrg const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt); 1984 1.1 mrg 1985 1.1 mrg /* Pop any frames until we reach the stack depth of the function where 1986 1.1 mrg setjmp was called. */ 1987 1.1 mrg gcc_assert (get_stack_depth () >= setjmp_stack_depth); 1988 1.1 mrg while (get_stack_depth () > setjmp_stack_depth) 1989 1.1.1.2 mrg pop_frame (NULL, NULL, ctxt, false); 1990 1.1 mrg 1991 1.1 mrg gcc_assert (get_stack_depth () == setjmp_stack_depth); 1992 1.1 mrg 1993 1.1 mrg /* Assign to LHS of "setjmp" in new_state. */ 1994 1.1 mrg if (tree lhs = gimple_call_lhs (setjmp_call)) 1995 1.1 mrg { 1996 1.1 mrg /* Passing 0 as the val to longjmp leads to setjmp returning 1. */ 1997 1.1.1.2 mrg const svalue *zero_sval 1998 1.1.1.2 mrg = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0); 1999 1.1.1.2 mrg tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval); 2000 1.1 mrg /* If we have 0, use 1. */ 2001 1.1 mrg if (eq_zero.is_true ()) 2002 1.1 mrg { 2003 1.1.1.2 mrg const svalue *one_sval 2004 1.1.1.2 mrg = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1); 2005 1.1.1.2 mrg fake_retval_sval = one_sval; 2006 1.1 mrg } 2007 1.1 mrg else 2008 1.1 mrg { 2009 1.1 mrg /* Otherwise note that the value is nonzero. */ 2010 1.1.1.2 mrg m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval); 2011 1.1 mrg } 2012 1.1 mrg 2013 1.1.1.2 mrg /* Decorate the return value from setjmp as being unmergeable, 2014 1.1.1.2 mrg so that we don't attempt to merge states with it as zero 2015 1.1.1.2 mrg with states in which it's nonzero, leading to a clean distinction 2016 1.1.1.2 mrg in the exploded_graph betweeen the first return and the second 2017 1.1.1.2 mrg return. 
*/ 2018 1.1.1.2 mrg fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval); 2019 1.1 mrg 2020 1.1.1.2 mrg const region *lhs_reg = get_lvalue (lhs, ctxt); 2021 1.1.1.2 mrg set_value (lhs_reg, fake_retval_sval, ctxt); 2022 1.1.1.2 mrg } 2023 1.1 mrg } 2024 1.1 mrg 2025 1.1 mrg /* Update this region_model for a phi stmt of the form 2026 1.1 mrg LHS = PHI <...RHS...>. 2027 1.1.1.2 mrg where RHS is for the appropriate edge. 2028 1.1.1.2 mrg Get state from OLD_STATE so that all of the phi stmts for a basic block 2029 1.1.1.2 mrg are effectively handled simultaneously. */ 2030 1.1 mrg 2031 1.1 mrg void 2032 1.1 mrg region_model::handle_phi (const gphi *phi, 2033 1.1.1.2 mrg tree lhs, tree rhs, 2034 1.1.1.2 mrg const region_model &old_state, 2035 1.1 mrg region_model_context *ctxt) 2036 1.1 mrg { 2037 1.1 mrg /* For now, don't bother tracking the .MEM SSA names. */ 2038 1.1 mrg if (tree var = SSA_NAME_VAR (lhs)) 2039 1.1 mrg if (TREE_CODE (var) == VAR_DECL) 2040 1.1 mrg if (VAR_DECL_IS_VIRTUAL_OPERAND (var)) 2041 1.1 mrg return; 2042 1.1 mrg 2043 1.1.1.2 mrg const svalue *src_sval = old_state.get_rvalue (rhs, ctxt); 2044 1.1.1.2 mrg const region *dst_reg = old_state.get_lvalue (lhs, ctxt); 2045 1.1 mrg 2046 1.1.1.2 mrg set_value (dst_reg, src_sval, ctxt); 2047 1.1 mrg 2048 1.1 mrg if (ctxt) 2049 1.1 mrg ctxt->on_phi (phi, rhs); 2050 1.1 mrg } 2051 1.1 mrg 2052 1.1 mrg /* Implementation of region_model::get_lvalue; the latter adds type-checking. 2053 1.1 mrg 2054 1.1 mrg Get the id of the region for PV within this region_model, 2055 1.1 mrg emitting any diagnostics to CTXT. 
*/ 2056 1.1 mrg 2057 1.1.1.2 mrg const region * 2058 1.1.1.2 mrg region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const 2059 1.1 mrg { 2060 1.1 mrg tree expr = pv.m_tree; 2061 1.1 mrg 2062 1.1 mrg gcc_assert (expr); 2063 1.1 mrg 2064 1.1 mrg switch (TREE_CODE (expr)) 2065 1.1 mrg { 2066 1.1 mrg default: 2067 1.1.1.2 mrg return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr, 2068 1.1.1.2 mrg dump_location_t ()); 2069 1.1 mrg 2070 1.1 mrg case ARRAY_REF: 2071 1.1 mrg { 2072 1.1 mrg tree array = TREE_OPERAND (expr, 0); 2073 1.1 mrg tree index = TREE_OPERAND (expr, 1); 2074 1.1 mrg 2075 1.1.1.2 mrg const region *array_reg = get_lvalue (array, ctxt); 2076 1.1.1.2 mrg const svalue *index_sval = get_rvalue (index, ctxt); 2077 1.1.1.2 mrg return m_mgr->get_element_region (array_reg, 2078 1.1.1.2 mrg TREE_TYPE (TREE_TYPE (array)), 2079 1.1.1.2 mrg index_sval); 2080 1.1 mrg } 2081 1.1 mrg break; 2082 1.1 mrg 2083 1.1 mrg case BIT_FIELD_REF: 2084 1.1 mrg { 2085 1.1.1.2 mrg tree inner_expr = TREE_OPERAND (expr, 0); 2086 1.1.1.2 mrg const region *inner_reg = get_lvalue (inner_expr, ctxt); 2087 1.1.1.2 mrg tree num_bits = TREE_OPERAND (expr, 1); 2088 1.1.1.2 mrg tree first_bit_offset = TREE_OPERAND (expr, 2); 2089 1.1.1.2 mrg gcc_assert (TREE_CODE (num_bits) == INTEGER_CST); 2090 1.1.1.2 mrg gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST); 2091 1.1.1.2 mrg bit_range bits (TREE_INT_CST_LOW (first_bit_offset), 2092 1.1.1.2 mrg TREE_INT_CST_LOW (num_bits)); 2093 1.1.1.2 mrg return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits); 2094 1.1.1.2 mrg } 2095 1.1 mrg break; 2096 1.1 mrg 2097 1.1 mrg case MEM_REF: 2098 1.1 mrg { 2099 1.1 mrg tree ptr = TREE_OPERAND (expr, 0); 2100 1.1 mrg tree offset = TREE_OPERAND (expr, 1); 2101 1.1.1.2 mrg const svalue *ptr_sval = get_rvalue (ptr, ctxt); 2102 1.1.1.2 mrg const svalue *offset_sval = get_rvalue (offset, ctxt); 2103 1.1.1.2 mrg const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt); 2104 
1.1.1.2 mrg return m_mgr->get_offset_region (star_ptr, 2105 1.1.1.2 mrg TREE_TYPE (expr), 2106 1.1.1.2 mrg offset_sval); 2107 1.1 mrg } 2108 1.1 mrg break; 2109 1.1 mrg 2110 1.1.1.2 mrg case FUNCTION_DECL: 2111 1.1.1.2 mrg return m_mgr->get_region_for_fndecl (expr); 2112 1.1.1.2 mrg 2113 1.1.1.2 mrg case LABEL_DECL: 2114 1.1.1.2 mrg return m_mgr->get_region_for_label (expr); 2115 1.1.1.2 mrg 2116 1.1 mrg case VAR_DECL: 2117 1.1 mrg /* Handle globals. */ 2118 1.1 mrg if (is_global_var (expr)) 2119 1.1.1.2 mrg return m_mgr->get_region_for_global (expr); 2120 1.1 mrg 2121 1.1 mrg /* Fall through. */ 2122 1.1 mrg 2123 1.1 mrg case SSA_NAME: 2124 1.1 mrg case PARM_DECL: 2125 1.1 mrg case RESULT_DECL: 2126 1.1 mrg { 2127 1.1 mrg gcc_assert (TREE_CODE (expr) == SSA_NAME 2128 1.1 mrg || TREE_CODE (expr) == PARM_DECL 2129 1.1 mrg || TREE_CODE (expr) == VAR_DECL 2130 1.1 mrg || TREE_CODE (expr) == RESULT_DECL); 2131 1.1 mrg 2132 1.1.1.2 mrg int stack_index = pv.m_stack_depth; 2133 1.1.1.2 mrg const frame_region *frame = get_frame_at_index (stack_index); 2134 1.1 mrg gcc_assert (frame); 2135 1.1.1.2 mrg return frame->get_region_for_local (m_mgr, expr, ctxt); 2136 1.1 mrg } 2137 1.1 mrg 2138 1.1 mrg case COMPONENT_REF: 2139 1.1 mrg { 2140 1.1 mrg /* obj.field */ 2141 1.1 mrg tree obj = TREE_OPERAND (expr, 0); 2142 1.1 mrg tree field = TREE_OPERAND (expr, 1); 2143 1.1.1.2 mrg const region *obj_reg = get_lvalue (obj, ctxt); 2144 1.1.1.2 mrg return m_mgr->get_field_region (obj_reg, field); 2145 1.1 mrg } 2146 1.1 mrg break; 2147 1.1 mrg 2148 1.1 mrg case STRING_CST: 2149 1.1.1.2 mrg return m_mgr->get_region_for_string (expr); 2150 1.1 mrg } 2151 1.1 mrg } 2152 1.1 mrg 2153 1.1 mrg /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. 
*/ 2154 1.1 mrg 2155 1.1 mrg static void 2156 1.1 mrg assert_compat_types (tree src_type, tree dst_type) 2157 1.1 mrg { 2158 1.1 mrg if (src_type && dst_type && !VOID_TYPE_P (dst_type)) 2159 1.1.1.2 mrg { 2160 1.1.1.2 mrg #if CHECKING_P 2161 1.1.1.2 mrg if (!(useless_type_conversion_p (src_type, dst_type))) 2162 1.1.1.2 mrg internal_error ("incompatible types: %qT and %qT", src_type, dst_type); 2163 1.1.1.2 mrg #endif 2164 1.1.1.2 mrg } 2165 1.1.1.2 mrg } 2166 1.1.1.2 mrg 2167 1.1.1.2 mrg /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */ 2168 1.1.1.2 mrg 2169 1.1.1.2 mrg bool 2170 1.1.1.2 mrg compat_types_p (tree src_type, tree dst_type) 2171 1.1.1.2 mrg { 2172 1.1.1.2 mrg if (src_type && dst_type && !VOID_TYPE_P (dst_type)) 2173 1.1.1.2 mrg if (!(useless_type_conversion_p (src_type, dst_type))) 2174 1.1.1.2 mrg return false; 2175 1.1.1.2 mrg return true; 2176 1.1 mrg } 2177 1.1 mrg 2178 1.1.1.2 mrg /* Get the region for PV within this region_model, 2179 1.1 mrg emitting any diagnostics to CTXT. */ 2180 1.1 mrg 2181 1.1.1.2 mrg const region * 2182 1.1.1.2 mrg region_model::get_lvalue (path_var pv, region_model_context *ctxt) const 2183 1.1 mrg { 2184 1.1 mrg if (pv.m_tree == NULL_TREE) 2185 1.1.1.2 mrg return NULL; 2186 1.1 mrg 2187 1.1.1.2 mrg const region *result_reg = get_lvalue_1 (pv, ctxt); 2188 1.1.1.2 mrg assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree)); 2189 1.1.1.2 mrg return result_reg; 2190 1.1 mrg } 2191 1.1 mrg 2192 1.1.1.2 mrg /* Get the region for EXPR within this region_model (assuming the most 2193 1.1 mrg recent stack frame if it's a local). */ 2194 1.1 mrg 2195 1.1.1.2 mrg const region * 2196 1.1.1.2 mrg region_model::get_lvalue (tree expr, region_model_context *ctxt) const 2197 1.1 mrg { 2198 1.1 mrg return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt); 2199 1.1 mrg } 2200 1.1 mrg 2201 1.1 mrg /* Implementation of region_model::get_rvalue; the latter adds type-checking. 
2202 1.1 mrg 2203 1.1 mrg Get the value of PV within this region_model, 2204 1.1 mrg emitting any diagnostics to CTXT. */ 2205 1.1 mrg 2206 1.1.1.2 mrg const svalue * 2207 1.1.1.2 mrg region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const 2208 1.1 mrg { 2209 1.1 mrg gcc_assert (pv.m_tree); 2210 1.1 mrg 2211 1.1 mrg switch (TREE_CODE (pv.m_tree)) 2212 1.1 mrg { 2213 1.1 mrg default: 2214 1.1.1.2 mrg return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree)); 2215 1.1 mrg 2216 1.1 mrg case ADDR_EXPR: 2217 1.1 mrg { 2218 1.1 mrg /* "&EXPR". */ 2219 1.1 mrg tree expr = pv.m_tree; 2220 1.1 mrg tree op0 = TREE_OPERAND (expr, 0); 2221 1.1.1.2 mrg const region *expr_reg = get_lvalue (op0, ctxt); 2222 1.1.1.2 mrg return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg); 2223 1.1 mrg } 2224 1.1 mrg break; 2225 1.1 mrg 2226 1.1.1.2 mrg case BIT_FIELD_REF: 2227 1.1.1.2 mrg { 2228 1.1.1.2 mrg tree expr = pv.m_tree; 2229 1.1.1.2 mrg tree op0 = TREE_OPERAND (expr, 0); 2230 1.1.1.2 mrg const region *reg = get_lvalue (op0, ctxt); 2231 1.1.1.2 mrg tree num_bits = TREE_OPERAND (expr, 1); 2232 1.1.1.2 mrg tree first_bit_offset = TREE_OPERAND (expr, 2); 2233 1.1.1.2 mrg gcc_assert (TREE_CODE (num_bits) == INTEGER_CST); 2234 1.1.1.2 mrg gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST); 2235 1.1.1.2 mrg bit_range bits (TREE_INT_CST_LOW (first_bit_offset), 2236 1.1.1.2 mrg TREE_INT_CST_LOW (num_bits)); 2237 1.1.1.2 mrg return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt); 2238 1.1.1.2 mrg } 2239 1.1.1.2 mrg 2240 1.1.1.2 mrg case VAR_DECL: 2241 1.1.1.2 mrg if (DECL_HARD_REGISTER (pv.m_tree)) 2242 1.1.1.2 mrg { 2243 1.1.1.2 mrg /* If it has a hard register, it doesn't have a memory region 2244 1.1.1.2 mrg and can't be referred to as an lvalue. */ 2245 1.1.1.2 mrg return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree)); 2246 1.1.1.2 mrg } 2247 1.1.1.2 mrg /* Fall through. 
*/ 2248 1.1.1.2 mrg case PARM_DECL: 2249 1.1.1.2 mrg case SSA_NAME: 2250 1.1.1.2 mrg case RESULT_DECL: 2251 1.1 mrg case ARRAY_REF: 2252 1.1 mrg { 2253 1.1.1.2 mrg const region *reg = get_lvalue (pv, ctxt); 2254 1.1.1.2 mrg return get_store_value (reg, ctxt); 2255 1.1 mrg } 2256 1.1 mrg 2257 1.1.1.2 mrg case REALPART_EXPR: 2258 1.1.1.2 mrg case IMAGPART_EXPR: 2259 1.1.1.2 mrg case VIEW_CONVERT_EXPR: 2260 1.1.1.2 mrg { 2261 1.1.1.2 mrg tree expr = pv.m_tree; 2262 1.1.1.2 mrg tree arg = TREE_OPERAND (expr, 0); 2263 1.1.1.2 mrg const svalue *arg_sval = get_rvalue (arg, ctxt); 2264 1.1.1.2 mrg const svalue *sval_unaryop 2265 1.1.1.2 mrg = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr), 2266 1.1.1.2 mrg arg_sval); 2267 1.1.1.2 mrg return sval_unaryop; 2268 1.1.1.2 mrg }; 2269 1.1.1.2 mrg 2270 1.1 mrg case INTEGER_CST: 2271 1.1 mrg case REAL_CST: 2272 1.1.1.2 mrg case COMPLEX_CST: 2273 1.1.1.2 mrg case VECTOR_CST: 2274 1.1 mrg case STRING_CST: 2275 1.1.1.2 mrg return m_mgr->get_or_create_constant_svalue (pv.m_tree); 2276 1.1.1.2 mrg 2277 1.1.1.2 mrg case POINTER_PLUS_EXPR: 2278 1.1.1.2 mrg { 2279 1.1.1.2 mrg tree expr = pv.m_tree; 2280 1.1.1.2 mrg tree ptr = TREE_OPERAND (expr, 0); 2281 1.1.1.2 mrg tree offset = TREE_OPERAND (expr, 1); 2282 1.1.1.2 mrg const svalue *ptr_sval = get_rvalue (ptr, ctxt); 2283 1.1.1.2 mrg const svalue *offset_sval = get_rvalue (offset, ctxt); 2284 1.1.1.2 mrg const svalue *sval_binop 2285 1.1.1.2 mrg = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR, 2286 1.1.1.2 mrg ptr_sval, offset_sval); 2287 1.1.1.2 mrg return sval_binop; 2288 1.1.1.2 mrg } 2289 1.1.1.2 mrg 2290 1.1.1.2 mrg /* Binary ops. 
*/ 2291 1.1.1.2 mrg case PLUS_EXPR: 2292 1.1.1.2 mrg case MULT_EXPR: 2293 1.1.1.2 mrg { 2294 1.1.1.2 mrg tree expr = pv.m_tree; 2295 1.1.1.2 mrg tree arg0 = TREE_OPERAND (expr, 0); 2296 1.1.1.2 mrg tree arg1 = TREE_OPERAND (expr, 1); 2297 1.1.1.2 mrg const svalue *arg0_sval = get_rvalue (arg0, ctxt); 2298 1.1.1.2 mrg const svalue *arg1_sval = get_rvalue (arg1, ctxt); 2299 1.1.1.2 mrg const svalue *sval_binop 2300 1.1.1.2 mrg = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr), 2301 1.1.1.2 mrg arg0_sval, arg1_sval); 2302 1.1.1.2 mrg return sval_binop; 2303 1.1.1.2 mrg } 2304 1.1 mrg 2305 1.1 mrg case COMPONENT_REF: 2306 1.1 mrg case MEM_REF: 2307 1.1 mrg { 2308 1.1.1.2 mrg const region *ref_reg = get_lvalue (pv, ctxt); 2309 1.1.1.2 mrg return get_store_value (ref_reg, ctxt); 2310 1.1.1.2 mrg } 2311 1.1.1.2 mrg case OBJ_TYPE_REF: 2312 1.1.1.2 mrg { 2313 1.1.1.2 mrg tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree); 2314 1.1.1.2 mrg return get_rvalue (expr, ctxt); 2315 1.1 mrg } 2316 1.1 mrg } 2317 1.1 mrg } 2318 1.1 mrg 2319 1.1 mrg /* Get the value of PV within this region_model, 2320 1.1 mrg emitting any diagnostics to CTXT. */ 2321 1.1 mrg 2322 1.1.1.2 mrg const svalue * 2323 1.1.1.2 mrg region_model::get_rvalue (path_var pv, region_model_context *ctxt) const 2324 1.1 mrg { 2325 1.1 mrg if (pv.m_tree == NULL_TREE) 2326 1.1.1.2 mrg return NULL; 2327 1.1 mrg 2328 1.1.1.2 mrg const svalue *result_sval = get_rvalue_1 (pv, ctxt); 2329 1.1 mrg 2330 1.1.1.2 mrg assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree)); 2331 1.1.1.2 mrg 2332 1.1.1.2 mrg result_sval = check_for_poison (result_sval, pv.m_tree, ctxt); 2333 1.1.1.2 mrg 2334 1.1.1.2 mrg return result_sval; 2335 1.1 mrg } 2336 1.1 mrg 2337 1.1 mrg /* Get the value of EXPR within this region_model (assuming the most 2338 1.1 mrg recent stack frame if it's a local). 
*/ 2339 1.1 mrg 2340 1.1.1.2 mrg const svalue * 2341 1.1.1.2 mrg region_model::get_rvalue (tree expr, region_model_context *ctxt) const 2342 1.1 mrg { 2343 1.1 mrg return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt); 2344 1.1 mrg } 2345 1.1 mrg 2346 1.1.1.2 mrg /* Return true if this model is on a path with "main" as the entrypoint 2347 1.1.1.2 mrg (as opposed to one in which we're merely analyzing a subset of the 2348 1.1.1.2 mrg path through the code). */ 2349 1.1 mrg 2350 1.1.1.2 mrg bool 2351 1.1.1.2 mrg region_model::called_from_main_p () const 2352 1.1 mrg { 2353 1.1.1.2 mrg if (!m_current_frame) 2354 1.1.1.2 mrg return false; 2355 1.1.1.2 mrg /* Determine if the oldest stack frame in this model is for "main". */ 2356 1.1.1.2 mrg const frame_region *frame0 = get_frame_at_index (0); 2357 1.1.1.2 mrg gcc_assert (frame0); 2358 1.1.1.2 mrg return id_equal (DECL_NAME (frame0->get_function ()->decl), "main"); 2359 1.1.1.2 mrg } 2360 1.1.1.2 mrg 2361 1.1.1.2 mrg /* Subroutine of region_model::get_store_value for when REG is (or is within) 2362 1.1.1.2 mrg a global variable that hasn't been touched since the start of this path 2363 1.1.1.2 mrg (or was implicitly touched due to a call to an unknown function). */ 2364 1.1.1.2 mrg 2365 1.1.1.2 mrg const svalue * 2366 1.1.1.2 mrg region_model::get_initial_value_for_global (const region *reg) const 2367 1.1.1.2 mrg { 2368 1.1.1.2 mrg /* Get the decl that REG is for (or is within). */ 2369 1.1.1.2 mrg const decl_region *base_reg 2370 1.1.1.2 mrg = reg->get_base_region ()->dyn_cast_decl_region (); 2371 1.1.1.2 mrg gcc_assert (base_reg); 2372 1.1.1.2 mrg tree decl = base_reg->get_decl (); 2373 1.1.1.2 mrg 2374 1.1.1.2 mrg /* Special-case: to avoid having to explicitly update all previously 2375 1.1.1.2 mrg untracked globals when calling an unknown fn, they implicitly have 2376 1.1.1.2 mrg an unknown value if an unknown call has occurred, unless this is 2377 1.1.1.2 mrg static to-this-TU and hasn't escaped. 
Globals that have escaped 2378 1.1.1.2 mrg are explicitly tracked, so we shouldn't hit this case for them. */ 2379 1.1.1.2 mrg if (m_store.called_unknown_fn_p () 2380 1.1.1.2 mrg && TREE_PUBLIC (decl) 2381 1.1.1.2 mrg && !TREE_READONLY (decl)) 2382 1.1.1.2 mrg return m_mgr->get_or_create_unknown_svalue (reg->get_type ()); 2383 1.1.1.2 mrg 2384 1.1.1.2 mrg /* If we are on a path from the entrypoint from "main" and we have a 2385 1.1.1.2 mrg global decl defined in this TU that hasn't been touched yet, then 2386 1.1.1.2 mrg the initial value of REG can be taken from the initialization value 2387 1.1.1.2 mrg of the decl. */ 2388 1.1.1.2 mrg if (called_from_main_p () || TREE_READONLY (decl)) 2389 1.1.1.2 mrg { 2390 1.1.1.2 mrg /* Attempt to get the initializer value for base_reg. */ 2391 1.1.1.2 mrg if (const svalue *base_reg_init 2392 1.1.1.2 mrg = base_reg->get_svalue_for_initializer (m_mgr)) 2393 1.1.1.2 mrg { 2394 1.1.1.2 mrg if (reg == base_reg) 2395 1.1.1.2 mrg return base_reg_init; 2396 1.1.1.2 mrg else 2397 1.1.1.2 mrg { 2398 1.1.1.2 mrg /* Get the value for REG within base_reg_init. */ 2399 1.1.1.2 mrg binding_cluster c (base_reg); 2400 1.1.1.2 mrg c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init); 2401 1.1.1.2 mrg const svalue *sval 2402 1.1.1.2 mrg = c.get_any_binding (m_mgr->get_store_manager (), reg); 2403 1.1.1.2 mrg if (sval) 2404 1.1.1.2 mrg { 2405 1.1.1.2 mrg if (reg->get_type ()) 2406 1.1.1.2 mrg sval = m_mgr->get_or_create_cast (reg->get_type (), 2407 1.1.1.2 mrg sval); 2408 1.1.1.2 mrg return sval; 2409 1.1.1.2 mrg } 2410 1.1.1.2 mrg } 2411 1.1.1.2 mrg } 2412 1.1.1.2 mrg } 2413 1.1 mrg 2414 1.1.1.2 mrg /* Otherwise, return INIT_VAL(REG). */ 2415 1.1.1.2 mrg return m_mgr->get_or_create_initial_value (reg); 2416 1.1 mrg } 2417 1.1 mrg 2418 1.1.1.2 mrg /* Get a value for REG, looking it up in the store, or otherwise falling 2419 1.1.1.2 mrg back to "initial" or "unknown" values. 
2420 1.1.1.2 mrg Use CTXT to report any warnings associated with reading from REG. */ 2421 1.1 mrg 2422 1.1.1.2 mrg const svalue * 2423 1.1.1.2 mrg region_model::get_store_value (const region *reg, 2424 1.1.1.2 mrg region_model_context *ctxt) const 2425 1.1 mrg { 2426 1.1.1.2 mrg check_region_for_read (reg, ctxt); 2427 1.1 mrg 2428 1.1.1.2 mrg /* Special-case: handle var_decls in the constant pool. */ 2429 1.1.1.2 mrg if (const decl_region *decl_reg = reg->dyn_cast_decl_region ()) 2430 1.1.1.2 mrg if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr)) 2431 1.1.1.2 mrg return sval; 2432 1.1 mrg 2433 1.1.1.2 mrg const svalue *sval 2434 1.1.1.2 mrg = m_store.get_any_binding (m_mgr->get_store_manager (), reg); 2435 1.1.1.2 mrg if (sval) 2436 1.1.1.2 mrg { 2437 1.1.1.2 mrg if (reg->get_type ()) 2438 1.1.1.2 mrg sval = m_mgr->get_or_create_cast (reg->get_type (), sval); 2439 1.1.1.2 mrg return sval; 2440 1.1.1.2 mrg } 2441 1.1.1.2 mrg 2442 1.1.1.2 mrg /* Special-case: read at a constant index within a STRING_CST. */ 2443 1.1.1.2 mrg if (const offset_region *offset_reg = reg->dyn_cast_offset_region ()) 2444 1.1.1.2 mrg if (tree byte_offset_cst 2445 1.1.1.2 mrg = offset_reg->get_byte_offset ()->maybe_get_constant ()) 2446 1.1.1.2 mrg if (const string_region *str_reg 2447 1.1.1.2 mrg = reg->get_parent_region ()->dyn_cast_string_region ()) 2448 1.1.1.2 mrg { 2449 1.1.1.2 mrg tree string_cst = str_reg->get_string_cst (); 2450 1.1.1.2 mrg if (const svalue *char_sval 2451 1.1.1.2 mrg = m_mgr->maybe_get_char_from_string_cst (string_cst, 2452 1.1.1.2 mrg byte_offset_cst)) 2453 1.1.1.2 mrg return m_mgr->get_or_create_cast (reg->get_type (), char_sval); 2454 1.1.1.2 mrg } 2455 1.1.1.2 mrg 2456 1.1.1.2 mrg /* Special-case: read the initial char of a STRING_CST. 
*/ 2457 1.1.1.2 mrg if (const cast_region *cast_reg = reg->dyn_cast_cast_region ()) 2458 1.1.1.2 mrg if (const string_region *str_reg 2459 1.1.1.2 mrg = cast_reg->get_original_region ()->dyn_cast_string_region ()) 2460 1.1.1.2 mrg { 2461 1.1.1.2 mrg tree string_cst = str_reg->get_string_cst (); 2462 1.1.1.2 mrg tree byte_offset_cst = build_int_cst (integer_type_node, 0); 2463 1.1.1.2 mrg if (const svalue *char_sval 2464 1.1.1.2 mrg = m_mgr->maybe_get_char_from_string_cst (string_cst, 2465 1.1.1.2 mrg byte_offset_cst)) 2466 1.1.1.2 mrg return m_mgr->get_or_create_cast (reg->get_type (), char_sval); 2467 1.1.1.2 mrg } 2468 1.1.1.2 mrg 2469 1.1.1.2 mrg /* Otherwise we implicitly have the initial value of the region 2470 1.1.1.2 mrg (if the cluster had been touched, binding_cluster::get_any_binding, 2471 1.1.1.2 mrg would have returned UNKNOWN, and we would already have returned 2472 1.1.1.2 mrg that above). */ 2473 1.1.1.2 mrg 2474 1.1.1.2 mrg /* Handle globals. */ 2475 1.1.1.2 mrg if (reg->get_base_region ()->get_parent_region ()->get_kind () 2476 1.1.1.2 mrg == RK_GLOBALS) 2477 1.1.1.2 mrg return get_initial_value_for_global (reg); 2478 1.1.1.2 mrg 2479 1.1.1.2 mrg return m_mgr->get_or_create_initial_value (reg); 2480 1.1.1.2 mrg } 2481 1.1.1.2 mrg 2482 1.1.1.2 mrg /* Return false if REG does not exist, true if it may do. 2483 1.1.1.2 mrg This is for detecting regions within the stack that don't exist anymore 2484 1.1.1.2 mrg after frames are popped. */ 2485 1.1 mrg 2486 1.1.1.2 mrg bool 2487 1.1.1.2 mrg region_model::region_exists_p (const region *reg) const 2488 1.1 mrg { 2489 1.1.1.2 mrg /* If within a stack frame, check that the stack frame is live. */ 2490 1.1.1.2 mrg if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ()) 2491 1.1.1.2 mrg { 2492 1.1.1.2 mrg /* Check that the current frame is the enclosing frame, or is called 2493 1.1.1.2 mrg by it. 
*/ 2494 1.1.1.2 mrg for (const frame_region *iter_frame = get_current_frame (); iter_frame; 2495 1.1.1.2 mrg iter_frame = iter_frame->get_calling_frame ()) 2496 1.1.1.2 mrg if (iter_frame == enclosing_frame) 2497 1.1.1.2 mrg return true; 2498 1.1.1.2 mrg return false; 2499 1.1.1.2 mrg } 2500 1.1.1.2 mrg 2501 1.1.1.2 mrg return true; 2502 1.1 mrg } 2503 1.1 mrg 2504 1.1.1.2 mrg /* Get a region for referencing PTR_SVAL, creating a region if need be, and 2505 1.1.1.2 mrg potentially generating warnings via CTXT. 2506 1.1.1.2 mrg PTR_SVAL must be of pointer type. 2507 1.1.1.2 mrg PTR_TREE if non-NULL can be used when emitting diagnostics. */ 2508 1.1.1.2 mrg 2509 1.1.1.2 mrg const region * 2510 1.1.1.2 mrg region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree, 2511 1.1.1.2 mrg region_model_context *ctxt) const 2512 1.1.1.2 mrg { 2513 1.1.1.2 mrg gcc_assert (ptr_sval); 2514 1.1.1.2 mrg gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ())); 2515 1.1.1.2 mrg 2516 1.1.1.2 mrg /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this 2517 1.1.1.2 mrg as a constraint. This suppresses false positives from 2518 1.1.1.2 mrg -Wanalyzer-null-dereference for the case where we later have an 2519 1.1.1.2 mrg if (PTR_SVAL) that would occur if we considered the false branch 2520 1.1.1.2 mrg and transitioned the malloc state machine from start->null. 
*/ 2521 1.1.1.2 mrg tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0); 2522 1.1.1.2 mrg const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst); 2523 1.1.1.2 mrg m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr); 2524 1.1 mrg 2525 1.1.1.2 mrg switch (ptr_sval->get_kind ()) 2526 1.1.1.2 mrg { 2527 1.1.1.2 mrg default: 2528 1.1.1.2 mrg break; 2529 1.1.1.2 mrg 2530 1.1.1.2 mrg case SK_REGION: 2531 1.1.1.2 mrg { 2532 1.1.1.2 mrg const region_svalue *region_sval 2533 1.1.1.2 mrg = as_a <const region_svalue *> (ptr_sval); 2534 1.1.1.2 mrg return region_sval->get_pointee (); 2535 1.1.1.2 mrg } 2536 1.1.1.2 mrg 2537 1.1.1.2 mrg case SK_BINOP: 2538 1.1.1.2 mrg { 2539 1.1.1.2 mrg const binop_svalue *binop_sval 2540 1.1.1.2 mrg = as_a <const binop_svalue *> (ptr_sval); 2541 1.1.1.2 mrg switch (binop_sval->get_op ()) 2542 1.1.1.2 mrg { 2543 1.1.1.2 mrg case POINTER_PLUS_EXPR: 2544 1.1.1.2 mrg { 2545 1.1.1.2 mrg /* If we have a symbolic value expressing pointer arithmentic, 2546 1.1.1.2 mrg try to convert it to a suitable region. */ 2547 1.1.1.2 mrg const region *parent_region 2548 1.1.1.2 mrg = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt); 2549 1.1.1.2 mrg const svalue *offset = binop_sval->get_arg1 (); 2550 1.1.1.2 mrg tree type= TREE_TYPE (ptr_sval->get_type ()); 2551 1.1.1.2 mrg return m_mgr->get_offset_region (parent_region, type, offset); 2552 1.1.1.2 mrg } 2553 1.1.1.2 mrg default: 2554 1.1.1.2 mrg break; 2555 1.1.1.2 mrg } 2556 1.1.1.2 mrg } 2557 1.1.1.2 mrg break; 2558 1.1 mrg 2559 1.1.1.2 mrg case SK_POISONED: 2560 1.1.1.2 mrg { 2561 1.1.1.2 mrg if (ctxt) 2562 1.1.1.2 mrg { 2563 1.1.1.2 mrg tree ptr = get_representative_tree (ptr_sval); 2564 1.1.1.2 mrg /* If we can't get a representative tree for PTR_SVAL 2565 1.1.1.2 mrg (e.g. if it hasn't been bound into the store), then 2566 1.1.1.2 mrg fall back on PTR_TREE, if non-NULL. 
*/ 2567 1.1.1.2 mrg if (!ptr) 2568 1.1.1.2 mrg ptr = ptr_tree; 2569 1.1.1.2 mrg if (ptr) 2570 1.1.1.2 mrg { 2571 1.1.1.2 mrg const poisoned_svalue *poisoned_sval 2572 1.1.1.2 mrg = as_a <const poisoned_svalue *> (ptr_sval); 2573 1.1.1.2 mrg enum poison_kind pkind = poisoned_sval->get_poison_kind (); 2574 1.1.1.2 mrg ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL)); 2575 1.1.1.2 mrg } 2576 1.1.1.2 mrg } 2577 1.1.1.2 mrg } 2578 1.1.1.2 mrg break; 2579 1.1.1.2 mrg } 2580 1.1 mrg 2581 1.1.1.2 mrg return m_mgr->get_symbolic_region (ptr_sval); 2582 1.1 mrg } 2583 1.1 mrg 2584 1.1.1.2 mrg /* Attempt to get BITS within any value of REG, as TYPE. 2585 1.1.1.2 mrg In particular, extract values from compound_svalues for the case 2586 1.1.1.2 mrg where there's a concrete binding at BITS. 2587 1.1.1.2 mrg Return an unknown svalue if we can't handle the given case. 2588 1.1.1.2 mrg Use CTXT to report any warnings associated with reading from REG. */ 2589 1.1 mrg 2590 1.1.1.2 mrg const svalue * 2591 1.1.1.2 mrg region_model::get_rvalue_for_bits (tree type, 2592 1.1.1.2 mrg const region *reg, 2593 1.1.1.2 mrg const bit_range &bits, 2594 1.1.1.2 mrg region_model_context *ctxt) const 2595 1.1 mrg { 2596 1.1.1.2 mrg const svalue *sval = get_store_value (reg, ctxt); 2597 1.1.1.2 mrg return m_mgr->get_or_create_bits_within (type, bits, sval); 2598 1.1 mrg } 2599 1.1 mrg 2600 1.1.1.2 mrg /* A subclass of pending_diagnostic for complaining about writes to 2601 1.1.1.2 mrg constant regions of memory. 
*/ 2602 1.1 mrg 2603 1.1.1.2 mrg class write_to_const_diagnostic 2604 1.1.1.2 mrg : public pending_diagnostic_subclass<write_to_const_diagnostic> 2605 1.1 mrg { 2606 1.1.1.2 mrg public: 2607 1.1.1.2 mrg write_to_const_diagnostic (const region *reg, tree decl) 2608 1.1.1.2 mrg : m_reg (reg), m_decl (decl) 2609 1.1.1.2 mrg {} 2610 1.1 mrg 2611 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE 2612 1.1.1.2 mrg { 2613 1.1.1.2 mrg return "write_to_const_diagnostic"; 2614 1.1.1.2 mrg } 2615 1.1 mrg 2616 1.1.1.2 mrg bool operator== (const write_to_const_diagnostic &other) const 2617 1.1.1.2 mrg { 2618 1.1.1.2 mrg return (m_reg == other.m_reg 2619 1.1.1.2 mrg && m_decl == other.m_decl); 2620 1.1.1.2 mrg } 2621 1.1.1.2 mrg 2622 1.1.1.2 mrg int get_controlling_option () const FINAL OVERRIDE 2623 1.1.1.2 mrg { 2624 1.1.1.2 mrg return OPT_Wanalyzer_write_to_const; 2625 1.1.1.2 mrg } 2626 1.1.1.2 mrg 2627 1.1.1.2 mrg bool emit (rich_location *rich_loc) FINAL OVERRIDE 2628 1.1.1.2 mrg { 2629 1.1.1.2 mrg auto_diagnostic_group d; 2630 1.1.1.2 mrg bool warned; 2631 1.1.1.2 mrg switch (m_reg->get_kind ()) 2632 1.1.1.2 mrg { 2633 1.1.1.2 mrg default: 2634 1.1.1.2 mrg warned = warning_at (rich_loc, get_controlling_option (), 2635 1.1.1.2 mrg "write to %<const%> object %qE", m_decl); 2636 1.1.1.2 mrg break; 2637 1.1.1.2 mrg case RK_FUNCTION: 2638 1.1.1.2 mrg warned = warning_at (rich_loc, get_controlling_option (), 2639 1.1.1.2 mrg "write to function %qE", m_decl); 2640 1.1.1.2 mrg break; 2641 1.1.1.2 mrg case RK_LABEL: 2642 1.1.1.2 mrg warned = warning_at (rich_loc, get_controlling_option (), 2643 1.1.1.2 mrg "write to label %qE", m_decl); 2644 1.1.1.2 mrg break; 2645 1.1.1.2 mrg } 2646 1.1.1.2 mrg if (warned) 2647 1.1.1.2 mrg inform (DECL_SOURCE_LOCATION (m_decl), "declared here"); 2648 1.1.1.2 mrg return warned; 2649 1.1.1.2 mrg } 2650 1.1.1.2 mrg 2651 1.1.1.2 mrg label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE 2652 1.1.1.2 mrg { 2653 1.1.1.2 
mrg switch (m_reg->get_kind ()) 2654 1.1.1.2 mrg { 2655 1.1.1.2 mrg default: 2656 1.1.1.2 mrg return ev.formatted_print ("write to %<const%> object %qE here", m_decl); 2657 1.1.1.2 mrg case RK_FUNCTION: 2658 1.1.1.2 mrg return ev.formatted_print ("write to function %qE here", m_decl); 2659 1.1.1.2 mrg case RK_LABEL: 2660 1.1.1.2 mrg return ev.formatted_print ("write to label %qE here", m_decl); 2661 1.1.1.2 mrg } 2662 1.1.1.2 mrg } 2663 1.1 mrg 2664 1.1.1.2 mrg private: 2665 1.1.1.2 mrg const region *m_reg; 2666 1.1.1.2 mrg tree m_decl; 2667 1.1.1.2 mrg }; 2668 1.1 mrg 2669 1.1.1.2 mrg /* A subclass of pending_diagnostic for complaining about writes to 2670 1.1.1.2 mrg string literals. */ 2671 1.1 mrg 2672 1.1.1.2 mrg class write_to_string_literal_diagnostic 2673 1.1.1.2 mrg : public pending_diagnostic_subclass<write_to_string_literal_diagnostic> 2674 1.1 mrg { 2675 1.1.1.2 mrg public: 2676 1.1.1.2 mrg write_to_string_literal_diagnostic (const region *reg) 2677 1.1.1.2 mrg : m_reg (reg) 2678 1.1.1.2 mrg {} 2679 1.1 mrg 2680 1.1.1.2 mrg const char *get_kind () const FINAL OVERRIDE 2681 1.1.1.2 mrg { 2682 1.1.1.2 mrg return "write_to_string_literal_diagnostic"; 2683 1.1.1.2 mrg } 2684 1.1 mrg 2685 1.1.1.2 mrg bool operator== (const write_to_string_literal_diagnostic &other) const 2686 1.1.1.2 mrg { 2687 1.1.1.2 mrg return m_reg == other.m_reg; 2688 1.1.1.2 mrg } 2689 1.1 mrg 2690 1.1.1.2 mrg int get_controlling_option () const FINAL OVERRIDE 2691 1.1.1.2 mrg { 2692 1.1.1.2 mrg return OPT_Wanalyzer_write_to_string_literal; 2693 1.1.1.2 mrg } 2694 1.1 mrg 2695 1.1.1.2 mrg bool emit (rich_location *rich_loc) FINAL OVERRIDE 2696 1.1.1.2 mrg { 2697 1.1.1.2 mrg return warning_at (rich_loc, get_controlling_option (), 2698 1.1.1.2 mrg "write to string literal"); 2699 1.1.1.2 mrg /* Ideally we would show the location of the STRING_CST as well, 2700 1.1.1.2 mrg but it is not available at this point. 
*/ 2701 1.1.1.2 mrg } 2702 1.1.1.2 mrg 2703 1.1.1.2 mrg label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE 2704 1.1.1.2 mrg { 2705 1.1.1.2 mrg return ev.formatted_print ("write to string literal here"); 2706 1.1.1.2 mrg } 2707 1.1.1.2 mrg 2708 1.1.1.2 mrg private: 2709 1.1.1.2 mrg const region *m_reg; 2710 1.1.1.2 mrg }; 2711 1.1.1.2 mrg 2712 1.1.1.2 mrg /* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */ 2713 1.1.1.2 mrg 2714 1.1.1.2 mrg void 2715 1.1.1.2 mrg region_model::check_for_writable_region (const region* dest_reg, 2716 1.1.1.2 mrg region_model_context *ctxt) const 2717 1.1.1.2 mrg { 2718 1.1.1.2 mrg /* Fail gracefully if CTXT is NULL. */ 2719 1.1.1.2 mrg if (!ctxt) 2720 1.1.1.2 mrg return; 2721 1.1.1.2 mrg 2722 1.1.1.2 mrg const region *base_reg = dest_reg->get_base_region (); 2723 1.1.1.2 mrg switch (base_reg->get_kind ()) 2724 1.1.1.2 mrg { 2725 1.1.1.2 mrg default: 2726 1.1.1.2 mrg break; 2727 1.1.1.2 mrg case RK_FUNCTION: 2728 1.1.1.2 mrg { 2729 1.1.1.2 mrg const function_region *func_reg = as_a <const function_region *> (base_reg); 2730 1.1.1.2 mrg tree fndecl = func_reg->get_fndecl (); 2731 1.1.1.2 mrg ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl)); 2732 1.1.1.2 mrg } 2733 1.1.1.2 mrg break; 2734 1.1.1.2 mrg case RK_LABEL: 2735 1.1.1.2 mrg { 2736 1.1.1.2 mrg const label_region *label_reg = as_a <const label_region *> (base_reg); 2737 1.1.1.2 mrg tree label = label_reg->get_label (); 2738 1.1.1.2 mrg ctxt->warn (new write_to_const_diagnostic (label_reg, label)); 2739 1.1.1.2 mrg } 2740 1.1.1.2 mrg break; 2741 1.1.1.2 mrg case RK_DECL: 2742 1.1.1.2 mrg { 2743 1.1.1.2 mrg const decl_region *decl_reg = as_a <const decl_region *> (base_reg); 2744 1.1.1.2 mrg tree decl = decl_reg->get_decl (); 2745 1.1.1.2 mrg /* Warn about writes to const globals. 
2746 1.1.1.2 mrg Don't warn for writes to const locals, and params in particular, 2747 1.1.1.2 mrg since we would warn in push_frame when setting them up (e.g the 2748 1.1.1.2 mrg "this" param is "T* const"). */ 2749 1.1.1.2 mrg if (TREE_READONLY (decl) 2750 1.1.1.2 mrg && is_global_var (decl)) 2751 1.1.1.2 mrg ctxt->warn (new write_to_const_diagnostic (dest_reg, decl)); 2752 1.1.1.2 mrg } 2753 1.1.1.2 mrg break; 2754 1.1.1.2 mrg case RK_STRING: 2755 1.1.1.2 mrg ctxt->warn (new write_to_string_literal_diagnostic (dest_reg)); 2756 1.1.1.2 mrg break; 2757 1.1 mrg } 2758 1.1.1.2 mrg } 2759 1.1.1.2 mrg 2760 1.1.1.2 mrg /* Get the capacity of REG in bytes. */ 2761 1.1 mrg 2762 1.1.1.2 mrg const svalue * 2763 1.1.1.2 mrg region_model::get_capacity (const region *reg) const 2764 1.1.1.2 mrg { 2765 1.1.1.2 mrg switch (reg->get_kind ()) 2766 1.1 mrg { 2767 1.1.1.2 mrg default: 2768 1.1.1.2 mrg break; 2769 1.1.1.2 mrg case RK_DECL: 2770 1.1.1.2 mrg { 2771 1.1.1.2 mrg const decl_region *decl_reg = as_a <const decl_region *> (reg); 2772 1.1.1.2 mrg tree decl = decl_reg->get_decl (); 2773 1.1.1.2 mrg if (TREE_CODE (decl) == SSA_NAME) 2774 1.1.1.2 mrg { 2775 1.1.1.2 mrg tree type = TREE_TYPE (decl); 2776 1.1.1.2 mrg tree size = TYPE_SIZE (type); 2777 1.1.1.2 mrg return get_rvalue (size, NULL); 2778 1.1.1.2 mrg } 2779 1.1.1.2 mrg else 2780 1.1.1.2 mrg { 2781 1.1.1.2 mrg tree size = decl_init_size (decl, false); 2782 1.1.1.2 mrg if (size) 2783 1.1.1.2 mrg return get_rvalue (size, NULL); 2784 1.1.1.2 mrg } 2785 1.1.1.2 mrg } 2786 1.1.1.2 mrg break; 2787 1.1.1.2 mrg case RK_SIZED: 2788 1.1.1.2 mrg /* Look through sized regions to get at the capacity 2789 1.1.1.2 mrg of the underlying regions. 
*/ 2790 1.1.1.2 mrg return get_capacity (reg->get_parent_region ()); 2791 1.1 mrg } 2792 1.1 mrg 2793 1.1.1.2 mrg if (const svalue *recorded = get_dynamic_extents (reg)) 2794 1.1.1.2 mrg return recorded; 2795 1.1.1.2 mrg 2796 1.1.1.2 mrg return m_mgr->get_or_create_unknown_svalue (sizetype); 2797 1.1 mrg } 2798 1.1 mrg 2799 1.1.1.2 mrg /* If CTXT is non-NULL, use it to warn about any problems accessing REG, 2800 1.1.1.2 mrg using DIR to determine if this access is a read or write. */ 2801 1.1.1.2 mrg 2802 1.1.1.2 mrg void 2803 1.1.1.2 mrg region_model::check_region_access (const region *reg, 2804 1.1.1.2 mrg enum access_direction dir, 2805 1.1.1.2 mrg region_model_context *ctxt) const 2806 1.1.1.2 mrg { 2807 1.1.1.2 mrg /* Fail gracefully if CTXT is NULL. */ 2808 1.1.1.2 mrg if (!ctxt) 2809 1.1.1.2 mrg return; 2810 1.1.1.2 mrg 2811 1.1.1.2 mrg check_region_for_taint (reg, dir, ctxt); 2812 1.1.1.2 mrg 2813 1.1.1.2 mrg switch (dir) 2814 1.1.1.2 mrg { 2815 1.1.1.2 mrg default: 2816 1.1.1.2 mrg gcc_unreachable (); 2817 1.1.1.2 mrg case DIR_READ: 2818 1.1.1.2 mrg /* Currently a no-op. */ 2819 1.1.1.2 mrg break; 2820 1.1.1.2 mrg case DIR_WRITE: 2821 1.1.1.2 mrg check_for_writable_region (reg, ctxt); 2822 1.1.1.2 mrg break; 2823 1.1.1.2 mrg } 2824 1.1.1.2 mrg } 2825 1.1 mrg 2826 1.1.1.2 mrg /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */ 2827 1.1 mrg 2828 1.1.1.2 mrg void 2829 1.1.1.2 mrg region_model::check_region_for_write (const region *dest_reg, 2830 1.1.1.2 mrg region_model_context *ctxt) const 2831 1.1 mrg { 2832 1.1.1.2 mrg check_region_access (dest_reg, DIR_WRITE, ctxt); 2833 1.1 mrg } 2834 1.1 mrg 2835 1.1.1.2 mrg /* If CTXT is non-NULL, use it to warn about any problems reading from REG. 
*/ 2836 1.1 mrg 2837 1.1.1.2 mrg void 2838 1.1.1.2 mrg region_model::check_region_for_read (const region *src_reg, 2839 1.1.1.2 mrg region_model_context *ctxt) const 2840 1.1.1.2 mrg { 2841 1.1.1.2 mrg check_region_access (src_reg, DIR_READ, ctxt); 2842 1.1 mrg } 2843 1.1 mrg 2844 1.1.1.2 mrg /* Set the value of the region given by LHS_REG to the value given 2845 1.1.1.2 mrg by RHS_SVAL. 2846 1.1.1.2 mrg Use CTXT to report any warnings associated with writing to LHS_REG. */ 2847 1.1 mrg 2848 1.1.1.2 mrg void 2849 1.1.1.2 mrg region_model::set_value (const region *lhs_reg, const svalue *rhs_sval, 2850 1.1.1.2 mrg region_model_context *ctxt) 2851 1.1 mrg { 2852 1.1.1.2 mrg gcc_assert (lhs_reg); 2853 1.1.1.2 mrg gcc_assert (rhs_sval); 2854 1.1 mrg 2855 1.1.1.2 mrg check_region_for_write (lhs_reg, ctxt); 2856 1.1 mrg 2857 1.1.1.2 mrg m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval, 2858 1.1.1.2 mrg ctxt ? ctxt->get_uncertainty () : NULL); 2859 1.1.1.2 mrg } 2860 1.1 mrg 2861 1.1.1.2 mrg /* Set the value of the region given by LHS to the value given by RHS. */ 2862 1.1 mrg 2863 1.1.1.2 mrg void 2864 1.1.1.2 mrg region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt) 2865 1.1.1.2 mrg { 2866 1.1.1.2 mrg const region *lhs_reg = get_lvalue (lhs, ctxt); 2867 1.1.1.2 mrg const svalue *rhs_sval = get_rvalue (rhs, ctxt); 2868 1.1.1.2 mrg gcc_assert (lhs_reg); 2869 1.1.1.2 mrg gcc_assert (rhs_sval); 2870 1.1.1.2 mrg set_value (lhs_reg, rhs_sval, ctxt); 2871 1.1.1.2 mrg } 2872 1.1 mrg 2873 1.1.1.2 mrg /* Remove all bindings overlapping REG within the store. */ 2874 1.1 mrg 2875 1.1.1.2 mrg void 2876 1.1.1.2 mrg region_model::clobber_region (const region *reg) 2877 1.1.1.2 mrg { 2878 1.1.1.2 mrg m_store.clobber_region (m_mgr->get_store_manager(), reg); 2879 1.1.1.2 mrg } 2880 1.1 mrg 2881 1.1.1.2 mrg /* Remove any bindings for REG within the store. 
*/ 2882 1.1 mrg 2883 1.1.1.2 mrg void 2884 1.1.1.2 mrg region_model::purge_region (const region *reg) 2885 1.1.1.2 mrg { 2886 1.1.1.2 mrg m_store.purge_region (m_mgr->get_store_manager(), reg); 2887 1.1 mrg } 2888 1.1 mrg 2889 1.1.1.2 mrg /* Fill REG with SVAL. */ 2890 1.1 mrg 2891 1.1.1.2 mrg void 2892 1.1.1.2 mrg region_model::fill_region (const region *reg, const svalue *sval) 2893 1.1 mrg { 2894 1.1.1.2 mrg m_store.fill_region (m_mgr->get_store_manager(), reg, sval); 2895 1.1 mrg } 2896 1.1 mrg 2897 1.1.1.2 mrg /* Zero-fill REG. */ 2898 1.1 mrg 2899 1.1 mrg void 2900 1.1.1.2 mrg region_model::zero_fill_region (const region *reg) 2901 1.1 mrg { 2902 1.1.1.2 mrg m_store.zero_fill_region (m_mgr->get_store_manager(), reg); 2903 1.1 mrg } 2904 1.1 mrg 2905 1.1.1.2 mrg /* Mark REG as having unknown content. */ 2906 1.1 mrg 2907 1.1 mrg void 2908 1.1.1.2 mrg region_model::mark_region_as_unknown (const region *reg, 2909 1.1.1.2 mrg uncertainty_t *uncertainty) 2910 1.1 mrg { 2911 1.1.1.2 mrg m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg, 2912 1.1.1.2 mrg uncertainty); 2913 1.1 mrg } 2914 1.1 mrg 2915 1.1.1.2 mrg /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within 2916 1.1 mrg this model. */ 2917 1.1 mrg 2918 1.1 mrg tristate 2919 1.1.1.2 mrg region_model::eval_condition (const svalue *lhs, 2920 1.1.1.2 mrg enum tree_code op, 2921 1.1.1.2 mrg const svalue *rhs) const 2922 1.1 mrg { 2923 1.1 mrg /* For now, make no attempt to capture constraints on floating-point 2924 1.1 mrg values. */ 2925 1.1 mrg if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ())) 2926 1.1 mrg || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ()))) 2927 1.1 mrg return tristate::unknown (); 2928 1.1 mrg 2929 1.1.1.2 mrg tristate ts = eval_condition_without_cm (lhs, op, rhs); 2930 1.1 mrg if (ts.is_known ()) 2931 1.1 mrg return ts; 2932 1.1 mrg 2933 1.1 mrg /* Otherwise, try constraints. 
*/ 2934 1.1.1.2 mrg return m_constraints->eval_condition (lhs, op, rhs); 2935 1.1 mrg } 2936 1.1 mrg 2937 1.1.1.2 mrg /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within 2938 1.1 mrg this model, without resorting to the constraint_manager. 2939 1.1 mrg 2940 1.1 mrg This is exposed so that impl_region_model_context::on_state_leak can 2941 1.1 mrg check for equality part-way through region_model::purge_unused_svalues 2942 1.1 mrg without risking creating new ECs. */ 2943 1.1 mrg 2944 1.1 mrg tristate 2945 1.1.1.2 mrg region_model::eval_condition_without_cm (const svalue *lhs, 2946 1.1.1.2 mrg enum tree_code op, 2947 1.1.1.2 mrg const svalue *rhs) const 2948 1.1 mrg { 2949 1.1 mrg gcc_assert (lhs); 2950 1.1 mrg gcc_assert (rhs); 2951 1.1 mrg 2952 1.1 mrg /* See what we know based on the values. */ 2953 1.1.1.2 mrg 2954 1.1.1.2 mrg /* For now, make no attempt to capture constraints on floating-point 2955 1.1.1.2 mrg values. */ 2956 1.1.1.2 mrg if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ())) 2957 1.1.1.2 mrg || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ()))) 2958 1.1.1.2 mrg return tristate::unknown (); 2959 1.1.1.2 mrg 2960 1.1.1.2 mrg /* Unwrap any unmergeable values. */ 2961 1.1.1.2 mrg lhs = lhs->unwrap_any_unmergeable (); 2962 1.1.1.2 mrg rhs = rhs->unwrap_any_unmergeable (); 2963 1.1.1.2 mrg 2964 1.1.1.2 mrg if (lhs == rhs) 2965 1.1.1.2 mrg { 2966 1.1.1.2 mrg /* If we have the same svalue, then we have equality 2967 1.1.1.2 mrg (apart from NaN-handling). 2968 1.1.1.2 mrg TODO: should this definitely be the case for poisoned values? */ 2969 1.1.1.2 mrg /* Poisoned and unknown values are "unknowable". 
*/ 2970 1.1.1.2 mrg if (lhs->get_kind () == SK_POISONED 2971 1.1.1.2 mrg || lhs->get_kind () == SK_UNKNOWN) 2972 1.1.1.2 mrg return tristate::TS_UNKNOWN; 2973 1.1.1.2 mrg 2974 1.1.1.2 mrg switch (op) 2975 1.1.1.2 mrg { 2976 1.1.1.2 mrg case EQ_EXPR: 2977 1.1.1.2 mrg case GE_EXPR: 2978 1.1.1.2 mrg case LE_EXPR: 2979 1.1.1.2 mrg return tristate::TS_TRUE; 2980 1.1.1.2 mrg 2981 1.1.1.2 mrg case NE_EXPR: 2982 1.1.1.2 mrg case GT_EXPR: 2983 1.1.1.2 mrg case LT_EXPR: 2984 1.1.1.2 mrg return tristate::TS_FALSE; 2985 1.1.1.2 mrg 2986 1.1.1.2 mrg default: 2987 1.1.1.2 mrg /* For other ops, use the logic below. */ 2988 1.1.1.2 mrg break; 2989 1.1.1.2 mrg } 2990 1.1.1.2 mrg } 2991 1.1.1.2 mrg 2992 1.1.1.2 mrg /* If we have a pair of region_svalues, compare them. */ 2993 1.1.1.2 mrg if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ()) 2994 1.1.1.2 mrg if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ()) 2995 1.1.1.2 mrg { 2996 1.1.1.2 mrg tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr); 2997 1.1.1.2 mrg if (res.is_known ()) 2998 1.1.1.2 mrg return res; 2999 1.1.1.2 mrg /* Otherwise, only known through constraints. */ 3000 1.1.1.2 mrg } 3001 1.1.1.2 mrg 3002 1.1.1.2 mrg if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ()) 3003 1.1 mrg { 3004 1.1.1.2 mrg /* If we have a pair of constants, compare them. */ 3005 1.1.1.2 mrg if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ()) 3006 1.1.1.2 mrg return constant_svalue::eval_condition (cst_lhs, op, cst_rhs); 3007 1.1.1.2 mrg else 3008 1.1.1.2 mrg { 3009 1.1.1.2 mrg /* When we have one constant, put it on the RHS. */ 3010 1.1.1.2 mrg std::swap (lhs, rhs); 3011 1.1.1.2 mrg op = swap_tree_comparison (op); 3012 1.1 mrg } 3013 1.1.1.2 mrg } 3014 1.1.1.2 mrg gcc_assert (lhs->get_kind () != SK_CONSTANT); 3015 1.1.1.2 mrg 3016 1.1.1.2 mrg /* Handle comparison against zero. 
*/ 3017 1.1.1.2 mrg if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ()) 3018 1.1.1.2 mrg if (zerop (cst_rhs->get_constant ())) 3019 1.1.1.2 mrg { 3020 1.1.1.2 mrg if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ()) 3021 1.1.1.2 mrg { 3022 1.1.1.2 mrg /* A region_svalue is a non-NULL pointer, except in certain 3023 1.1.1.2 mrg special cases (see the comment for region::non_null_p). */ 3024 1.1.1.2 mrg const region *pointee = ptr->get_pointee (); 3025 1.1.1.2 mrg if (pointee->non_null_p ()) 3026 1.1.1.2 mrg { 3027 1.1.1.2 mrg switch (op) 3028 1.1.1.2 mrg { 3029 1.1.1.2 mrg default: 3030 1.1.1.2 mrg gcc_unreachable (); 3031 1.1 mrg 3032 1.1.1.2 mrg case EQ_EXPR: 3033 1.1.1.2 mrg case GE_EXPR: 3034 1.1.1.2 mrg case LE_EXPR: 3035 1.1.1.2 mrg return tristate::TS_FALSE; 3036 1.1.1.2 mrg 3037 1.1.1.2 mrg case NE_EXPR: 3038 1.1.1.2 mrg case GT_EXPR: 3039 1.1.1.2 mrg case LT_EXPR: 3040 1.1.1.2 mrg return tristate::TS_TRUE; 3041 1.1.1.2 mrg } 3042 1.1.1.2 mrg } 3043 1.1.1.2 mrg } 3044 1.1.1.2 mrg else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ()) 3045 1.1.1.2 mrg { 3046 1.1.1.2 mrg /* Treat offsets from a non-NULL pointer as being non-NULL. This 3047 1.1.1.2 mrg isn't strictly true, in that eventually ptr++ will wrap 3048 1.1.1.2 mrg around and be NULL, but it won't occur in practise and thus 3049 1.1.1.2 mrg can be used to suppress effectively false positives that we 3050 1.1.1.2 mrg shouldn't warn for. 
*/ 3051 1.1.1.2 mrg if (binop->get_op () == POINTER_PLUS_EXPR) 3052 1.1.1.2 mrg { 3053 1.1.1.2 mrg tristate lhs_ts 3054 1.1.1.2 mrg = eval_condition_without_cm (binop->get_arg0 (), 3055 1.1.1.2 mrg op, rhs); 3056 1.1.1.2 mrg if (lhs_ts.is_known ()) 3057 1.1.1.2 mrg return lhs_ts; 3058 1.1.1.2 mrg } 3059 1.1.1.2 mrg } 3060 1.1.1.2 mrg else if (const unaryop_svalue *unaryop 3061 1.1.1.2 mrg = lhs->dyn_cast_unaryop_svalue ()) 3062 1.1 mrg { 3063 1.1.1.2 mrg if (unaryop->get_op () == NEGATE_EXPR) 3064 1.1.1.2 mrg { 3065 1.1.1.2 mrg /* e.g. "-X <= 0" is equivalent to X >= 0". */ 3066 1.1.1.2 mrg tristate lhs_ts = eval_condition (unaryop->get_arg (), 3067 1.1.1.2 mrg swap_tree_comparison (op), 3068 1.1.1.2 mrg rhs); 3069 1.1.1.2 mrg if (lhs_ts.is_known ()) 3070 1.1.1.2 mrg return lhs_ts; 3071 1.1.1.2 mrg } 3072 1.1 mrg } 3073 1.1.1.2 mrg } 3074 1.1 mrg 3075 1.1.1.2 mrg /* Handle rejection of equality for comparisons of the initial values of 3076 1.1.1.2 mrg "external" values (such as params) with the address of locals. 
*/ 3077 1.1.1.2 mrg if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ()) 3078 1.1.1.2 mrg if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ()) 3079 1.1.1.2 mrg { 3080 1.1.1.2 mrg tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr); 3081 1.1.1.2 mrg if (res.is_known ()) 3082 1.1.1.2 mrg return res; 3083 1.1.1.2 mrg } 3084 1.1.1.2 mrg if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ()) 3085 1.1.1.2 mrg if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ()) 3086 1.1.1.2 mrg { 3087 1.1.1.2 mrg tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr); 3088 1.1.1.2 mrg if (res.is_known ()) 3089 1.1.1.2 mrg return res; 3090 1.1.1.2 mrg } 3091 1.1.1.2 mrg 3092 1.1.1.2 mrg if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ()) 3093 1.1.1.2 mrg if (tree rhs_cst = rhs->maybe_get_constant ()) 3094 1.1.1.2 mrg { 3095 1.1.1.2 mrg tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst); 3096 1.1.1.2 mrg if (res.is_known ()) 3097 1.1.1.2 mrg return res; 3098 1.1.1.2 mrg } 3099 1.1.1.2 mrg 3100 1.1.1.2 mrg return tristate::TS_UNKNOWN; 3101 1.1.1.2 mrg } 3102 1.1.1.2 mrg 3103 1.1.1.2 mrg /* Subroutine of region_model::eval_condition_without_cm, for rejecting 3104 1.1.1.2 mrg equality of INIT_VAL(PARM) with &LOCAL. */ 3105 1.1.1.2 mrg 3106 1.1.1.2 mrg tristate 3107 1.1.1.2 mrg region_model::compare_initial_and_pointer (const initial_svalue *init, 3108 1.1.1.2 mrg const region_svalue *ptr) const 3109 1.1.1.2 mrg { 3110 1.1.1.2 mrg const region *pointee = ptr->get_pointee (); 3111 1.1.1.2 mrg 3112 1.1.1.2 mrg /* If we have a pointer to something within a stack frame, it can't be the 3113 1.1.1.2 mrg initial value of a param. 
*/ 3114 1.1.1.2 mrg if (pointee->maybe_get_frame_region ()) 3115 1.1.1.2 mrg if (init->initial_value_of_param_p ()) 3116 1.1.1.2 mrg return tristate::TS_FALSE; 3117 1.1 mrg 3118 1.1 mrg return tristate::TS_UNKNOWN; 3119 1.1 mrg } 3120 1.1 mrg 3121 1.1.1.2 mrg /* Handle various constraints of the form: 3122 1.1.1.2 mrg LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)) 3123 1.1.1.2 mrg OP : == or != 3124 1.1.1.2 mrg RHS: zero 3125 1.1.1.2 mrg and (with a cast): 3126 1.1.1.2 mrg LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS)) 3127 1.1.1.2 mrg OP : == or != 3128 1.1.1.2 mrg RHS: zero 3129 1.1.1.2 mrg by adding constraints for INNER_LHS INNEROP INNER_RHS. 3130 1.1.1.2 mrg 3131 1.1.1.2 mrg Return true if this function can fully handle the constraint; if 3132 1.1.1.2 mrg so, add the implied constraint(s) and write true to *OUT if they 3133 1.1.1.2 mrg are consistent with existing constraints, or write false to *OUT 3134 1.1.1.2 mrg if they contradicts existing constraints. 3135 1.1.1.2 mrg 3136 1.1.1.2 mrg Return false for cases that this function doeesn't know how to handle. 3137 1.1.1.2 mrg 3138 1.1.1.2 mrg For example, if we're checking a stored conditional, we'll have 3139 1.1.1.2 mrg something like: 3140 1.1.1.2 mrg LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)) 3141 1.1.1.2 mrg OP : NE_EXPR 3142 1.1.1.2 mrg RHS: zero 3143 1.1.1.2 mrg which this function can turn into an add_constraint of: 3144 1.1.1.2 mrg (&HEAP_ALLOCATED_REGION(8) != (int *)0B) 3145 1.1.1.2 mrg 3146 1.1.1.2 mrg Similarly, optimized && and || conditionals lead to e.g. 
3147 1.1.1.2 mrg if (p && q) 3148 1.1.1.2 mrg becoming gimple like this: 3149 1.1.1.2 mrg _1 = p_6 == 0B; 3150 1.1.1.2 mrg _2 = q_8 == 0B 3151 1.1.1.2 mrg _3 = _1 | _2 3152 1.1.1.2 mrg On the "_3 is false" branch we can have constraints of the form: 3153 1.1.1.2 mrg ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B) 3154 1.1.1.2 mrg | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B)) 3155 1.1.1.2 mrg == 0 3156 1.1.1.2 mrg which implies that both _1 and _2 are false, 3157 1.1.1.2 mrg which this function can turn into a pair of add_constraints of 3158 1.1.1.2 mrg (&HEAP_ALLOCATED_REGION(8)!=(int *)0B) 3159 1.1.1.2 mrg and: 3160 1.1.1.2 mrg (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */ 3161 1.1.1.2 mrg 3162 1.1.1.2 mrg bool 3163 1.1.1.2 mrg region_model::add_constraints_from_binop (const svalue *outer_lhs, 3164 1.1.1.2 mrg enum tree_code outer_op, 3165 1.1.1.2 mrg const svalue *outer_rhs, 3166 1.1.1.2 mrg bool *out, 3167 1.1.1.2 mrg region_model_context *ctxt) 3168 1.1.1.2 mrg { 3169 1.1.1.2 mrg while (const svalue *cast = outer_lhs->maybe_undo_cast ()) 3170 1.1.1.2 mrg outer_lhs = cast; 3171 1.1.1.2 mrg const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue (); 3172 1.1.1.2 mrg if (!binop_sval) 3173 1.1.1.2 mrg return false; 3174 1.1.1.2 mrg if (!outer_rhs->all_zeroes_p ()) 3175 1.1.1.2 mrg return false; 3176 1.1.1.2 mrg 3177 1.1.1.2 mrg const svalue *inner_lhs = binop_sval->get_arg0 (); 3178 1.1.1.2 mrg enum tree_code inner_op = binop_sval->get_op (); 3179 1.1.1.2 mrg const svalue *inner_rhs = binop_sval->get_arg1 (); 3180 1.1.1.2 mrg 3181 1.1.1.2 mrg if (outer_op != NE_EXPR && outer_op != EQ_EXPR) 3182 1.1.1.2 mrg return false; 3183 1.1.1.2 mrg 3184 1.1.1.2 mrg /* We have either 3185 1.1.1.2 mrg - "OUTER_LHS != false" (i.e. OUTER is true), or 3186 1.1.1.2 mrg - "OUTER_LHS == false" (i.e. OUTER is false). 
*/ 3187 1.1.1.2 mrg bool is_true = outer_op == NE_EXPR; 3188 1.1.1.2 mrg 3189 1.1.1.2 mrg switch (inner_op) 3190 1.1.1.2 mrg { 3191 1.1.1.2 mrg default: 3192 1.1.1.2 mrg return false; 3193 1.1.1.2 mrg 3194 1.1.1.2 mrg case EQ_EXPR: 3195 1.1.1.2 mrg case NE_EXPR: 3196 1.1.1.2 mrg { 3197 1.1.1.2 mrg /* ...and "(inner_lhs OP inner_rhs) == 0" 3198 1.1.1.2 mrg then (inner_lhs OP inner_rhs) must have the same 3199 1.1.1.2 mrg logical value as LHS. */ 3200 1.1.1.2 mrg if (!is_true) 3201 1.1.1.2 mrg inner_op = invert_tree_comparison (inner_op, false /* honor_nans */); 3202 1.1.1.2 mrg *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt); 3203 1.1.1.2 mrg return true; 3204 1.1.1.2 mrg } 3205 1.1.1.2 mrg break; 3206 1.1.1.2 mrg 3207 1.1.1.2 mrg case BIT_AND_EXPR: 3208 1.1.1.2 mrg if (is_true) 3209 1.1.1.2 mrg { 3210 1.1.1.2 mrg /* ...and "(inner_lhs & inner_rhs) != 0" 3211 1.1.1.2 mrg then both inner_lhs and inner_rhs must be true. */ 3212 1.1.1.2 mrg const svalue *false_sval 3213 1.1.1.2 mrg = m_mgr->get_or_create_constant_svalue (boolean_false_node); 3214 1.1.1.2 mrg bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt); 3215 1.1.1.2 mrg bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt); 3216 1.1.1.2 mrg *out = sat1 && sat2; 3217 1.1.1.2 mrg return true; 3218 1.1.1.2 mrg } 3219 1.1.1.2 mrg return false; 3220 1.1.1.2 mrg 3221 1.1.1.2 mrg case BIT_IOR_EXPR: 3222 1.1.1.2 mrg if (!is_true) 3223 1.1.1.2 mrg { 3224 1.1.1.2 mrg /* ...and "(inner_lhs | inner_rhs) == 0" 3225 1.1.1.2 mrg i.e. "(inner_lhs | inner_rhs)" is false 3226 1.1.1.2 mrg then both inner_lhs and inner_rhs must be false. 
*/ 3227 1.1.1.2 mrg const svalue *false_sval 3228 1.1.1.2 mrg = m_mgr->get_or_create_constant_svalue (boolean_false_node); 3229 1.1.1.2 mrg bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt); 3230 1.1.1.2 mrg bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt); 3231 1.1.1.2 mrg *out = sat1 && sat2; 3232 1.1.1.2 mrg return true; 3233 1.1.1.2 mrg } 3234 1.1.1.2 mrg return false; 3235 1.1.1.2 mrg } 3236 1.1.1.2 mrg } 3237 1.1.1.2 mrg 3238 1.1 mrg /* Attempt to add the constraint "LHS OP RHS" to this region_model. 3239 1.1 mrg If it is consistent with existing constraints, add it, and return true. 3240 1.1 mrg Return false if it contradicts existing constraints. 3241 1.1 mrg Use CTXT for reporting any diagnostics associated with the accesses. */ 3242 1.1 mrg 3243 1.1 mrg bool 3244 1.1 mrg region_model::add_constraint (tree lhs, enum tree_code op, tree rhs, 3245 1.1 mrg region_model_context *ctxt) 3246 1.1 mrg { 3247 1.1 mrg /* For now, make no attempt to capture constraints on floating-point 3248 1.1 mrg values. */ 3249 1.1 mrg if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs))) 3250 1.1 mrg return true; 3251 1.1 mrg 3252 1.1.1.2 mrg const svalue *lhs_sval = get_rvalue (lhs, ctxt); 3253 1.1.1.2 mrg const svalue *rhs_sval = get_rvalue (rhs, ctxt); 3254 1.1.1.2 mrg 3255 1.1.1.2 mrg return add_constraint (lhs_sval, op, rhs_sval, ctxt); 3256 1.1.1.2 mrg } 3257 1.1.1.2 mrg 3258 1.1.1.2 mrg /* Attempt to add the constraint "LHS OP RHS" to this region_model. 3259 1.1.1.2 mrg If it is consistent with existing constraints, add it, and return true. 3260 1.1.1.2 mrg Return false if it contradicts existing constraints. 3261 1.1.1.2 mrg Use CTXT for reporting any diagnostics associated with the accesses. 
*/ 3262 1.1 mrg 3263 1.1.1.2 mrg bool 3264 1.1.1.2 mrg region_model::add_constraint (const svalue *lhs, 3265 1.1.1.2 mrg enum tree_code op, 3266 1.1.1.2 mrg const svalue *rhs, 3267 1.1.1.2 mrg region_model_context *ctxt) 3268 1.1.1.2 mrg { 3269 1.1.1.2 mrg tristate t_cond = eval_condition (lhs, op, rhs); 3270 1.1 mrg 3271 1.1 mrg /* If we already have the condition, do nothing. */ 3272 1.1 mrg if (t_cond.is_true ()) 3273 1.1 mrg return true; 3274 1.1 mrg 3275 1.1 mrg /* Reject a constraint that would contradict existing knowledge, as 3276 1.1 mrg unsatisfiable. */ 3277 1.1 mrg if (t_cond.is_false ()) 3278 1.1 mrg return false; 3279 1.1 mrg 3280 1.1.1.2 mrg bool out; 3281 1.1.1.2 mrg if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt)) 3282 1.1.1.2 mrg return out; 3283 1.1 mrg 3284 1.1.1.2 mrg /* Attempt to store the constraint. */ 3285 1.1.1.2 mrg if (!m_constraints->add_constraint (lhs, op, rhs)) 3286 1.1.1.2 mrg return false; 3287 1.1 mrg 3288 1.1 mrg /* Notify the context, if any. This exists so that the state machines 3289 1.1 mrg in a program_state can be notified about the condition, and so can 3290 1.1 mrg set sm-state for e.g. unchecked->checked, both for cfg-edges, and 3291 1.1 mrg when synthesizing constraints as above. */ 3292 1.1 mrg if (ctxt) 3293 1.1 mrg ctxt->on_condition (lhs, op, rhs); 3294 1.1 mrg 3295 1.1.1.2 mrg /* If we have ®ION == NULL, then drop dynamic extents for REGION (for 3296 1.1.1.2 mrg the case where REGION is heap-allocated and thus could be NULL). */ 3297 1.1.1.2 mrg if (tree rhs_cst = rhs->maybe_get_constant ()) 3298 1.1.1.2 mrg if (op == EQ_EXPR && zerop (rhs_cst)) 3299 1.1.1.2 mrg if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ()) 3300 1.1.1.2 mrg unset_dynamic_extents (region_sval->get_pointee ()); 3301 1.1 mrg 3302 1.1.1.2 mrg return true; 3303 1.1 mrg } 3304 1.1 mrg 3305 1.1.1.2 mrg /* As above, but when returning false, if OUT is non-NULL, write a 3306 1.1.1.2 mrg new rejected_constraint to *OUT. 
*/ 3307 1.1 mrg 3308 1.1.1.2 mrg bool 3309 1.1.1.2 mrg region_model::add_constraint (tree lhs, enum tree_code op, tree rhs, 3310 1.1.1.2 mrg region_model_context *ctxt, 3311 1.1.1.2 mrg rejected_constraint **out) 3312 1.1 mrg { 3313 1.1.1.2 mrg bool sat = add_constraint (lhs, op, rhs, ctxt); 3314 1.1.1.2 mrg if (!sat && out) 3315 1.1.1.2 mrg *out = new rejected_op_constraint (*this, lhs, op, rhs); 3316 1.1.1.2 mrg return sat; 3317 1.1 mrg } 3318 1.1 mrg 3319 1.1 mrg /* Determine what is known about the condition "LHS OP RHS" within 3320 1.1 mrg this model. 3321 1.1 mrg Use CTXT for reporting any diagnostics associated with the accesses. */ 3322 1.1 mrg 3323 1.1 mrg tristate 3324 1.1 mrg region_model::eval_condition (tree lhs, 3325 1.1 mrg enum tree_code op, 3326 1.1 mrg tree rhs, 3327 1.1 mrg region_model_context *ctxt) 3328 1.1 mrg { 3329 1.1 mrg /* For now, make no attempt to model constraints on floating-point 3330 1.1 mrg values. */ 3331 1.1 mrg if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs))) 3332 1.1 mrg return tristate::unknown (); 3333 1.1 mrg 3334 1.1 mrg return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt)); 3335 1.1 mrg } 3336 1.1 mrg 3337 1.1.1.2 mrg /* Implementation of region_model::get_representative_path_var. 3338 1.1.1.2 mrg Attempt to return a path_var that represents SVAL, or return NULL_TREE. 3339 1.1.1.2 mrg Use VISITED to prevent infinite mutual recursion with the overload for 3340 1.1.1.2 mrg regions. */ 3341 1.1 mrg 3342 1.1.1.2 mrg path_var 3343 1.1.1.2 mrg region_model::get_representative_path_var_1 (const svalue *sval, 3344 1.1.1.2 mrg svalue_set *visited) const 3345 1.1 mrg { 3346 1.1.1.2 mrg gcc_assert (sval); 3347 1.1 mrg 3348 1.1.1.2 mrg /* Prevent infinite recursion. 
*/ 3349 1.1.1.2 mrg if (visited->contains (sval)) 3350 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3351 1.1.1.2 mrg visited->add (sval); 3352 1.1.1.2 mrg 3353 1.1.1.2 mrg /* Handle casts by recursion into get_representative_path_var. */ 3354 1.1.1.2 mrg if (const svalue *cast_sval = sval->maybe_undo_cast ()) 3355 1.1.1.2 mrg { 3356 1.1.1.2 mrg path_var result = get_representative_path_var (cast_sval, visited); 3357 1.1.1.2 mrg tree orig_type = sval->get_type (); 3358 1.1.1.2 mrg /* If necessary, wrap the result in a cast. */ 3359 1.1.1.2 mrg if (result.m_tree && orig_type) 3360 1.1.1.2 mrg result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree); 3361 1.1.1.2 mrg return result; 3362 1.1.1.2 mrg } 3363 1.1 mrg 3364 1.1.1.2 mrg auto_vec<path_var> pvs; 3365 1.1.1.2 mrg m_store.get_representative_path_vars (this, visited, sval, &pvs); 3366 1.1 mrg 3367 1.1.1.2 mrg if (tree cst = sval->maybe_get_constant ()) 3368 1.1.1.2 mrg pvs.safe_push (path_var (cst, 0)); 3369 1.1 mrg 3370 1.1 mrg /* Handle string literals and various other pointers. */ 3371 1.1.1.2 mrg if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ()) 3372 1.1 mrg { 3373 1.1.1.2 mrg const region *reg = ptr_sval->get_pointee (); 3374 1.1.1.2 mrg if (path_var pv = get_representative_path_var (reg, visited)) 3375 1.1.1.2 mrg return path_var (build1 (ADDR_EXPR, 3376 1.1.1.2 mrg sval->get_type (), 3377 1.1.1.2 mrg pv.m_tree), 3378 1.1.1.2 mrg pv.m_stack_depth); 3379 1.1.1.2 mrg } 3380 1.1.1.2 mrg 3381 1.1.1.2 mrg /* If we have a sub_svalue, look for ways to represent the parent. 
*/ 3382 1.1.1.2 mrg if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ()) 3383 1.1.1.2 mrg { 3384 1.1.1.2 mrg const svalue *parent_sval = sub_sval->get_parent (); 3385 1.1.1.2 mrg const region *subreg = sub_sval->get_subregion (); 3386 1.1.1.2 mrg if (path_var parent_pv 3387 1.1.1.2 mrg = get_representative_path_var (parent_sval, visited)) 3388 1.1.1.2 mrg if (const field_region *field_reg = subreg->dyn_cast_field_region ()) 3389 1.1.1.2 mrg return path_var (build3 (COMPONENT_REF, 3390 1.1.1.2 mrg sval->get_type (), 3391 1.1.1.2 mrg parent_pv.m_tree, 3392 1.1.1.2 mrg field_reg->get_field (), 3393 1.1.1.2 mrg NULL_TREE), 3394 1.1.1.2 mrg parent_pv.m_stack_depth); 3395 1.1.1.2 mrg } 3396 1.1.1.2 mrg 3397 1.1.1.2 mrg /* Handle binops. */ 3398 1.1.1.2 mrg if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ()) 3399 1.1.1.2 mrg if (path_var lhs_pv 3400 1.1.1.2 mrg = get_representative_path_var (binop_sval->get_arg0 (), visited)) 3401 1.1.1.2 mrg if (path_var rhs_pv 3402 1.1.1.2 mrg = get_representative_path_var (binop_sval->get_arg1 (), visited)) 3403 1.1.1.2 mrg return path_var (build2 (binop_sval->get_op (), 3404 1.1.1.2 mrg sval->get_type (), 3405 1.1.1.2 mrg lhs_pv.m_tree, rhs_pv.m_tree), 3406 1.1.1.2 mrg lhs_pv.m_stack_depth); 3407 1.1.1.2 mrg 3408 1.1.1.2 mrg if (pvs.length () < 1) 3409 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3410 1.1.1.2 mrg 3411 1.1.1.2 mrg pvs.qsort (readability_comparator); 3412 1.1.1.2 mrg return pvs[0]; 3413 1.1.1.2 mrg } 3414 1.1.1.2 mrg 3415 1.1.1.2 mrg /* Attempt to return a path_var that represents SVAL, or return NULL_TREE. 3416 1.1.1.2 mrg Use VISITED to prevent infinite mutual recursion with the overload for 3417 1.1.1.2 mrg regions 3418 1.1.1.2 mrg 3419 1.1.1.2 mrg This function defers to get_representative_path_var_1 to do the work; 3420 1.1.1.2 mrg it adds verification that get_representative_path_var_1 returned a tree 3421 1.1.1.2 mrg of the correct type. 
*/ 3422 1.1 mrg 3423 1.1 mrg path_var 3424 1.1.1.2 mrg region_model::get_representative_path_var (const svalue *sval, 3425 1.1.1.2 mrg svalue_set *visited) const 3426 1.1 mrg { 3427 1.1.1.2 mrg if (sval == NULL) 3428 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3429 1.1 mrg 3430 1.1.1.2 mrg tree orig_type = sval->get_type (); 3431 1.1 mrg 3432 1.1.1.2 mrg path_var result = get_representative_path_var_1 (sval, visited); 3433 1.1 mrg 3434 1.1.1.2 mrg /* Verify that the result has the same type as SVAL, if any. */ 3435 1.1.1.2 mrg if (result.m_tree && orig_type) 3436 1.1.1.2 mrg gcc_assert (TREE_TYPE (result.m_tree) == orig_type); 3437 1.1.1.2 mrg 3438 1.1.1.2 mrg return result; 3439 1.1.1.2 mrg } 3440 1.1.1.2 mrg 3441 1.1.1.2 mrg /* Attempt to return a tree that represents SVAL, or return NULL_TREE. 3442 1.1.1.2 mrg 3443 1.1.1.2 mrg Strip off any top-level cast, to avoid messages like 3444 1.1.1.2 mrg double-free of '(void *)ptr' 3445 1.1.1.2 mrg from analyzer diagnostics. */ 3446 1.1.1.2 mrg 3447 1.1.1.2 mrg tree 3448 1.1.1.2 mrg region_model::get_representative_tree (const svalue *sval) const 3449 1.1.1.2 mrg { 3450 1.1.1.2 mrg svalue_set visited; 3451 1.1.1.2 mrg tree expr = get_representative_path_var (sval, &visited).m_tree; 3452 1.1 mrg 3453 1.1.1.2 mrg /* Strip off any top-level cast. */ 3454 1.1.1.2 mrg if (expr && TREE_CODE (expr) == NOP_EXPR) 3455 1.1.1.2 mrg expr = TREE_OPERAND (expr, 0); 3456 1.1 mrg 3457 1.1.1.2 mrg return fixup_tree_for_diagnostic (expr); 3458 1.1 mrg } 3459 1.1 mrg 3460 1.1.1.2 mrg /* Implementation of region_model::get_representative_path_var. 3461 1.1 mrg 3462 1.1.1.2 mrg Attempt to return a path_var that represents REG, or return 3463 1.1.1.2 mrg the NULL path_var. 3464 1.1.1.2 mrg For example, a region for a field of a local would be a path_var 3465 1.1.1.2 mrg wrapping a COMPONENT_REF. 3466 1.1.1.2 mrg Use VISITED to prevent infinite mutual recursion with the overload for 3467 1.1.1.2 mrg svalues. 
*/ 3468 1.1.1.2 mrg 3469 1.1.1.2 mrg path_var 3470 1.1.1.2 mrg region_model::get_representative_path_var_1 (const region *reg, 3471 1.1.1.2 mrg svalue_set *visited) const 3472 1.1 mrg { 3473 1.1.1.2 mrg switch (reg->get_kind ()) 3474 1.1.1.2 mrg { 3475 1.1.1.2 mrg default: 3476 1.1.1.2 mrg gcc_unreachable (); 3477 1.1.1.2 mrg 3478 1.1.1.2 mrg case RK_FRAME: 3479 1.1.1.2 mrg case RK_GLOBALS: 3480 1.1.1.2 mrg case RK_CODE: 3481 1.1.1.2 mrg case RK_HEAP: 3482 1.1.1.2 mrg case RK_STACK: 3483 1.1.1.2 mrg case RK_ROOT: 3484 1.1.1.2 mrg /* Regions that represent memory spaces are not expressible as trees. */ 3485 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3486 1.1.1.2 mrg 3487 1.1.1.2 mrg case RK_FUNCTION: 3488 1.1.1.2 mrg { 3489 1.1.1.2 mrg const function_region *function_reg 3490 1.1.1.2 mrg = as_a <const function_region *> (reg); 3491 1.1.1.2 mrg return path_var (function_reg->get_fndecl (), 0); 3492 1.1.1.2 mrg } 3493 1.1.1.2 mrg case RK_LABEL: 3494 1.1.1.2 mrg { 3495 1.1.1.2 mrg const label_region *label_reg = as_a <const label_region *> (reg); 3496 1.1.1.2 mrg return path_var (label_reg->get_label (), 0); 3497 1.1.1.2 mrg } 3498 1.1.1.2 mrg 3499 1.1.1.2 mrg case RK_SYMBOLIC: 3500 1.1 mrg { 3501 1.1.1.2 mrg const symbolic_region *symbolic_reg 3502 1.1.1.2 mrg = as_a <const symbolic_region *> (reg); 3503 1.1.1.2 mrg const svalue *pointer = symbolic_reg->get_pointer (); 3504 1.1.1.2 mrg path_var pointer_pv = get_representative_path_var (pointer, visited); 3505 1.1.1.2 mrg if (!pointer_pv) 3506 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3507 1.1.1.2 mrg tree offset = build_int_cst (pointer->get_type (), 0); 3508 1.1.1.2 mrg return path_var (build2 (MEM_REF, 3509 1.1.1.2 mrg reg->get_type (), 3510 1.1.1.2 mrg pointer_pv.m_tree, 3511 1.1.1.2 mrg offset), 3512 1.1.1.2 mrg pointer_pv.m_stack_depth); 3513 1.1.1.2 mrg } 3514 1.1.1.2 mrg case RK_DECL: 3515 1.1.1.2 mrg { 3516 1.1.1.2 mrg const decl_region *decl_reg = as_a <const decl_region *> (reg); 3517 1.1.1.2 mrg return 
path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ()); 3518 1.1.1.2 mrg } 3519 1.1.1.2 mrg case RK_FIELD: 3520 1.1.1.2 mrg { 3521 1.1.1.2 mrg const field_region *field_reg = as_a <const field_region *> (reg); 3522 1.1.1.2 mrg path_var parent_pv 3523 1.1.1.2 mrg = get_representative_path_var (reg->get_parent_region (), visited); 3524 1.1.1.2 mrg if (!parent_pv) 3525 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3526 1.1.1.2 mrg return path_var (build3 (COMPONENT_REF, 3527 1.1.1.2 mrg reg->get_type (), 3528 1.1.1.2 mrg parent_pv.m_tree, 3529 1.1.1.2 mrg field_reg->get_field (), 3530 1.1.1.2 mrg NULL_TREE), 3531 1.1.1.2 mrg parent_pv.m_stack_depth); 3532 1.1.1.2 mrg } 3533 1.1.1.2 mrg 3534 1.1.1.2 mrg case RK_ELEMENT: 3535 1.1.1.2 mrg { 3536 1.1.1.2 mrg const element_region *element_reg 3537 1.1.1.2 mrg = as_a <const element_region *> (reg); 3538 1.1.1.2 mrg path_var parent_pv 3539 1.1.1.2 mrg = get_representative_path_var (reg->get_parent_region (), visited); 3540 1.1.1.2 mrg if (!parent_pv) 3541 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3542 1.1.1.2 mrg path_var index_pv 3543 1.1.1.2 mrg = get_representative_path_var (element_reg->get_index (), visited); 3544 1.1.1.2 mrg if (!index_pv) 3545 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3546 1.1.1.2 mrg return path_var (build4 (ARRAY_REF, 3547 1.1.1.2 mrg reg->get_type (), 3548 1.1.1.2 mrg parent_pv.m_tree, index_pv.m_tree, 3549 1.1.1.2 mrg NULL_TREE, NULL_TREE), 3550 1.1.1.2 mrg parent_pv.m_stack_depth); 3551 1.1.1.2 mrg } 3552 1.1.1.2 mrg 3553 1.1.1.2 mrg case RK_OFFSET: 3554 1.1.1.2 mrg { 3555 1.1.1.2 mrg const offset_region *offset_reg 3556 1.1.1.2 mrg = as_a <const offset_region *> (reg); 3557 1.1.1.2 mrg path_var parent_pv 3558 1.1.1.2 mrg = get_representative_path_var (reg->get_parent_region (), visited); 3559 1.1.1.2 mrg if (!parent_pv) 3560 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3561 1.1.1.2 mrg path_var offset_pv 3562 1.1.1.2 mrg = get_representative_path_var (offset_reg->get_byte_offset (), 
3563 1.1.1.2 mrg visited); 3564 1.1.1.2 mrg if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST) 3565 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3566 1.1.1.2 mrg tree addr_parent = build1 (ADDR_EXPR, 3567 1.1.1.2 mrg build_pointer_type (reg->get_type ()), 3568 1.1.1.2 mrg parent_pv.m_tree); 3569 1.1.1.2 mrg return path_var (build2 (MEM_REF, 3570 1.1.1.2 mrg reg->get_type (), 3571 1.1.1.2 mrg addr_parent, offset_pv.m_tree), 3572 1.1.1.2 mrg parent_pv.m_stack_depth); 3573 1.1.1.2 mrg } 3574 1.1.1.2 mrg 3575 1.1.1.2 mrg case RK_SIZED: 3576 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3577 1.1.1.2 mrg 3578 1.1.1.2 mrg case RK_CAST: 3579 1.1.1.2 mrg { 3580 1.1.1.2 mrg path_var parent_pv 3581 1.1.1.2 mrg = get_representative_path_var (reg->get_parent_region (), visited); 3582 1.1.1.2 mrg if (!parent_pv) 3583 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3584 1.1.1.2 mrg return path_var (build1 (NOP_EXPR, 3585 1.1.1.2 mrg reg->get_type (), 3586 1.1.1.2 mrg parent_pv.m_tree), 3587 1.1.1.2 mrg parent_pv.m_stack_depth); 3588 1.1.1.2 mrg } 3589 1.1.1.2 mrg 3590 1.1.1.2 mrg case RK_HEAP_ALLOCATED: 3591 1.1.1.2 mrg case RK_ALLOCA: 3592 1.1.1.2 mrg /* No good way to express heap-allocated/alloca regions as trees. */ 3593 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3594 1.1.1.2 mrg 3595 1.1.1.2 mrg case RK_STRING: 3596 1.1.1.2 mrg { 3597 1.1.1.2 mrg const string_region *string_reg = as_a <const string_region *> (reg); 3598 1.1.1.2 mrg return path_var (string_reg->get_string_cst (), 0); 3599 1.1 mrg } 3600 1.1.1.2 mrg 3601 1.1.1.2 mrg case RK_UNKNOWN: 3602 1.1.1.2 mrg return path_var (NULL_TREE, 0); 3603 1.1.1.2 mrg } 3604 1.1 mrg } 3605 1.1 mrg 3606 1.1.1.2 mrg /* Attempt to return a path_var that represents REG, or return 3607 1.1.1.2 mrg the NULL path_var. 3608 1.1.1.2 mrg For example, a region for a field of a local would be a path_var 3609 1.1.1.2 mrg wrapping a COMPONENT_REF. 
3610 1.1.1.2 mrg Use VISITED to prevent infinite mutual recursion with the overload for 3611 1.1.1.2 mrg svalues. 3612 1.1.1.2 mrg 3613 1.1.1.2 mrg This function defers to get_representative_path_var_1 to do the work; 3614 1.1.1.2 mrg it adds verification that get_representative_path_var_1 returned a tree 3615 1.1.1.2 mrg of the correct type. */ 3616 1.1 mrg 3617 1.1.1.2 mrg path_var 3618 1.1.1.2 mrg region_model::get_representative_path_var (const region *reg, 3619 1.1.1.2 mrg svalue_set *visited) const 3620 1.1 mrg { 3621 1.1.1.2 mrg path_var result = get_representative_path_var_1 (reg, visited); 3622 1.1 mrg 3623 1.1.1.2 mrg /* Verify that the result has the same type as REG, if any. */ 3624 1.1.1.2 mrg if (result.m_tree && reg->get_type ()) 3625 1.1.1.2 mrg gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ()); 3626 1.1 mrg 3627 1.1.1.2 mrg return result; 3628 1.1 mrg } 3629 1.1 mrg 3630 1.1 mrg /* Update this model for any phis in SNODE, assuming we came from 3631 1.1 mrg LAST_CFG_SUPEREDGE. */ 3632 1.1 mrg 3633 1.1 mrg void 3634 1.1 mrg region_model::update_for_phis (const supernode *snode, 3635 1.1 mrg const cfg_superedge *last_cfg_superedge, 3636 1.1 mrg region_model_context *ctxt) 3637 1.1 mrg { 3638 1.1 mrg gcc_assert (last_cfg_superedge); 3639 1.1 mrg 3640 1.1.1.2 mrg /* Copy this state and pass it to handle_phi so that all of the phi stmts 3641 1.1.1.2 mrg are effectively handled simultaneously. */ 3642 1.1.1.2 mrg const region_model old_state (*this); 3643 1.1.1.2 mrg 3644 1.1 mrg for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis (); 3645 1.1 mrg !gsi_end_p (gpi); gsi_next (&gpi)) 3646 1.1 mrg { 3647 1.1 mrg gphi *phi = gpi.phi (); 3648 1.1 mrg 3649 1.1 mrg tree src = last_cfg_superedge->get_phi_arg (phi); 3650 1.1 mrg tree lhs = gimple_phi_result (phi); 3651 1.1 mrg 3652 1.1.1.2 mrg /* Update next_state based on phi and old_state. 
*/ 3653 1.1.1.2 mrg handle_phi (phi, lhs, src, old_state, ctxt); 3654 1.1 mrg } 3655 1.1 mrg } 3656 1.1 mrg 3657 1.1 mrg /* Attempt to update this model for taking EDGE (where the last statement 3658 1.1 mrg was LAST_STMT), returning true if the edge can be taken, false 3659 1.1 mrg otherwise. 3660 1.1.1.2 mrg When returning false, if OUT is non-NULL, write a new rejected_constraint 3661 1.1.1.2 mrg to it. 3662 1.1 mrg 3663 1.1 mrg For CFG superedges where LAST_STMT is a conditional or a switch 3664 1.1 mrg statement, attempt to add the relevant conditions for EDGE to this 3665 1.1 mrg model, returning true if they are feasible, or false if they are 3666 1.1 mrg impossible. 3667 1.1 mrg 3668 1.1 mrg For call superedges, push frame information and store arguments 3669 1.1 mrg into parameters. 3670 1.1 mrg 3671 1.1 mrg For return superedges, pop frame information and store return 3672 1.1 mrg values into any lhs. 3673 1.1 mrg 3674 1.1 mrg Rejection of call/return superedges happens elsewhere, in 3675 1.1 mrg program_point::on_edge (i.e. based on program point, rather 3676 1.1 mrg than program state). */ 3677 1.1 mrg 3678 1.1 mrg bool 3679 1.1 mrg region_model::maybe_update_for_edge (const superedge &edge, 3680 1.1 mrg const gimple *last_stmt, 3681 1.1.1.2 mrg region_model_context *ctxt, 3682 1.1.1.2 mrg rejected_constraint **out) 3683 1.1 mrg { 3684 1.1 mrg /* Handle frame updates for interprocedural edges. 
*/ 3685 1.1 mrg switch (edge.m_kind) 3686 1.1 mrg { 3687 1.1 mrg default: 3688 1.1 mrg break; 3689 1.1 mrg 3690 1.1 mrg case SUPEREDGE_CALL: 3691 1.1 mrg { 3692 1.1 mrg const call_superedge *call_edge = as_a <const call_superedge *> (&edge); 3693 1.1 mrg update_for_call_superedge (*call_edge, ctxt); 3694 1.1 mrg } 3695 1.1 mrg break; 3696 1.1 mrg 3697 1.1 mrg case SUPEREDGE_RETURN: 3698 1.1 mrg { 3699 1.1 mrg const return_superedge *return_edge 3700 1.1 mrg = as_a <const return_superedge *> (&edge); 3701 1.1 mrg update_for_return_superedge (*return_edge, ctxt); 3702 1.1 mrg } 3703 1.1 mrg break; 3704 1.1 mrg 3705 1.1 mrg case SUPEREDGE_INTRAPROCEDURAL_CALL: 3706 1.1 mrg { 3707 1.1 mrg const callgraph_superedge *cg_sedge 3708 1.1 mrg = as_a <const callgraph_superedge *> (&edge); 3709 1.1 mrg update_for_call_summary (*cg_sedge, ctxt); 3710 1.1 mrg } 3711 1.1 mrg break; 3712 1.1 mrg } 3713 1.1 mrg 3714 1.1 mrg if (last_stmt == NULL) 3715 1.1 mrg return true; 3716 1.1 mrg 3717 1.1 mrg /* Apply any constraints for conditionals/switch statements. */ 3718 1.1 mrg 3719 1.1 mrg if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt)) 3720 1.1 mrg { 3721 1.1 mrg const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge); 3722 1.1.1.2 mrg return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out); 3723 1.1 mrg } 3724 1.1 mrg 3725 1.1 mrg if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt)) 3726 1.1 mrg { 3727 1.1 mrg const switch_cfg_superedge *switch_sedge 3728 1.1 mrg = as_a <const switch_cfg_superedge *> (&edge); 3729 1.1.1.2 mrg return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, 3730 1.1.1.2 mrg ctxt, out); 3731 1.1 mrg } 3732 1.1 mrg 3733 1.1.1.2 mrg /* Apply any constraints due to an exception being thrown. 
*/ 3734 1.1.1.2 mrg if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge)) 3735 1.1.1.2 mrg if (cfg_sedge->get_flags () & EDGE_EH) 3736 1.1.1.2 mrg return apply_constraints_for_exception (last_stmt, ctxt, out); 3737 1.1.1.2 mrg 3738 1.1 mrg return true; 3739 1.1 mrg } 3740 1.1 mrg 3741 1.1 mrg /* Push a new frame_region on to the stack region. 3742 1.1 mrg Populate the frame_region with child regions for the function call's 3743 1.1 mrg parameters, using values from the arguments at the callsite in the 3744 1.1 mrg caller's frame. */ 3745 1.1 mrg 3746 1.1 mrg void 3747 1.1.1.2 mrg region_model::update_for_gcall (const gcall *call_stmt, 3748 1.1.1.2 mrg region_model_context *ctxt, 3749 1.1.1.2 mrg function *callee) 3750 1.1 mrg { 3751 1.1.1.2 mrg /* Build a vec of argument svalues, using the current top 3752 1.1 mrg frame for resolving tree expressions. */ 3753 1.1.1.2 mrg auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt)); 3754 1.1 mrg 3755 1.1 mrg for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++) 3756 1.1 mrg { 3757 1.1 mrg tree arg = gimple_call_arg (call_stmt, i); 3758 1.1.1.2 mrg arg_svals.quick_push (get_rvalue (arg, ctxt)); 3759 1.1 mrg } 3760 1.1 mrg 3761 1.1.1.2 mrg if(!callee) 3762 1.1.1.2 mrg { 3763 1.1.1.2 mrg /* Get the function * from the gcall. */ 3764 1.1.1.2 mrg tree fn_decl = get_fndecl_for_call (call_stmt,ctxt); 3765 1.1.1.2 mrg callee = DECL_STRUCT_FUNCTION (fn_decl); 3766 1.1.1.2 mrg } 3767 1.1.1.2 mrg 3768 1.1.1.2 mrg push_frame (callee, &arg_svals, ctxt); 3769 1.1 mrg } 3770 1.1 mrg 3771 1.1 mrg /* Pop the top-most frame_region from the stack, and copy the return 3772 1.1 mrg region's values (if any) into the region for the lvalue of the LHS of 3773 1.1 mrg the call (if any). 
*/ 3774 1.1 mrg 3775 1.1 mrg void 3776 1.1.1.2 mrg region_model::update_for_return_gcall (const gcall *call_stmt, 3777 1.1.1.2 mrg region_model_context *ctxt) 3778 1.1.1.2 mrg { 3779 1.1.1.2 mrg /* Get the lvalue for the result of the call, passing it to pop_frame, 3780 1.1.1.2 mrg so that pop_frame can determine the region with respect to the 3781 1.1.1.2 mrg *caller* frame. */ 3782 1.1.1.2 mrg tree lhs = gimple_call_lhs (call_stmt); 3783 1.1.1.2 mrg pop_frame (lhs, NULL, ctxt); 3784 1.1.1.2 mrg } 3785 1.1.1.2 mrg 3786 1.1.1.2 mrg /* Extract calling information from the superedge and update the model for the 3787 1.1.1.2 mrg call */ 3788 1.1.1.2 mrg 3789 1.1.1.2 mrg void 3790 1.1.1.2 mrg region_model::update_for_call_superedge (const call_superedge &call_edge, 3791 1.1.1.2 mrg region_model_context *ctxt) 3792 1.1.1.2 mrg { 3793 1.1.1.2 mrg const gcall *call_stmt = call_edge.get_call_stmt (); 3794 1.1.1.2 mrg update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ()); 3795 1.1.1.2 mrg } 3796 1.1.1.2 mrg 3797 1.1.1.2 mrg /* Extract calling information from the return superedge and update the model 3798 1.1.1.2 mrg for the returning call */ 3799 1.1.1.2 mrg 3800 1.1.1.2 mrg void 3801 1.1 mrg region_model::update_for_return_superedge (const return_superedge &return_edge, 3802 1.1 mrg region_model_context *ctxt) 3803 1.1 mrg { 3804 1.1 mrg const gcall *call_stmt = return_edge.get_call_stmt (); 3805 1.1.1.2 mrg update_for_return_gcall (call_stmt, ctxt); 3806 1.1 mrg } 3807 1.1 mrg 3808 1.1 mrg /* Update this region_model with a summary of the effect of calling 3809 1.1 mrg and returning from CG_SEDGE. 3810 1.1 mrg 3811 1.1 mrg TODO: Currently this is extremely simplistic: we merely set the 3812 1.1 mrg return value to "unknown". A proper implementation would e.g. update 3813 1.1 mrg sm-state, and presumably be reworked to support multiple outcomes. 
*/ 3814 1.1 mrg 3815 1.1 mrg void 3816 1.1 mrg region_model::update_for_call_summary (const callgraph_superedge &cg_sedge, 3817 1.1 mrg region_model_context *ctxt) 3818 1.1 mrg { 3819 1.1 mrg /* For now, set any return value to "unknown". */ 3820 1.1 mrg const gcall *call_stmt = cg_sedge.get_call_stmt (); 3821 1.1 mrg tree lhs = gimple_call_lhs (call_stmt); 3822 1.1 mrg if (lhs) 3823 1.1.1.2 mrg mark_region_as_unknown (get_lvalue (lhs, ctxt), 3824 1.1.1.2 mrg ctxt ? ctxt->get_uncertainty () : NULL); 3825 1.1 mrg 3826 1.1 mrg // TODO: actually implement some kind of summary here 3827 1.1 mrg } 3828 1.1 mrg 3829 1.1 mrg /* Given a true or false edge guarded by conditional statement COND_STMT, 3830 1.1 mrg determine appropriate constraints for the edge to be taken. 3831 1.1 mrg 3832 1.1 mrg If they are feasible, add the constraints and return true. 3833 1.1 mrg 3834 1.1 mrg Return false if the constraints contradict existing knowledge 3835 1.1.1.2 mrg (and so the edge should not be taken). 3836 1.1.1.2 mrg When returning false, if OUT is non-NULL, write a new rejected_constraint 3837 1.1.1.2 mrg to it. 
*/ 3838 1.1 mrg 3839 1.1 mrg bool 3840 1.1 mrg region_model::apply_constraints_for_gcond (const cfg_superedge &sedge, 3841 1.1 mrg const gcond *cond_stmt, 3842 1.1.1.2 mrg region_model_context *ctxt, 3843 1.1.1.2 mrg rejected_constraint **out) 3844 1.1 mrg { 3845 1.1 mrg ::edge cfg_edge = sedge.get_cfg_edge (); 3846 1.1 mrg gcc_assert (cfg_edge != NULL); 3847 1.1 mrg gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)); 3848 1.1 mrg 3849 1.1 mrg enum tree_code op = gimple_cond_code (cond_stmt); 3850 1.1 mrg tree lhs = gimple_cond_lhs (cond_stmt); 3851 1.1 mrg tree rhs = gimple_cond_rhs (cond_stmt); 3852 1.1 mrg if (cfg_edge->flags & EDGE_FALSE_VALUE) 3853 1.1 mrg op = invert_tree_comparison (op, false /* honor_nans */); 3854 1.1.1.2 mrg return add_constraint (lhs, op, rhs, ctxt, out); 3855 1.1 mrg } 3856 1.1 mrg 3857 1.1 mrg /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints 3858 1.1 mrg for the edge to be taken. 3859 1.1 mrg 3860 1.1 mrg If they are feasible, add the constraints and return true. 3861 1.1 mrg 3862 1.1 mrg Return false if the constraints contradict existing knowledge 3863 1.1.1.2 mrg (and so the edge should not be taken). 3864 1.1.1.2 mrg When returning false, if OUT is non-NULL, write a new rejected_constraint 3865 1.1.1.2 mrg to it. 
*/ 3866 1.1 mrg 3867 1.1 mrg bool 3868 1.1 mrg region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge, 3869 1.1 mrg const gswitch *switch_stmt, 3870 1.1.1.2 mrg region_model_context *ctxt, 3871 1.1.1.2 mrg rejected_constraint **out) 3872 1.1 mrg { 3873 1.1.1.2 mrg bounded_ranges_manager *ranges_mgr = get_range_manager (); 3874 1.1.1.2 mrg const bounded_ranges *all_cases_ranges 3875 1.1.1.2 mrg = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt); 3876 1.1 mrg tree index = gimple_switch_index (switch_stmt); 3877 1.1.1.2 mrg const svalue *index_sval = get_rvalue (index, ctxt); 3878 1.1.1.2 mrg bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges); 3879 1.1.1.2 mrg if (!sat && out) 3880 1.1.1.2 mrg *out = new rejected_ranges_constraint (*this, index, all_cases_ranges); 3881 1.1.1.2 mrg return sat; 3882 1.1 mrg } 3883 1.1 mrg 3884 1.1.1.2 mrg /* Apply any constraints due to an exception being thrown at LAST_STMT. 3885 1.1 mrg 3886 1.1.1.2 mrg If they are feasible, add the constraints and return true. 3887 1.1 mrg 3888 1.1.1.2 mrg Return false if the constraints contradict existing knowledge 3889 1.1.1.2 mrg (and so the edge should not be taken). 3890 1.1.1.2 mrg When returning false, if OUT is non-NULL, write a new rejected_constraint 3891 1.1.1.2 mrg to it. 
*/ 3892 1.1 mrg 3893 1.1.1.2 mrg bool 3894 1.1.1.2 mrg region_model::apply_constraints_for_exception (const gimple *last_stmt, 3895 1.1.1.2 mrg region_model_context *ctxt, 3896 1.1.1.2 mrg rejected_constraint **out) 3897 1.1.1.2 mrg { 3898 1.1.1.2 mrg gcc_assert (last_stmt); 3899 1.1.1.2 mrg if (const gcall *call = dyn_cast <const gcall *> (last_stmt)) 3900 1.1.1.2 mrg if (tree callee_fndecl = get_fndecl_for_call (call, ctxt)) 3901 1.1.1.2 mrg if (is_named_call_p (callee_fndecl, "operator new", call, 1) 3902 1.1.1.2 mrg || is_named_call_p (callee_fndecl, "operator new []", call, 1)) 3903 1.1.1.2 mrg { 3904 1.1.1.2 mrg /* We have an exception thrown from operator new. 3905 1.1.1.2 mrg Add a constraint that the result was NULL, to avoid a false 3906 1.1.1.2 mrg leak report due to the result being lost when following 3907 1.1.1.2 mrg the EH edge. */ 3908 1.1.1.2 mrg if (tree lhs = gimple_call_lhs (call)) 3909 1.1.1.2 mrg return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out); 3910 1.1.1.2 mrg return true; 3911 1.1.1.2 mrg } 3912 1.1.1.2 mrg return true; 3913 1.1 mrg } 3914 1.1 mrg 3915 1.1.1.2 mrg /* For use with push_frame when handling a top-level call within the analysis. 3916 1.1.1.2 mrg PARAM has a defined but unknown initial value. 3917 1.1.1.2 mrg Anything it points to has escaped, since the calling context "knows" 3918 1.1.1.2 mrg the pointer, and thus calls to unknown functions could read/write into 3919 1.1.1.2 mrg the region. 3920 1.1.1.2 mrg If NONNULL is true, then assume that PARAM must be non-NULL. 
*/ 3921 1.1 mrg 3922 1.1 mrg void 3923 1.1.1.2 mrg region_model::on_top_level_param (tree param, 3924 1.1.1.2 mrg bool nonnull, 3925 1.1.1.2 mrg region_model_context *ctxt) 3926 1.1 mrg { 3927 1.1.1.2 mrg if (POINTER_TYPE_P (TREE_TYPE (param))) 3928 1.1 mrg { 3929 1.1.1.2 mrg const region *param_reg = get_lvalue (param, ctxt); 3930 1.1.1.2 mrg const svalue *init_ptr_sval 3931 1.1.1.2 mrg = m_mgr->get_or_create_initial_value (param_reg); 3932 1.1.1.2 mrg const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval); 3933 1.1.1.2 mrg m_store.mark_as_escaped (pointee_reg); 3934 1.1.1.2 mrg if (nonnull) 3935 1.1.1.2 mrg { 3936 1.1.1.2 mrg const svalue *null_ptr_sval 3937 1.1.1.2 mrg = m_mgr->get_or_create_null_ptr (TREE_TYPE (param)); 3938 1.1.1.2 mrg add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt); 3939 1.1.1.2 mrg } 3940 1.1 mrg } 3941 1.1 mrg } 3942 1.1 mrg 3943 1.1.1.2 mrg /* Update this region_model to reflect pushing a frame onto the stack 3944 1.1.1.2 mrg for a call to FUN. 3945 1.1 mrg 3946 1.1.1.2 mrg If ARG_SVALS is non-NULL, use it to populate the parameters 3947 1.1.1.2 mrg in the new frame. 3948 1.1.1.2 mrg Otherwise, the params have their initial_svalues. 3949 1.1 mrg 3950 1.1.1.2 mrg Return the frame_region for the new frame. */ 3951 1.1 mrg 3952 1.1.1.2 mrg const region * 3953 1.1.1.2 mrg region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals, 3954 1.1.1.2 mrg region_model_context *ctxt) 3955 1.1.1.2 mrg { 3956 1.1.1.2 mrg m_current_frame = m_mgr->get_frame_region (m_current_frame, fun); 3957 1.1.1.2 mrg if (arg_svals) 3958 1.1 mrg { 3959 1.1.1.2 mrg /* Arguments supplied from a caller frame. 
*/ 3960 1.1.1.2 mrg tree fndecl = fun->decl; 3961 1.1.1.2 mrg unsigned idx = 0; 3962 1.1.1.2 mrg for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm; 3963 1.1.1.2 mrg iter_parm = DECL_CHAIN (iter_parm), ++idx) 3964 1.1 mrg { 3965 1.1.1.2 mrg /* If there's a mismatching declaration, the call stmt might 3966 1.1.1.2 mrg not have enough args. Handle this case by leaving the 3967 1.1.1.2 mrg rest of the params as uninitialized. */ 3968 1.1.1.2 mrg if (idx >= arg_svals->length ()) 3969 1.1.1.2 mrg break; 3970 1.1.1.2 mrg tree parm_lval = iter_parm; 3971 1.1.1.2 mrg if (tree parm_default_ssa = ssa_default_def (fun, iter_parm)) 3972 1.1.1.2 mrg parm_lval = parm_default_ssa; 3973 1.1.1.2 mrg const region *parm_reg = get_lvalue (parm_lval, ctxt); 3974 1.1.1.2 mrg const svalue *arg_sval = (*arg_svals)[idx]; 3975 1.1.1.2 mrg set_value (parm_reg, arg_sval, ctxt); 3976 1.1 mrg } 3977 1.1 mrg } 3978 1.1.1.2 mrg else 3979 1.1 mrg { 3980 1.1.1.2 mrg /* Otherwise we have a top-level call within the analysis. The params 3981 1.1.1.2 mrg have defined but unknown initial values. 3982 1.1.1.2 mrg Anything they point to has escaped. */ 3983 1.1.1.2 mrg tree fndecl = fun->decl; 3984 1.1 mrg 3985 1.1.1.2 mrg /* Handle "__attribute__((nonnull))". */ 3986 1.1.1.2 mrg tree fntype = TREE_TYPE (fndecl); 3987 1.1.1.2 mrg bitmap nonnull_args = get_nonnull_args (fntype); 3988 1.1 mrg 3989 1.1.1.2 mrg unsigned parm_idx = 0; 3990 1.1.1.2 mrg for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm; 3991 1.1.1.2 mrg iter_parm = DECL_CHAIN (iter_parm)) 3992 1.1 mrg { 3993 1.1.1.2 mrg bool non_null = (nonnull_args 3994 1.1.1.2 mrg ? 
(bitmap_empty_p (nonnull_args) 3995 1.1.1.2 mrg || bitmap_bit_p (nonnull_args, parm_idx)) 3996 1.1.1.2 mrg : false); 3997 1.1.1.2 mrg if (tree parm_default_ssa = ssa_default_def (fun, iter_parm)) 3998 1.1.1.2 mrg on_top_level_param (parm_default_ssa, non_null, ctxt); 3999 1.1.1.2 mrg else 4000 1.1.1.2 mrg on_top_level_param (iter_parm, non_null, ctxt); 4001 1.1.1.2 mrg parm_idx++; 4002 1.1 mrg } 4003 1.1 mrg 4004 1.1.1.2 mrg BITMAP_FREE (nonnull_args); 4005 1.1 mrg } 4006 1.1 mrg 4007 1.1.1.2 mrg return m_current_frame; 4008 1.1 mrg } 4009 1.1 mrg 4010 1.1.1.2 mrg /* Get the function of the top-most frame in this region_model's stack. 4011 1.1.1.2 mrg There must be such a frame. */ 4012 1.1 mrg 4013 1.1.1.2 mrg function * 4014 1.1.1.2 mrg region_model::get_current_function () const 4015 1.1 mrg { 4016 1.1.1.2 mrg const frame_region *frame = get_current_frame (); 4017 1.1.1.2 mrg gcc_assert (frame); 4018 1.1.1.2 mrg return frame->get_function (); 4019 1.1.1.2 mrg } 4020 1.1 mrg 4021 1.1.1.2 mrg /* Pop the topmost frame_region from this region_model's stack; 4022 1.1 mrg 4023 1.1.1.2 mrg If RESULT_LVALUE is non-null, copy any return value from the frame 4024 1.1.1.2 mrg into the corresponding region (evaluated with respect to the *caller* 4025 1.1.1.2 mrg frame, rather than the called frame). 4026 1.1.1.2 mrg If OUT_RESULT is non-null, copy any return value from the frame 4027 1.1.1.2 mrg into *OUT_RESULT. 4028 1.1.1.2 mrg 4029 1.1.1.2 mrg If EVAL_RETURN_SVALUE is false, then don't evaluate the return value. 4030 1.1.1.2 mrg This is for use when unwinding frames e.g. due to longjmp, to suppress 4031 1.1.1.2 mrg erroneously reporting uninitialized return values. 4032 1.1 mrg 4033 1.1.1.2 mrg Purge the frame region and all its descendent regions. 4034 1.1.1.2 mrg Convert any pointers that point into such regions into 4035 1.1.1.2 mrg POISON_KIND_POPPED_STACK svalues. 
*/ 4036 1.1 mrg 4037 1.1 mrg void 4038 1.1.1.2 mrg region_model::pop_frame (tree result_lvalue, 4039 1.1.1.2 mrg const svalue **out_result, 4040 1.1.1.2 mrg region_model_context *ctxt, 4041 1.1.1.2 mrg bool eval_return_svalue) 4042 1.1 mrg { 4043 1.1.1.2 mrg gcc_assert (m_current_frame); 4044 1.1 mrg 4045 1.1.1.2 mrg /* Evaluate the result, within the callee frame. */ 4046 1.1.1.2 mrg const frame_region *frame_reg = m_current_frame; 4047 1.1.1.2 mrg tree fndecl = m_current_frame->get_function ()->decl; 4048 1.1.1.2 mrg tree result = DECL_RESULT (fndecl); 4049 1.1.1.2 mrg const svalue *retval = NULL; 4050 1.1.1.2 mrg if (result 4051 1.1.1.2 mrg && TREE_TYPE (result) != void_type_node 4052 1.1.1.2 mrg && eval_return_svalue) 4053 1.1.1.2 mrg { 4054 1.1.1.2 mrg retval = get_rvalue (result, ctxt); 4055 1.1.1.2 mrg if (out_result) 4056 1.1.1.2 mrg *out_result = retval; 4057 1.1.1.2 mrg } 4058 1.1 mrg 4059 1.1.1.2 mrg /* Pop the frame. */ 4060 1.1.1.2 mrg m_current_frame = m_current_frame->get_calling_frame (); 4061 1.1 mrg 4062 1.1.1.2 mrg if (result_lvalue && retval) 4063 1.1.1.2 mrg { 4064 1.1.1.2 mrg gcc_assert (eval_return_svalue); 4065 1.1 mrg 4066 1.1.1.2 mrg /* Compute result_dst_reg using RESULT_LVALUE *after* popping 4067 1.1.1.2 mrg the frame, but before poisoning pointers into the old frame. */ 4068 1.1.1.2 mrg const region *result_dst_reg = get_lvalue (result_lvalue, ctxt); 4069 1.1.1.2 mrg set_value (result_dst_reg, retval, ctxt); 4070 1.1.1.2 mrg } 4071 1.1 mrg 4072 1.1.1.2 mrg unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK); 4073 1.1 mrg } 4074 1.1 mrg 4075 1.1.1.2 mrg /* Get the number of frames in this region_model's stack. 
*/ 4076 1.1 mrg 4077 1.1.1.2 mrg int 4078 1.1.1.2 mrg region_model::get_stack_depth () const 4079 1.1 mrg { 4080 1.1.1.2 mrg const frame_region *frame = get_current_frame (); 4081 1.1.1.2 mrg if (frame) 4082 1.1.1.2 mrg return frame->get_stack_depth (); 4083 1.1.1.2 mrg else 4084 1.1.1.2 mrg return 0; 4085 1.1 mrg } 4086 1.1 mrg 4087 1.1.1.2 mrg /* Get the frame_region with the given index within the stack. 4088 1.1.1.2 mrg The frame_region must exist. */ 4089 1.1 mrg 4090 1.1.1.2 mrg const frame_region * 4091 1.1.1.2 mrg region_model::get_frame_at_index (int index) const 4092 1.1 mrg { 4093 1.1.1.2 mrg const frame_region *frame = get_current_frame (); 4094 1.1.1.2 mrg gcc_assert (frame); 4095 1.1.1.2 mrg gcc_assert (index >= 0); 4096 1.1.1.2 mrg gcc_assert (index <= frame->get_index ()); 4097 1.1.1.2 mrg while (index != frame->get_index ()) 4098 1.1.1.2 mrg { 4099 1.1.1.2 mrg frame = frame->get_calling_frame (); 4100 1.1.1.2 mrg gcc_assert (frame); 4101 1.1.1.2 mrg } 4102 1.1.1.2 mrg return frame; 4103 1.1 mrg } 4104 1.1 mrg 4105 1.1.1.2 mrg /* Unbind svalues for any regions in REG and below. 4106 1.1.1.2 mrg Find any pointers to such regions; convert them to 4107 1.1.1.2 mrg poisoned values of kind PKIND. 4108 1.1.1.2 mrg Also purge any dynamic extents. */ 4109 1.1 mrg 4110 1.1.1.2 mrg void 4111 1.1.1.2 mrg region_model::unbind_region_and_descendents (const region *reg, 4112 1.1.1.2 mrg enum poison_kind pkind) 4113 1.1 mrg { 4114 1.1.1.2 mrg /* Gather a set of base regions to be unbound. 
*/ 4115 1.1.1.2 mrg hash_set<const region *> base_regs; 4116 1.1.1.2 mrg for (store::cluster_map_t::iterator iter = m_store.begin (); 4117 1.1.1.2 mrg iter != m_store.end (); ++iter) 4118 1.1 mrg { 4119 1.1.1.2 mrg const region *iter_base_reg = (*iter).first; 4120 1.1.1.2 mrg if (iter_base_reg->descendent_of_p (reg)) 4121 1.1.1.2 mrg base_regs.add (iter_base_reg); 4122 1.1.1.2 mrg } 4123 1.1.1.2 mrg for (hash_set<const region *>::iterator iter = base_regs.begin (); 4124 1.1.1.2 mrg iter != base_regs.end (); ++iter) 4125 1.1.1.2 mrg m_store.purge_cluster (*iter); 4126 1.1 mrg 4127 1.1.1.2 mrg /* Find any pointers to REG or its descendents; convert to poisoned. */ 4128 1.1.1.2 mrg poison_any_pointers_to_descendents (reg, pkind); 4129 1.1 mrg 4130 1.1.1.2 mrg /* Purge dynamic extents of any base regions in REG and below 4131 1.1.1.2 mrg (e.g. VLAs and alloca stack regions). */ 4132 1.1.1.2 mrg for (auto iter : m_dynamic_extents) 4133 1.1.1.2 mrg { 4134 1.1.1.2 mrg const region *iter_reg = iter.first; 4135 1.1.1.2 mrg if (iter_reg->descendent_of_p (reg)) 4136 1.1.1.2 mrg unset_dynamic_extents (iter_reg); 4137 1.1 mrg } 4138 1.1 mrg } 4139 1.1 mrg 4140 1.1.1.2 mrg /* Implementation of BindingVisitor. 4141 1.1.1.2 mrg Update the bound svalues for regions below REG to use poisoned 4142 1.1.1.2 mrg values instead. 
*/ 4143 1.1 mrg 4144 1.1.1.2 mrg struct bad_pointer_finder 4145 1.1 mrg { 4146 1.1.1.2 mrg bad_pointer_finder (const region *reg, enum poison_kind pkind, 4147 1.1.1.2 mrg region_model_manager *mgr) 4148 1.1.1.2 mrg : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0) 4149 1.1.1.2 mrg {} 4150 1.1 mrg 4151 1.1.1.2 mrg void on_binding (const binding_key *, const svalue *&sval) 4152 1.1.1.2 mrg { 4153 1.1.1.2 mrg if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ()) 4154 1.1.1.2 mrg { 4155 1.1.1.2 mrg const region *ptr_dst = ptr_sval->get_pointee (); 4156 1.1.1.2 mrg /* Poison ptrs to descendents of REG, but not to REG itself, 4157 1.1.1.2 mrg otherwise double-free detection doesn't work (since sm-state 4158 1.1.1.2 mrg for "free" is stored on the original ptr svalue). */ 4159 1.1.1.2 mrg if (ptr_dst->descendent_of_p (m_reg) 4160 1.1.1.2 mrg && ptr_dst != m_reg) 4161 1.1.1.2 mrg { 4162 1.1.1.2 mrg sval = m_mgr->get_or_create_poisoned_svalue (m_pkind, 4163 1.1.1.2 mrg sval->get_type ()); 4164 1.1.1.2 mrg ++m_count; 4165 1.1.1.2 mrg } 4166 1.1.1.2 mrg } 4167 1.1.1.2 mrg } 4168 1.1 mrg 4169 1.1.1.2 mrg const region *m_reg; 4170 1.1.1.2 mrg enum poison_kind m_pkind; 4171 1.1.1.2 mrg region_model_manager *const m_mgr; 4172 1.1.1.2 mrg int m_count; 4173 1.1.1.2 mrg }; 4174 1.1 mrg 4175 1.1.1.2 mrg /* Find any pointers to REG or its descendents; convert them to 4176 1.1.1.2 mrg poisoned values of kind PKIND. 4177 1.1.1.2 mrg Return the number of pointers that were poisoned. */ 4178 1.1 mrg 4179 1.1.1.2 mrg int 4180 1.1.1.2 mrg region_model::poison_any_pointers_to_descendents (const region *reg, 4181 1.1.1.2 mrg enum poison_kind pkind) 4182 1.1.1.2 mrg { 4183 1.1.1.2 mrg bad_pointer_finder bv (reg, pkind, m_mgr); 4184 1.1.1.2 mrg m_store.for_each_binding (bv); 4185 1.1.1.2 mrg return bv.m_count; 4186 1.1 mrg } 4187 1.1 mrg 4188 1.1.1.2 mrg /* Attempt to merge THIS with OTHER_MODEL, writing the result 4189 1.1.1.2 mrg to OUT_MODEL. 
Use POINT to distinguish values created as a 4190 1.1.1.2 mrg result of merging. */ 4191 1.1 mrg 4192 1.1.1.2 mrg bool 4193 1.1.1.2 mrg region_model::can_merge_with_p (const region_model &other_model, 4194 1.1.1.2 mrg const program_point &point, 4195 1.1.1.2 mrg region_model *out_model, 4196 1.1.1.2 mrg const extrinsic_state *ext_state, 4197 1.1.1.2 mrg const program_state *state_a, 4198 1.1.1.2 mrg const program_state *state_b) const 4199 1.1 mrg { 4200 1.1.1.2 mrg gcc_assert (out_model); 4201 1.1.1.2 mrg gcc_assert (m_mgr == other_model.m_mgr); 4202 1.1.1.2 mrg gcc_assert (m_mgr == out_model->m_mgr); 4203 1.1.1.2 mrg 4204 1.1.1.2 mrg if (m_current_frame != other_model.m_current_frame) 4205 1.1.1.2 mrg return false; 4206 1.1.1.2 mrg out_model->m_current_frame = m_current_frame; 4207 1.1 mrg 4208 1.1.1.2 mrg model_merger m (this, &other_model, point, out_model, 4209 1.1.1.2 mrg ext_state, state_a, state_b); 4210 1.1 mrg 4211 1.1.1.2 mrg if (!store::can_merge_p (&m_store, &other_model.m_store, 4212 1.1.1.2 mrg &out_model->m_store, m_mgr->get_store_manager (), 4213 1.1.1.2 mrg &m)) 4214 1.1.1.2 mrg return false; 4215 1.1 mrg 4216 1.1.1.2 mrg if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents, 4217 1.1.1.2 mrg &out_model->m_dynamic_extents)) 4218 1.1.1.2 mrg return false; 4219 1.1.1.2 mrg 4220 1.1.1.2 mrg /* Merge constraints. */ 4221 1.1.1.2 mrg constraint_manager::merge (*m_constraints, 4222 1.1.1.2 mrg *other_model.m_constraints, 4223 1.1.1.2 mrg out_model->m_constraints); 4224 1.1 mrg 4225 1.1.1.2 mrg return true; 4226 1.1 mrg } 4227 1.1 mrg 4228 1.1 mrg /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE 4229 1.1 mrg otherwise. 
*/ 4230 1.1 mrg 4231 1.1 mrg tree 4232 1.1 mrg region_model::get_fndecl_for_call (const gcall *call, 4233 1.1 mrg region_model_context *ctxt) 4234 1.1 mrg { 4235 1.1 mrg tree fn_ptr = gimple_call_fn (call); 4236 1.1 mrg if (fn_ptr == NULL_TREE) 4237 1.1 mrg return NULL_TREE; 4238 1.1.1.2 mrg const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt); 4239 1.1.1.2 mrg if (const region_svalue *fn_ptr_ptr 4240 1.1.1.2 mrg = fn_ptr_sval->dyn_cast_region_svalue ()) 4241 1.1.1.2 mrg { 4242 1.1.1.2 mrg const region *reg = fn_ptr_ptr->get_pointee (); 4243 1.1.1.2 mrg if (const function_region *fn_reg = reg->dyn_cast_function_region ()) 4244 1.1 mrg { 4245 1.1.1.2 mrg tree fn_decl = fn_reg->get_fndecl (); 4246 1.1 mrg cgraph_node *node = cgraph_node::get (fn_decl); 4247 1.1 mrg if (!node) 4248 1.1 mrg return NULL_TREE; 4249 1.1 mrg const cgraph_node *ultimate_node = node->ultimate_alias_target (); 4250 1.1 mrg if (ultimate_node) 4251 1.1 mrg return ultimate_node->decl; 4252 1.1 mrg } 4253 1.1 mrg } 4254 1.1 mrg 4255 1.1 mrg return NULL_TREE; 4256 1.1 mrg } 4257 1.1 mrg 4258 1.1.1.2 mrg /* Would be much simpler to use a lambda here, if it were supported. */ 4259 1.1 mrg 4260 1.1.1.2 mrg struct append_regions_cb_data 4261 1.1 mrg { 4262 1.1.1.2 mrg const region_model *model; 4263 1.1.1.2 mrg auto_vec<const decl_region *> *out; 4264 1.1.1.2 mrg }; 4265 1.1 mrg 4266 1.1.1.2 mrg /* Populate *OUT with all decl_regions in the current 4267 1.1.1.2 mrg frame that have clusters within the store. */ 4268 1.1 mrg 4269 1.1.1.2 mrg void 4270 1.1.1.2 mrg region_model:: 4271 1.1.1.2 mrg get_regions_for_current_frame (auto_vec<const decl_region *> *out) const 4272 1.1.1.2 mrg { 4273 1.1.1.2 mrg append_regions_cb_data data; 4274 1.1.1.2 mrg data.model = this; 4275 1.1.1.2 mrg data.out = out; 4276 1.1.1.2 mrg m_store.for_each_cluster (append_regions_cb, &data); 4277 1.1 mrg } 4278 1.1 mrg 4279 1.1.1.2 mrg /* Implementation detail of get_regions_for_current_frame. 
*/ 4280 1.1 mrg 4281 1.1 mrg void 4282 1.1.1.2 mrg region_model::append_regions_cb (const region *base_reg, 4283 1.1.1.2 mrg append_regions_cb_data *cb_data) 4284 1.1 mrg { 4285 1.1.1.2 mrg if (base_reg->get_parent_region () != cb_data->model->m_current_frame) 4286 1.1.1.2 mrg return; 4287 1.1.1.2 mrg if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ()) 4288 1.1.1.2 mrg cb_data->out->safe_push (decl_reg); 4289 1.1 mrg } 4290 1.1 mrg 4291 1.1.1.2 mrg /* Return a new region describing a heap-allocated block of memory. 4292 1.1.1.2 mrg Use CTXT to complain about tainted sizes. */ 4293 1.1 mrg 4294 1.1.1.2 mrg const region * 4295 1.1.1.2 mrg region_model::create_region_for_heap_alloc (const svalue *size_in_bytes, 4296 1.1.1.2 mrg region_model_context *ctxt) 4297 1.1 mrg { 4298 1.1.1.2 mrg const region *reg = m_mgr->create_region_for_heap_alloc (); 4299 1.1.1.2 mrg if (compat_types_p (size_in_bytes->get_type (), size_type_node)) 4300 1.1.1.2 mrg set_dynamic_extents (reg, size_in_bytes, ctxt); 4301 1.1.1.2 mrg return reg; 4302 1.1 mrg } 4303 1.1 mrg 4304 1.1.1.2 mrg /* Return a new region describing a block of memory allocated within the 4305 1.1.1.2 mrg current frame. 4306 1.1.1.2 mrg Use CTXT to complain about tainted sizes. */ 4307 1.1 mrg 4308 1.1.1.2 mrg const region * 4309 1.1.1.2 mrg region_model::create_region_for_alloca (const svalue *size_in_bytes, 4310 1.1.1.2 mrg region_model_context *ctxt) 4311 1.1.1.2 mrg { 4312 1.1.1.2 mrg const region *reg = m_mgr->create_region_for_alloca (m_current_frame); 4313 1.1.1.2 mrg if (compat_types_p (size_in_bytes->get_type (), size_type_node)) 4314 1.1.1.2 mrg set_dynamic_extents (reg, size_in_bytes, ctxt); 4315 1.1.1.2 mrg return reg; 4316 1.1.1.2 mrg } 4317 1.1 mrg 4318 1.1.1.2 mrg /* Record that the size of REG is SIZE_IN_BYTES. 4319 1.1.1.2 mrg Use CTXT to complain about tainted sizes. 
*/ 4320 1.1 mrg 4321 1.1.1.2 mrg void 4322 1.1.1.2 mrg region_model::set_dynamic_extents (const region *reg, 4323 1.1.1.2 mrg const svalue *size_in_bytes, 4324 1.1.1.2 mrg region_model_context *ctxt) 4325 1.1.1.2 mrg { 4326 1.1.1.2 mrg assert_compat_types (size_in_bytes->get_type (), size_type_node); 4327 1.1.1.2 mrg if (ctxt) 4328 1.1.1.2 mrg check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes, 4329 1.1.1.2 mrg ctxt); 4330 1.1.1.2 mrg m_dynamic_extents.put (reg, size_in_bytes); 4331 1.1.1.2 mrg } 4332 1.1 mrg 4333 1.1.1.2 mrg /* Get the recording of REG in bytes, or NULL if no dynamic size was 4334 1.1.1.2 mrg recorded. */ 4335 1.1 mrg 4336 1.1.1.2 mrg const svalue * 4337 1.1.1.2 mrg region_model::get_dynamic_extents (const region *reg) const 4338 1.1.1.2 mrg { 4339 1.1.1.2 mrg if (const svalue * const *slot = m_dynamic_extents.get (reg)) 4340 1.1.1.2 mrg return *slot; 4341 1.1.1.2 mrg return NULL; 4342 1.1 mrg } 4343 1.1 mrg 4344 1.1.1.2 mrg /* Unset any recorded dynamic size of REG. */ 4345 1.1 mrg 4346 1.1 mrg void 4347 1.1.1.2 mrg region_model::unset_dynamic_extents (const region *reg) 4348 1.1 mrg { 4349 1.1.1.2 mrg m_dynamic_extents.remove (reg); 4350 1.1 mrg } 4351 1.1 mrg 4352 1.1.1.2 mrg /* class noop_region_model_context : public region_model_context. */ 4353 1.1 mrg 4354 1.1 mrg void 4355 1.1.1.2 mrg noop_region_model_context::add_note (pending_note *pn) 4356 1.1 mrg { 4357 1.1.1.2 mrg delete pn; 4358 1.1 mrg } 4359 1.1 mrg 4360 1.1.1.2 mrg void 4361 1.1.1.2 mrg noop_region_model_context::bifurcate (custom_edge_info *info) 4362 1.1.1.2 mrg { 4363 1.1.1.2 mrg delete info; 4364 1.1.1.2 mrg } 4365 1.1 mrg 4366 1.1.1.2 mrg void 4367 1.1.1.2 mrg noop_region_model_context::terminate_path () 4368 1.1 mrg { 4369 1.1 mrg } 4370 1.1 mrg 4371 1.1.1.2 mrg /* struct model_merger. */ 4372 1.1.1.2 mrg 4373 1.1.1.2 mrg /* Dump a multiline representation of this merger to PP. 
*/ 4374 1.1 mrg 4375 1.1 mrg void 4376 1.1.1.2 mrg model_merger::dump_to_pp (pretty_printer *pp, bool simple) const 4377 1.1 mrg { 4378 1.1.1.2 mrg pp_string (pp, "model A:"); 4379 1.1.1.2 mrg pp_newline (pp); 4380 1.1.1.2 mrg m_model_a->dump_to_pp (pp, simple, true); 4381 1.1 mrg pp_newline (pp); 4382 1.1.1.2 mrg 4383 1.1.1.2 mrg pp_string (pp, "model B:"); 4384 1.1.1.2 mrg pp_newline (pp); 4385 1.1.1.2 mrg m_model_b->dump_to_pp (pp, simple, true); 4386 1.1 mrg pp_newline (pp); 4387 1.1 mrg 4388 1.1.1.2 mrg pp_string (pp, "merged model:"); 4389 1.1 mrg pp_newline (pp); 4390 1.1.1.2 mrg m_merged_model->dump_to_pp (pp, simple, true); 4391 1.1 mrg pp_newline (pp); 4392 1.1 mrg } 4393 1.1 mrg 4394 1.1.1.2 mrg /* Dump a multiline representation of this merger to FILE. */ 4395 1.1 mrg 4396 1.1 mrg void 4397 1.1.1.2 mrg model_merger::dump (FILE *fp, bool simple) const 4398 1.1 mrg { 4399 1.1 mrg pretty_printer pp; 4400 1.1 mrg pp_format_decoder (&pp) = default_tree_printer; 4401 1.1 mrg pp_show_color (&pp) = pp_show_color (global_dc->printer); 4402 1.1 mrg pp.buffer->stream = fp; 4403 1.1.1.2 mrg dump_to_pp (&pp, simple); 4404 1.1 mrg pp_flush (&pp); 4405 1.1 mrg } 4406 1.1 mrg 4407 1.1.1.2 mrg /* Dump a multiline representation of this merger to stderr. */ 4408 1.1 mrg 4409 1.1 mrg DEBUG_FUNCTION void 4410 1.1.1.2 mrg model_merger::dump (bool simple) const 4411 1.1 mrg { 4412 1.1.1.2 mrg dump (stderr, simple); 4413 1.1 mrg } 4414 1.1 mrg 4415 1.1.1.2 mrg /* Return true if it's OK to merge SVAL with other svalues. */ 4416 1.1 mrg 4417 1.1.1.2 mrg bool 4418 1.1.1.2 mrg model_merger::mergeable_svalue_p (const svalue *sval) const 4419 1.1 mrg { 4420 1.1.1.2 mrg if (m_ext_state) 4421 1.1 mrg { 4422 1.1.1.2 mrg /* Reject merging svalues that have non-purgable sm-state, 4423 1.1.1.2 mrg to avoid falsely reporting memory leaks by merging them 4424 1.1.1.2 mrg with something else. 
For example, given a local var "p", 4425 1.1.1.2 mrg reject the merger of a: 4426 1.1.1.2 mrg store_a mapping "p" to a malloc-ed ptr 4427 1.1.1.2 mrg with: 4428 1.1.1.2 mrg store_b mapping "p" to a NULL ptr. */ 4429 1.1.1.2 mrg if (m_state_a) 4430 1.1.1.2 mrg if (!m_state_a->can_purge_p (*m_ext_state, sval)) 4431 1.1.1.2 mrg return false; 4432 1.1.1.2 mrg if (m_state_b) 4433 1.1.1.2 mrg if (!m_state_b->can_purge_p (*m_ext_state, sval)) 4434 1.1.1.2 mrg return false; 4435 1.1 mrg } 4436 1.1.1.2 mrg return true; 4437 1.1 mrg } 4438 1.1 mrg 4439 1.1.1.2 mrg } // namespace ana 4440 1.1 mrg 4441 1.1.1.2 mrg /* Dump RMODEL fully to stderr (i.e. without summarization). */ 4442 1.1 mrg 4443 1.1.1.2 mrg DEBUG_FUNCTION void 4444 1.1.1.2 mrg debug (const region_model &rmodel) 4445 1.1 mrg { 4446 1.1.1.2 mrg rmodel.dump (false); 4447 1.1 mrg } 4448 1.1 mrg 4449 1.1.1.2 mrg /* class rejected_op_constraint : public rejected_constraint. */ 4450 1.1 mrg 4451 1.1 mrg void 4452 1.1.1.2 mrg rejected_op_constraint::dump_to_pp (pretty_printer *pp) const 4453 1.1 mrg { 4454 1.1.1.2 mrg region_model m (m_model); 4455 1.1.1.2 mrg const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL); 4456 1.1.1.2 mrg const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL); 4457 1.1.1.2 mrg lhs_sval->dump_to_pp (pp, true); 4458 1.1.1.2 mrg pp_printf (pp, " %s ", op_symbol_code (m_op)); 4459 1.1.1.2 mrg rhs_sval->dump_to_pp (pp, true); 4460 1.1 mrg } 4461 1.1 mrg 4462 1.1.1.2 mrg /* class rejected_ranges_constraint : public rejected_constraint. */ 4463 1.1 mrg 4464 1.1.1.2 mrg void 4465 1.1.1.2 mrg rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const 4466 1.1 mrg { 4467 1.1.1.2 mrg region_model m (m_model); 4468 1.1.1.2 mrg const svalue *sval = m.get_rvalue (m_expr, NULL); 4469 1.1.1.2 mrg sval->dump_to_pp (pp, true); 4470 1.1.1.2 mrg pp_string (pp, " in "); 4471 1.1.1.2 mrg m_ranges->dump_to_pp (pp, true); 4472 1.1 mrg } 4473 1.1 mrg 4474 1.1.1.2 mrg /* class engine. 
*/ 4475 1.1 mrg 4476 1.1.1.2 mrg /* engine's ctor. */ 4477 1.1 mrg 4478 1.1.1.2 mrg engine::engine (const supergraph *sg, logger *logger) 4479 1.1.1.2 mrg : m_sg (sg), m_mgr (logger) 4480 1.1 mrg { 4481 1.1 mrg } 4482 1.1 mrg 4483 1.1.1.2 mrg /* Dump the managed objects by class to LOGGER, and the per-class totals. */ 4484 1.1 mrg 4485 1.1 mrg void 4486 1.1.1.2 mrg engine::log_stats (logger *logger) const 4487 1.1 mrg { 4488 1.1.1.2 mrg m_mgr.log_stats (logger, true); 4489 1.1 mrg } 4490 1.1 mrg 4491 1.1 mrg namespace ana { 4492 1.1 mrg 4493 1.1 mrg #if CHECKING_P 4494 1.1 mrg 4495 1.1 mrg namespace selftest { 4496 1.1 mrg 4497 1.1 mrg /* Build a constant tree of the given type from STR. */ 4498 1.1 mrg 4499 1.1 mrg static tree 4500 1.1 mrg build_real_cst_from_string (tree type, const char *str) 4501 1.1 mrg { 4502 1.1 mrg REAL_VALUE_TYPE real; 4503 1.1 mrg real_from_string (&real, str); 4504 1.1 mrg return build_real (type, real); 4505 1.1 mrg } 4506 1.1 mrg 4507 1.1 mrg /* Append various "interesting" constants to OUT (e.g. NaN). 
*/ 4508 1.1 mrg 4509 1.1 mrg static void 4510 1.1 mrg append_interesting_constants (auto_vec<tree> *out) 4511 1.1 mrg { 4512 1.1 mrg out->safe_push (build_int_cst (integer_type_node, 0)); 4513 1.1 mrg out->safe_push (build_int_cst (integer_type_node, 42)); 4514 1.1 mrg out->safe_push (build_int_cst (unsigned_type_node, 0)); 4515 1.1 mrg out->safe_push (build_int_cst (unsigned_type_node, 42)); 4516 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "QNaN")); 4517 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN")); 4518 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "SNaN")); 4519 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN")); 4520 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "0.0")); 4521 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "-0.0")); 4522 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "Inf")); 4523 1.1 mrg out->safe_push (build_real_cst_from_string (float_type_node, "-Inf")); 4524 1.1 mrg } 4525 1.1 mrg 4526 1.1 mrg /* Verify that tree_cmp is a well-behaved comparator for qsort, even 4527 1.1 mrg if the underlying constants aren't comparable. */ 4528 1.1 mrg 4529 1.1 mrg static void 4530 1.1 mrg test_tree_cmp_on_constants () 4531 1.1 mrg { 4532 1.1 mrg auto_vec<tree> csts; 4533 1.1 mrg append_interesting_constants (&csts); 4534 1.1 mrg 4535 1.1 mrg /* Try sorting every triple. */ 4536 1.1 mrg const unsigned num = csts.length (); 4537 1.1 mrg for (unsigned i = 0; i < num; i++) 4538 1.1 mrg for (unsigned j = 0; j < num; j++) 4539 1.1 mrg for (unsigned k = 0; k < num; k++) 4540 1.1 mrg { 4541 1.1 mrg auto_vec<tree> v (3); 4542 1.1 mrg v.quick_push (csts[i]); 4543 1.1 mrg v.quick_push (csts[j]); 4544 1.1 mrg v.quick_push (csts[k]); 4545 1.1 mrg v.qsort (tree_cmp); 4546 1.1 mrg } 4547 1.1 mrg } 4548 1.1 mrg 4549 1.1 mrg /* Implementation detail of the ASSERT_CONDITION_* macros. 
*/ 4550 1.1 mrg 4551 1.1 mrg void 4552 1.1 mrg assert_condition (const location &loc, 4553 1.1 mrg region_model &model, 4554 1.1.1.2 mrg const svalue *lhs, tree_code op, const svalue *rhs, 4555 1.1.1.2 mrg tristate expected) 4556 1.1.1.2 mrg { 4557 1.1.1.2 mrg tristate actual = model.eval_condition (lhs, op, rhs); 4558 1.1.1.2 mrg ASSERT_EQ_AT (loc, actual, expected); 4559 1.1.1.2 mrg } 4560 1.1.1.2 mrg 4561 1.1.1.2 mrg /* Implementation detail of the ASSERT_CONDITION_* macros. */ 4562 1.1.1.2 mrg 4563 1.1.1.2 mrg void 4564 1.1.1.2 mrg assert_condition (const location &loc, 4565 1.1.1.2 mrg region_model &model, 4566 1.1 mrg tree lhs, tree_code op, tree rhs, 4567 1.1 mrg tristate expected) 4568 1.1 mrg { 4569 1.1 mrg tristate actual = model.eval_condition (lhs, op, rhs, NULL); 4570 1.1 mrg ASSERT_EQ_AT (loc, actual, expected); 4571 1.1 mrg } 4572 1.1 mrg 4573 1.1 mrg /* Implementation detail of ASSERT_DUMP_TREE_EQ. */ 4574 1.1 mrg 4575 1.1 mrg static void 4576 1.1 mrg assert_dump_tree_eq (const location &loc, tree t, const char *expected) 4577 1.1 mrg { 4578 1.1 mrg auto_fix_quotes sentinel; 4579 1.1 mrg pretty_printer pp; 4580 1.1 mrg pp_format_decoder (&pp) = default_tree_printer; 4581 1.1 mrg dump_tree (&pp, t); 4582 1.1 mrg ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected); 4583 1.1 mrg } 4584 1.1 mrg 4585 1.1 mrg /* Assert that dump_tree (T) is EXPECTED. */ 4586 1.1 mrg 4587 1.1 mrg #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \ 4588 1.1 mrg SELFTEST_BEGIN_STMT \ 4589 1.1 mrg assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \ 4590 1.1 mrg SELFTEST_END_STMT 4591 1.1 mrg 4592 1.1 mrg /* Implementation detail of ASSERT_DUMP_EQ. 
*/ 4593 1.1 mrg 4594 1.1 mrg static void 4595 1.1 mrg assert_dump_eq (const location &loc, 4596 1.1 mrg const region_model &model, 4597 1.1 mrg bool summarize, 4598 1.1 mrg const char *expected) 4599 1.1 mrg { 4600 1.1 mrg auto_fix_quotes sentinel; 4601 1.1 mrg pretty_printer pp; 4602 1.1 mrg pp_format_decoder (&pp) = default_tree_printer; 4603 1.1.1.2 mrg 4604 1.1.1.2 mrg model.dump_to_pp (&pp, summarize, true); 4605 1.1 mrg ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected); 4606 1.1 mrg } 4607 1.1 mrg 4608 1.1 mrg /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */ 4609 1.1 mrg 4610 1.1 mrg #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \ 4611 1.1 mrg SELFTEST_BEGIN_STMT \ 4612 1.1 mrg assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \ 4613 1.1 mrg SELFTEST_END_STMT 4614 1.1 mrg 4615 1.1 mrg /* Smoketest for region_model::dump_to_pp. */ 4616 1.1 mrg 4617 1.1 mrg static void 4618 1.1 mrg test_dump () 4619 1.1 mrg { 4620 1.1.1.2 mrg region_model_manager mgr; 4621 1.1.1.2 mrg region_model model (&mgr); 4622 1.1 mrg 4623 1.1 mrg ASSERT_DUMP_EQ (model, false, 4624 1.1.1.2 mrg "stack depth: 0\n" 4625 1.1.1.2 mrg "m_called_unknown_fn: FALSE\n" 4626 1.1.1.2 mrg "constraint_manager:\n" 4627 1.1.1.2 mrg " equiv classes:\n" 4628 1.1.1.2 mrg " constraints:\n"); 4629 1.1.1.2 mrg ASSERT_DUMP_EQ (model, true, 4630 1.1.1.2 mrg "stack depth: 0\n" 4631 1.1.1.2 mrg "m_called_unknown_fn: FALSE\n" 4632 1.1.1.2 mrg "constraint_manager:\n" 4633 1.1 mrg " equiv classes:\n" 4634 1.1 mrg " constraints:\n"); 4635 1.1 mrg } 4636 1.1 mrg 4637 1.1 mrg /* Helper function for selftests. Create a struct or union type named NAME, 4638 1.1 mrg with the fields given by the FIELD_DECLS in FIELDS. 4639 1.1 mrg If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise 4640 1.1 mrg create a UNION_TYPE. 
*/ 4641 1.1 mrg 4642 1.1 mrg static tree 4643 1.1 mrg make_test_compound_type (const char *name, bool is_struct, 4644 1.1 mrg const auto_vec<tree> *fields) 4645 1.1 mrg { 4646 1.1 mrg tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE); 4647 1.1 mrg TYPE_NAME (t) = get_identifier (name); 4648 1.1 mrg TYPE_SIZE (t) = 0; 4649 1.1 mrg 4650 1.1 mrg tree fieldlist = NULL; 4651 1.1 mrg int i; 4652 1.1 mrg tree field; 4653 1.1 mrg FOR_EACH_VEC_ELT (*fields, i, field) 4654 1.1 mrg { 4655 1.1 mrg gcc_assert (TREE_CODE (field) == FIELD_DECL); 4656 1.1 mrg DECL_CONTEXT (field) = t; 4657 1.1 mrg fieldlist = chainon (field, fieldlist); 4658 1.1 mrg } 4659 1.1 mrg fieldlist = nreverse (fieldlist); 4660 1.1 mrg TYPE_FIELDS (t) = fieldlist; 4661 1.1 mrg 4662 1.1 mrg layout_type (t); 4663 1.1 mrg return t; 4664 1.1 mrg } 4665 1.1 mrg 4666 1.1 mrg /* Selftest fixture for creating the type "struct coord {int x; int y; };". */ 4667 1.1 mrg 4668 1.1 mrg struct coord_test 4669 1.1 mrg { 4670 1.1 mrg coord_test () 4671 1.1 mrg { 4672 1.1 mrg auto_vec<tree> fields; 4673 1.1 mrg m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, 4674 1.1 mrg get_identifier ("x"), integer_type_node); 4675 1.1 mrg fields.safe_push (m_x_field); 4676 1.1 mrg m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, 4677 1.1 mrg get_identifier ("y"), integer_type_node); 4678 1.1 mrg fields.safe_push (m_y_field); 4679 1.1 mrg m_coord_type = make_test_compound_type ("coord", true, &fields); 4680 1.1 mrg } 4681 1.1 mrg 4682 1.1 mrg tree m_x_field; 4683 1.1 mrg tree m_y_field; 4684 1.1 mrg tree m_coord_type; 4685 1.1 mrg }; 4686 1.1 mrg 4687 1.1.1.2 mrg /* Verify usage of a struct. 
*/ 4688 1.1 mrg 4689 1.1 mrg static void 4690 1.1.1.2 mrg test_struct () 4691 1.1 mrg { 4692 1.1 mrg coord_test ct; 4693 1.1 mrg 4694 1.1 mrg tree c = build_global_decl ("c", ct.m_coord_type); 4695 1.1 mrg tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 4696 1.1 mrg c, ct.m_x_field, NULL_TREE); 4697 1.1 mrg tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 4698 1.1 mrg c, ct.m_y_field, NULL_TREE); 4699 1.1 mrg 4700 1.1 mrg tree int_17 = build_int_cst (integer_type_node, 17); 4701 1.1 mrg tree int_m3 = build_int_cst (integer_type_node, -3); 4702 1.1 mrg 4703 1.1.1.2 mrg region_model_manager mgr; 4704 1.1.1.2 mrg region_model model (&mgr); 4705 1.1 mrg model.set_value (c_x, int_17, NULL); 4706 1.1 mrg model.set_value (c_y, int_m3, NULL); 4707 1.1 mrg 4708 1.1.1.2 mrg /* Verify get_offset for "c.x". */ 4709 1.1.1.2 mrg { 4710 1.1.1.2 mrg const region *c_x_reg = model.get_lvalue (c_x, NULL); 4711 1.1.1.2 mrg region_offset offset = c_x_reg->get_offset (); 4712 1.1.1.2 mrg ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL)); 4713 1.1.1.2 mrg ASSERT_EQ (offset.get_bit_offset (), 0); 4714 1.1.1.2 mrg } 4715 1.1 mrg 4716 1.1.1.2 mrg /* Verify get_offset for "c.y". */ 4717 1.1.1.2 mrg { 4718 1.1.1.2 mrg const region *c_y_reg = model.get_lvalue (c_y, NULL); 4719 1.1.1.2 mrg region_offset offset = c_y_reg->get_offset (); 4720 1.1.1.2 mrg ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL)); 4721 1.1.1.2 mrg ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE); 4722 1.1.1.2 mrg } 4723 1.1 mrg } 4724 1.1 mrg 4725 1.1.1.2 mrg /* Verify usage of an array element. 
*/ 4726 1.1 mrg 4727 1.1 mrg static void 4728 1.1.1.2 mrg test_array_1 () 4729 1.1 mrg { 4730 1.1 mrg tree tlen = size_int (10); 4731 1.1 mrg tree arr_type = build_array_type (char_type_node, build_index_type (tlen)); 4732 1.1 mrg 4733 1.1 mrg tree a = build_global_decl ("a", arr_type); 4734 1.1 mrg 4735 1.1.1.2 mrg region_model_manager mgr; 4736 1.1.1.2 mrg region_model model (&mgr); 4737 1.1 mrg tree int_0 = build_int_cst (integer_type_node, 0); 4738 1.1 mrg tree a_0 = build4 (ARRAY_REF, char_type_node, 4739 1.1 mrg a, int_0, NULL_TREE, NULL_TREE); 4740 1.1 mrg tree char_A = build_int_cst (char_type_node, 'A'); 4741 1.1 mrg model.set_value (a_0, char_A, NULL); 4742 1.1 mrg } 4743 1.1 mrg 4744 1.1 mrg /* Verify that region_model::get_representative_tree works as expected. */ 4745 1.1 mrg 4746 1.1 mrg static void 4747 1.1 mrg test_get_representative_tree () 4748 1.1 mrg { 4749 1.1.1.2 mrg region_model_manager mgr; 4750 1.1.1.2 mrg 4751 1.1 mrg /* STRING_CST. */ 4752 1.1 mrg { 4753 1.1 mrg tree string_cst = build_string (4, "foo"); 4754 1.1.1.2 mrg region_model m (&mgr); 4755 1.1.1.2 mrg const svalue *str_sval = m.get_rvalue (string_cst, NULL); 4756 1.1.1.2 mrg tree rep = m.get_representative_tree (str_sval); 4757 1.1 mrg ASSERT_EQ (rep, string_cst); 4758 1.1 mrg } 4759 1.1 mrg 4760 1.1.1.2 mrg /* String literal. */ 4761 1.1.1.2 mrg { 4762 1.1.1.2 mrg tree string_cst_ptr = build_string_literal (4, "foo"); 4763 1.1.1.2 mrg region_model m (&mgr); 4764 1.1.1.2 mrg const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL); 4765 1.1.1.2 mrg tree rep = m.get_representative_tree (str_sval); 4766 1.1.1.2 mrg ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]"); 4767 1.1.1.2 mrg } 4768 1.1.1.2 mrg 4769 1.1.1.2 mrg /* Value of an element within an array. 
*/ 4770 1.1.1.2 mrg { 4771 1.1.1.2 mrg tree tlen = size_int (10); 4772 1.1.1.2 mrg tree arr_type = build_array_type (char_type_node, build_index_type (tlen)); 4773 1.1.1.2 mrg tree a = build_global_decl ("a", arr_type); 4774 1.1.1.2 mrg placeholder_svalue test_sval (char_type_node, "test value"); 4775 1.1.1.2 mrg 4776 1.1.1.2 mrg /* Value of a[3]. */ 4777 1.1.1.2 mrg { 4778 1.1.1.2 mrg test_region_model_context ctxt; 4779 1.1.1.2 mrg region_model model (&mgr); 4780 1.1.1.2 mrg tree int_3 = build_int_cst (integer_type_node, 3); 4781 1.1.1.2 mrg tree a_3 = build4 (ARRAY_REF, char_type_node, 4782 1.1.1.2 mrg a, int_3, NULL_TREE, NULL_TREE); 4783 1.1.1.2 mrg const region *a_3_reg = model.get_lvalue (a_3, &ctxt); 4784 1.1.1.2 mrg model.set_value (a_3_reg, &test_sval, &ctxt); 4785 1.1.1.2 mrg tree rep = model.get_representative_tree (&test_sval); 4786 1.1.1.2 mrg ASSERT_DUMP_TREE_EQ (rep, "a[3]"); 4787 1.1.1.2 mrg } 4788 1.1.1.2 mrg 4789 1.1.1.2 mrg /* Value of a[0]. */ 4790 1.1.1.2 mrg { 4791 1.1.1.2 mrg test_region_model_context ctxt; 4792 1.1.1.2 mrg region_model model (&mgr); 4793 1.1.1.2 mrg tree idx = build_int_cst (integer_type_node, 0); 4794 1.1.1.2 mrg tree a_0 = build4 (ARRAY_REF, char_type_node, 4795 1.1.1.2 mrg a, idx, NULL_TREE, NULL_TREE); 4796 1.1.1.2 mrg const region *a_0_reg = model.get_lvalue (a_0, &ctxt); 4797 1.1.1.2 mrg model.set_value (a_0_reg, &test_sval, &ctxt); 4798 1.1.1.2 mrg tree rep = model.get_representative_tree (&test_sval); 4799 1.1.1.2 mrg ASSERT_DUMP_TREE_EQ (rep, "a[0]"); 4800 1.1.1.2 mrg } 4801 1.1.1.2 mrg } 4802 1.1.1.2 mrg 4803 1.1.1.2 mrg /* Value of a field within a struct. 
*/ 4804 1.1 mrg { 4805 1.1.1.2 mrg coord_test ct; 4806 1.1.1.2 mrg 4807 1.1.1.2 mrg tree c = build_global_decl ("c", ct.m_coord_type); 4808 1.1.1.2 mrg tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 4809 1.1.1.2 mrg c, ct.m_x_field, NULL_TREE); 4810 1.1.1.2 mrg tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 4811 1.1.1.2 mrg c, ct.m_y_field, NULL_TREE); 4812 1.1.1.2 mrg 4813 1.1.1.2 mrg test_region_model_context ctxt; 4814 1.1.1.2 mrg 4815 1.1.1.2 mrg /* Value of initial field. */ 4816 1.1.1.2 mrg { 4817 1.1.1.2 mrg region_model m (&mgr); 4818 1.1.1.2 mrg const region *c_x_reg = m.get_lvalue (c_x, &ctxt); 4819 1.1.1.2 mrg placeholder_svalue test_sval_x (integer_type_node, "test x val"); 4820 1.1.1.2 mrg m.set_value (c_x_reg, &test_sval_x, &ctxt); 4821 1.1.1.2 mrg tree rep = m.get_representative_tree (&test_sval_x); 4822 1.1.1.2 mrg ASSERT_DUMP_TREE_EQ (rep, "c.x"); 4823 1.1.1.2 mrg } 4824 1.1.1.2 mrg 4825 1.1.1.2 mrg /* Value of non-initial field. */ 4826 1.1.1.2 mrg { 4827 1.1.1.2 mrg region_model m (&mgr); 4828 1.1.1.2 mrg const region *c_y_reg = m.get_lvalue (c_y, &ctxt); 4829 1.1.1.2 mrg placeholder_svalue test_sval_y (integer_type_node, "test y val"); 4830 1.1.1.2 mrg m.set_value (c_y_reg, &test_sval_y, &ctxt); 4831 1.1.1.2 mrg tree rep = m.get_representative_tree (&test_sval_y); 4832 1.1.1.2 mrg ASSERT_DUMP_TREE_EQ (rep, "c.y"); 4833 1.1.1.2 mrg } 4834 1.1 mrg } 4835 1.1 mrg } 4836 1.1 mrg 4837 1.1 mrg /* Verify that calling region_model::get_rvalue repeatedly on the same 4838 1.1.1.2 mrg tree constant retrieves the same svalue *. 
*/ 4839 1.1 mrg 4840 1.1 mrg static void 4841 1.1 mrg test_unique_constants () 4842 1.1 mrg { 4843 1.1 mrg tree int_0 = build_int_cst (integer_type_node, 0); 4844 1.1 mrg tree int_42 = build_int_cst (integer_type_node, 42); 4845 1.1 mrg 4846 1.1 mrg test_region_model_context ctxt; 4847 1.1.1.2 mrg region_model_manager mgr; 4848 1.1.1.2 mrg region_model model (&mgr); 4849 1.1 mrg ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt)); 4850 1.1 mrg ASSERT_EQ (model.get_rvalue (int_42, &ctxt), 4851 1.1 mrg model.get_rvalue (int_42, &ctxt)); 4852 1.1 mrg ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt)); 4853 1.1 mrg ASSERT_EQ (ctxt.get_num_diagnostics (), 0); 4854 1.1.1.2 mrg 4855 1.1.1.2 mrg /* A "(const int)42" will be a different tree from "(int)42)"... */ 4856 1.1.1.2 mrg tree const_int_type_node 4857 1.1.1.2 mrg = build_qualified_type (integer_type_node, TYPE_QUAL_CONST); 4858 1.1.1.2 mrg tree const_int_42 = build_int_cst (const_int_type_node, 42); 4859 1.1.1.2 mrg ASSERT_NE (int_42, const_int_42); 4860 1.1.1.2 mrg /* It should have a different const_svalue. */ 4861 1.1.1.2 mrg const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt); 4862 1.1.1.2 mrg const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt); 4863 1.1.1.2 mrg ASSERT_NE (int_42_sval, const_int_42_sval); 4864 1.1.1.2 mrg /* But they should compare as equal. */ 4865 1.1.1.2 mrg ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval); 4866 1.1.1.2 mrg ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval); 4867 1.1 mrg } 4868 1.1 mrg 4869 1.1.1.2 mrg /* Verify that each type gets its own singleton unknown_svalue within a 4870 1.1.1.2 mrg region_model_manager, and that NULL_TREE gets its own singleton. 
*/ 4871 1.1 mrg 4872 1.1 mrg static void 4873 1.1.1.2 mrg test_unique_unknowns () 4874 1.1 mrg { 4875 1.1.1.2 mrg region_model_manager mgr; 4876 1.1.1.2 mrg const svalue *unknown_int 4877 1.1.1.2 mrg = mgr.get_or_create_unknown_svalue (integer_type_node); 4878 1.1.1.2 mrg /* Repeated calls with the same type should get the same "unknown" 4879 1.1.1.2 mrg svalue. */ 4880 1.1.1.2 mrg const svalue *unknown_int_2 4881 1.1.1.2 mrg = mgr.get_or_create_unknown_svalue (integer_type_node); 4882 1.1.1.2 mrg ASSERT_EQ (unknown_int, unknown_int_2); 4883 1.1.1.2 mrg 4884 1.1.1.2 mrg /* Different types (or the NULL type) should have different 4885 1.1.1.2 mrg unknown_svalues. */ 4886 1.1.1.2 mrg const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL); 4887 1.1.1.2 mrg ASSERT_NE (unknown_NULL_type, unknown_int); 4888 1.1.1.2 mrg 4889 1.1.1.2 mrg /* Repeated calls with NULL for the type should get the same "unknown" 4890 1.1.1.2 mrg svalue. */ 4891 1.1.1.2 mrg const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL); 4892 1.1.1.2 mrg ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2); 4893 1.1 mrg } 4894 1.1 mrg 4895 1.1.1.2 mrg /* Verify that initial_svalue are handled as expected. 
*/ 4896 1.1 mrg 4897 1.1 mrg static void 4898 1.1.1.2 mrg test_initial_svalue_folding () 4899 1.1 mrg { 4900 1.1.1.2 mrg region_model_manager mgr; 4901 1.1.1.2 mrg tree x = build_global_decl ("x", integer_type_node); 4902 1.1.1.2 mrg tree y = build_global_decl ("y", integer_type_node); 4903 1.1 mrg 4904 1.1.1.2 mrg test_region_model_context ctxt; 4905 1.1.1.2 mrg region_model model (&mgr); 4906 1.1.1.2 mrg const svalue *x_init = model.get_rvalue (x, &ctxt); 4907 1.1.1.2 mrg const svalue *y_init = model.get_rvalue (y, &ctxt); 4908 1.1.1.2 mrg ASSERT_NE (x_init, y_init); 4909 1.1.1.2 mrg const region *x_reg = model.get_lvalue (x, &ctxt); 4910 1.1.1.2 mrg ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg)); 4911 1.1 mrg 4912 1.1 mrg } 4913 1.1 mrg 4914 1.1.1.2 mrg /* Verify that unary ops are folded as expected. */ 4915 1.1 mrg 4916 1.1 mrg static void 4917 1.1.1.2 mrg test_unaryop_svalue_folding () 4918 1.1 mrg { 4919 1.1.1.2 mrg region_model_manager mgr; 4920 1.1 mrg tree x = build_global_decl ("x", integer_type_node); 4921 1.1 mrg tree y = build_global_decl ("y", integer_type_node); 4922 1.1 mrg 4923 1.1.1.2 mrg test_region_model_context ctxt; 4924 1.1.1.2 mrg region_model model (&mgr); 4925 1.1.1.2 mrg const svalue *x_init = model.get_rvalue (x, &ctxt); 4926 1.1.1.2 mrg const svalue *y_init = model.get_rvalue (y, &ctxt); 4927 1.1.1.2 mrg const region *x_reg = model.get_lvalue (x, &ctxt); 4928 1.1.1.2 mrg ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg)); 4929 1.1.1.2 mrg 4930 1.1.1.2 mrg /* "(int)x" -> "x". */ 4931 1.1.1.2 mrg ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init)); 4932 1.1.1.2 mrg 4933 1.1.1.2 mrg /* "(void *)x" -> something other than "x". */ 4934 1.1.1.2 mrg ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init)); 4935 1.1.1.2 mrg 4936 1.1.1.2 mrg /* "!(x == y)" -> "x != y". 
*/ 4937 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_unaryop 4938 1.1.1.2 mrg (boolean_type_node, TRUTH_NOT_EXPR, 4939 1.1.1.2 mrg mgr.get_or_create_binop (boolean_type_node, EQ_EXPR, 4940 1.1.1.2 mrg x_init, y_init)), 4941 1.1.1.2 mrg mgr.get_or_create_binop (boolean_type_node, NE_EXPR, 4942 1.1.1.2 mrg x_init, y_init)); 4943 1.1.1.2 mrg /* "!(x > y)" -> "x <= y". */ 4944 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_unaryop 4945 1.1.1.2 mrg (boolean_type_node, TRUTH_NOT_EXPR, 4946 1.1.1.2 mrg mgr.get_or_create_binop (boolean_type_node, GT_EXPR, 4947 1.1.1.2 mrg x_init, y_init)), 4948 1.1.1.2 mrg mgr.get_or_create_binop (boolean_type_node, LE_EXPR, 4949 1.1.1.2 mrg x_init, y_init)); 4950 1.1.1.2 mrg } 4951 1.1 mrg 4952 1.1.1.2 mrg /* Verify that binops on constant svalues are folded. */ 4953 1.1 mrg 4954 1.1.1.2 mrg static void 4955 1.1.1.2 mrg test_binop_svalue_folding () 4956 1.1.1.2 mrg { 4957 1.1.1.2 mrg #define NUM_CSTS 10 4958 1.1.1.2 mrg tree cst_int[NUM_CSTS]; 4959 1.1.1.2 mrg region_model_manager mgr; 4960 1.1.1.2 mrg const svalue *cst_sval[NUM_CSTS]; 4961 1.1.1.2 mrg for (int i = 0; i < NUM_CSTS; i++) 4962 1.1.1.2 mrg { 4963 1.1.1.2 mrg cst_int[i] = build_int_cst (integer_type_node, i); 4964 1.1.1.2 mrg cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]); 4965 1.1.1.2 mrg ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT); 4966 1.1.1.2 mrg ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]); 4967 1.1.1.2 mrg } 4968 1.1.1.2 mrg 4969 1.1.1.2 mrg for (int i = 0; i < NUM_CSTS; i++) 4970 1.1.1.2 mrg for (int j = 0; j < NUM_CSTS; j++) 4971 1.1.1.2 mrg { 4972 1.1.1.2 mrg if (i != j) 4973 1.1.1.2 mrg ASSERT_NE (cst_sval[i], cst_sval[j]); 4974 1.1.1.2 mrg if (i + j < NUM_CSTS) 4975 1.1.1.2 mrg { 4976 1.1.1.2 mrg const svalue *sum 4977 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, 4978 1.1.1.2 mrg cst_sval[i], cst_sval[j]); 4979 1.1.1.2 mrg ASSERT_EQ (sum, cst_sval[i + j]); 4980 1.1.1.2 mrg } 4981 1.1.1.2 mrg if (i - j >= 0) 4982 
1.1.1.2 mrg { 4983 1.1.1.2 mrg const svalue *difference 4984 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR, 4985 1.1.1.2 mrg cst_sval[i], cst_sval[j]); 4986 1.1.1.2 mrg ASSERT_EQ (difference, cst_sval[i - j]); 4987 1.1.1.2 mrg } 4988 1.1.1.2 mrg if (i * j < NUM_CSTS) 4989 1.1.1.2 mrg { 4990 1.1.1.2 mrg const svalue *product 4991 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 4992 1.1.1.2 mrg cst_sval[i], cst_sval[j]); 4993 1.1.1.2 mrg ASSERT_EQ (product, cst_sval[i * j]); 4994 1.1.1.2 mrg } 4995 1.1.1.2 mrg const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR, 4996 1.1.1.2 mrg cst_sval[i], cst_sval[j]); 4997 1.1.1.2 mrg ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]); 4998 1.1.1.2 mrg const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR, 4999 1.1.1.2 mrg cst_sval[i], cst_sval[j]); 5000 1.1.1.2 mrg ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]); 5001 1.1.1.2 mrg // etc 5002 1.1.1.2 mrg } 5003 1.1 mrg 5004 1.1.1.2 mrg tree x = build_global_decl ("x", integer_type_node); 5005 1.1 mrg 5006 1.1.1.2 mrg test_region_model_context ctxt; 5007 1.1.1.2 mrg region_model model (&mgr); 5008 1.1.1.2 mrg const svalue *x_init = model.get_rvalue (x, &ctxt); 5009 1.1 mrg 5010 1.1.1.2 mrg /* PLUS_EXPR folding. */ 5011 1.1.1.2 mrg const svalue *x_init_plus_zero 5012 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, 5013 1.1.1.2 mrg x_init, cst_sval[0]); 5014 1.1.1.2 mrg ASSERT_EQ (x_init_plus_zero, x_init); 5015 1.1.1.2 mrg const svalue *zero_plus_x_init 5016 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, 5017 1.1.1.2 mrg cst_sval[0], x_init); 5018 1.1.1.2 mrg ASSERT_EQ (zero_plus_x_init, x_init); 5019 1.1.1.2 mrg 5020 1.1.1.2 mrg /* MULT_EXPR folding. 
*/ 5021 1.1.1.2 mrg const svalue *x_init_times_zero 5022 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 5023 1.1.1.2 mrg x_init, cst_sval[0]); 5024 1.1.1.2 mrg ASSERT_EQ (x_init_times_zero, cst_sval[0]); 5025 1.1.1.2 mrg const svalue *zero_times_x_init 5026 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 5027 1.1.1.2 mrg cst_sval[0], x_init); 5028 1.1.1.2 mrg ASSERT_EQ (zero_times_x_init, cst_sval[0]); 5029 1.1.1.2 mrg 5030 1.1.1.2 mrg const svalue *x_init_times_one 5031 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 5032 1.1.1.2 mrg x_init, cst_sval[1]); 5033 1.1.1.2 mrg ASSERT_EQ (x_init_times_one, x_init); 5034 1.1.1.2 mrg const svalue *one_times_x_init 5035 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 5036 1.1.1.2 mrg cst_sval[1], x_init); 5037 1.1.1.2 mrg ASSERT_EQ (one_times_x_init, x_init); 5038 1.1.1.2 mrg 5039 1.1.1.2 mrg // etc 5040 1.1.1.2 mrg // TODO: do we want to use the match-and-simplify DSL for this? 5041 1.1.1.2 mrg 5042 1.1.1.2 mrg /* Verify that binops put any constants on the RHS. */ 5043 1.1.1.2 mrg const svalue *four_times_x_init 5044 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 5045 1.1.1.2 mrg cst_sval[4], x_init); 5046 1.1.1.2 mrg const svalue *x_init_times_four 5047 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, MULT_EXPR, 5048 1.1.1.2 mrg x_init, cst_sval[4]); 5049 1.1.1.2 mrg ASSERT_EQ (four_times_x_init, x_init_times_four); 5050 1.1.1.2 mrg const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue (); 5051 1.1.1.2 mrg ASSERT_EQ (binop->get_op (), MULT_EXPR); 5052 1.1.1.2 mrg ASSERT_EQ (binop->get_arg0 (), x_init); 5053 1.1.1.2 mrg ASSERT_EQ (binop->get_arg1 (), cst_sval[4]); 5054 1.1.1.2 mrg 5055 1.1.1.2 mrg /* Verify that ((x + 1) + 1) == (x + 2). 
*/ 5056 1.1.1.2 mrg const svalue *x_init_plus_one 5057 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, 5058 1.1.1.2 mrg x_init, cst_sval[1]); 5059 1.1.1.2 mrg const svalue *x_init_plus_two 5060 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, 5061 1.1.1.2 mrg x_init, cst_sval[2]); 5062 1.1.1.2 mrg const svalue *x_init_plus_one_plus_one 5063 1.1.1.2 mrg = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, 5064 1.1.1.2 mrg x_init_plus_one, cst_sval[1]); 5065 1.1.1.2 mrg ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two); 5066 1.1.1.2 mrg 5067 1.1.1.2 mrg /* Verify various binops on booleans. */ 5068 1.1.1.2 mrg { 5069 1.1.1.2 mrg const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1); 5070 1.1.1.2 mrg const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0); 5071 1.1.1.2 mrg const svalue *sval_unknown 5072 1.1.1.2 mrg = mgr.get_or_create_unknown_svalue (boolean_type_node); 5073 1.1.1.2 mrg const placeholder_svalue sval_placeholder (boolean_type_node, "v"); 5074 1.1.1.2 mrg for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR}) 5075 1.1.1.2 mrg { 5076 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op, 5077 1.1.1.2 mrg sval_true, sval_unknown), 5078 1.1.1.2 mrg sval_true); 5079 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op, 5080 1.1.1.2 mrg sval_false, sval_unknown), 5081 1.1.1.2 mrg sval_unknown); 5082 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op, 5083 1.1.1.2 mrg sval_false, &sval_placeholder), 5084 1.1.1.2 mrg &sval_placeholder); 5085 1.1.1.2 mrg } 5086 1.1.1.2 mrg for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR}) 5087 1.1.1.2 mrg { 5088 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op, 5089 1.1.1.2 mrg sval_false, sval_unknown), 5090 1.1.1.2 mrg sval_false); 5091 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op, 5092 1.1.1.2 mrg sval_true, sval_unknown), 5093 1.1.1.2 mrg 
sval_unknown); 5094 1.1.1.2 mrg ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op, 5095 1.1.1.2 mrg sval_true, &sval_placeholder), 5096 1.1.1.2 mrg &sval_placeholder); 5097 1.1.1.2 mrg } 5098 1.1 mrg } 5099 1.1.1.2 mrg } 5100 1.1 mrg 5101 1.1.1.2 mrg /* Verify that sub_svalues are folded as expected. */ 5102 1.1 mrg 5103 1.1.1.2 mrg static void 5104 1.1.1.2 mrg test_sub_svalue_folding () 5105 1.1.1.2 mrg { 5106 1.1.1.2 mrg coord_test ct; 5107 1.1.1.2 mrg tree c = build_global_decl ("c", ct.m_coord_type); 5108 1.1.1.2 mrg tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 5109 1.1.1.2 mrg c, ct.m_x_field, NULL_TREE); 5110 1.1 mrg 5111 1.1.1.2 mrg region_model_manager mgr; 5112 1.1.1.2 mrg region_model model (&mgr); 5113 1.1.1.2 mrg test_region_model_context ctxt; 5114 1.1.1.2 mrg const region *c_x_reg = model.get_lvalue (c_x, &ctxt); 5115 1.1 mrg 5116 1.1.1.2 mrg /* Verify that sub_svalue of "unknown" simply 5117 1.1.1.2 mrg yields an unknown. */ 5118 1.1 mrg 5119 1.1.1.2 mrg const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type); 5120 1.1.1.2 mrg const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field), 5121 1.1.1.2 mrg unknown, c_x_reg); 5122 1.1.1.2 mrg ASSERT_EQ (sub->get_kind (), SK_UNKNOWN); 5123 1.1.1.2 mrg ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field)); 5124 1.1.1.2 mrg } 5125 1.1 mrg 5126 1.1.1.2 mrg /* Test that region::descendent_of_p works as expected. */ 5127 1.1 mrg 5128 1.1.1.2 mrg static void 5129 1.1.1.2 mrg test_descendent_of_p () 5130 1.1.1.2 mrg { 5131 1.1.1.2 mrg region_model_manager mgr; 5132 1.1.1.2 mrg const region *stack = mgr.get_stack_region (); 5133 1.1.1.2 mrg const region *heap = mgr.get_heap_region (); 5134 1.1.1.2 mrg const region *code = mgr.get_code_region (); 5135 1.1.1.2 mrg const region *globals = mgr.get_globals_region (); 5136 1.1.1.2 mrg 5137 1.1.1.2 mrg /* descendent_of_p should return true when used on the region itself. 
*/ 5138 1.1.1.2 mrg ASSERT_TRUE (stack->descendent_of_p (stack)); 5139 1.1.1.2 mrg ASSERT_FALSE (stack->descendent_of_p (heap)); 5140 1.1.1.2 mrg ASSERT_FALSE (stack->descendent_of_p (code)); 5141 1.1.1.2 mrg ASSERT_FALSE (stack->descendent_of_p (globals)); 5142 1.1 mrg 5143 1.1.1.2 mrg tree x = build_global_decl ("x", integer_type_node); 5144 1.1.1.2 mrg const region *x_reg = mgr.get_region_for_global (x); 5145 1.1.1.2 mrg ASSERT_TRUE (x_reg->descendent_of_p (globals)); 5146 1.1 mrg 5147 1.1.1.2 mrg /* A cast_region should be a descendent of the original region. */ 5148 1.1.1.2 mrg const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node); 5149 1.1.1.2 mrg ASSERT_TRUE (cast_reg->descendent_of_p (x_reg)); 5150 1.1 mrg } 5151 1.1 mrg 5152 1.1.1.2 mrg /* Verify that bit_range_region works as expected. */ 5153 1.1 mrg 5154 1.1 mrg static void 5155 1.1.1.2 mrg test_bit_range_regions () 5156 1.1 mrg { 5157 1.1 mrg tree x = build_global_decl ("x", integer_type_node); 5158 1.1.1.2 mrg region_model_manager mgr; 5159 1.1.1.2 mrg const region *x_reg = mgr.get_region_for_global (x); 5160 1.1.1.2 mrg const region *byte0 5161 1.1.1.2 mrg = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8)); 5162 1.1.1.2 mrg const region *byte1 5163 1.1.1.2 mrg = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8)); 5164 1.1.1.2 mrg ASSERT_TRUE (byte0->descendent_of_p (x_reg)); 5165 1.1.1.2 mrg ASSERT_TRUE (byte1->descendent_of_p (x_reg)); 5166 1.1.1.2 mrg ASSERT_NE (byte0, byte1); 5167 1.1 mrg } 5168 1.1 mrg 5169 1.1 mrg /* Verify that simple assignments work as expected. */ 5170 1.1 mrg 5171 1.1 mrg static void 5172 1.1 mrg test_assignment () 5173 1.1 mrg { 5174 1.1 mrg tree int_0 = build_int_cst (integer_type_node, 0); 5175 1.1 mrg tree x = build_global_decl ("x", integer_type_node); 5176 1.1 mrg tree y = build_global_decl ("y", integer_type_node); 5177 1.1 mrg 5178 1.1 mrg /* "x == 0", then use of y, then "y = 0;". 
*/ 5179 1.1.1.2 mrg region_model_manager mgr; 5180 1.1.1.2 mrg region_model model (&mgr); 5181 1.1 mrg ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0); 5182 1.1 mrg ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0); 5183 1.1 mrg model.set_value (model.get_lvalue (y, NULL), 5184 1.1 mrg model.get_rvalue (int_0, NULL), 5185 1.1 mrg NULL); 5186 1.1 mrg ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0); 5187 1.1 mrg ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x); 5188 1.1 mrg } 5189 1.1 mrg 5190 1.1 mrg /* Verify that compound assignments work as expected. */ 5191 1.1 mrg 5192 1.1 mrg static void 5193 1.1 mrg test_compound_assignment () 5194 1.1 mrg { 5195 1.1 mrg coord_test ct; 5196 1.1 mrg 5197 1.1 mrg tree c = build_global_decl ("c", ct.m_coord_type); 5198 1.1 mrg tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 5199 1.1 mrg c, ct.m_x_field, NULL_TREE); 5200 1.1 mrg tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 5201 1.1 mrg c, ct.m_y_field, NULL_TREE); 5202 1.1 mrg tree d = build_global_decl ("d", ct.m_coord_type); 5203 1.1 mrg tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 5204 1.1 mrg d, ct.m_x_field, NULL_TREE); 5205 1.1 mrg tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 5206 1.1 mrg d, ct.m_y_field, NULL_TREE); 5207 1.1 mrg 5208 1.1 mrg tree int_17 = build_int_cst (integer_type_node, 17); 5209 1.1 mrg tree int_m3 = build_int_cst (integer_type_node, -3); 5210 1.1 mrg 5211 1.1.1.2 mrg region_model_manager mgr; 5212 1.1.1.2 mrg region_model model (&mgr); 5213 1.1 mrg model.set_value (c_x, int_17, NULL); 5214 1.1 mrg model.set_value (c_y, int_m3, NULL); 5215 1.1 mrg 5216 1.1 mrg /* Copy c to d. */ 5217 1.1.1.2 mrg const svalue *sval = model.get_rvalue (c, NULL); 5218 1.1.1.2 mrg model.set_value (model.get_lvalue (d, NULL), sval, NULL); 5219 1.1.1.2 mrg 5220 1.1 mrg /* Check that the fields have the same svalues. 
*/ 5221 1.1 mrg ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL)); 5222 1.1 mrg ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL)); 5223 1.1 mrg } 5224 1.1 mrg 5225 1.1 mrg /* Verify the details of pushing and popping stack frames. */ 5226 1.1 mrg 5227 1.1 mrg static void 5228 1.1 mrg test_stack_frames () 5229 1.1 mrg { 5230 1.1 mrg tree int_42 = build_int_cst (integer_type_node, 42); 5231 1.1 mrg tree int_10 = build_int_cst (integer_type_node, 10); 5232 1.1 mrg tree int_5 = build_int_cst (integer_type_node, 5); 5233 1.1 mrg tree int_0 = build_int_cst (integer_type_node, 0); 5234 1.1 mrg 5235 1.1 mrg auto_vec <tree> param_types; 5236 1.1 mrg tree parent_fndecl = make_fndecl (integer_type_node, 5237 1.1 mrg "parent_fn", 5238 1.1 mrg param_types); 5239 1.1 mrg allocate_struct_function (parent_fndecl, true); 5240 1.1 mrg 5241 1.1 mrg tree child_fndecl = make_fndecl (integer_type_node, 5242 1.1 mrg "child_fn", 5243 1.1 mrg param_types); 5244 1.1 mrg allocate_struct_function (child_fndecl, true); 5245 1.1 mrg 5246 1.1 mrg /* "a" and "b" in the parent frame. */ 5247 1.1 mrg tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL, 5248 1.1 mrg get_identifier ("a"), 5249 1.1 mrg integer_type_node); 5250 1.1.1.2 mrg DECL_CONTEXT (a) = parent_fndecl; 5251 1.1 mrg tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL, 5252 1.1 mrg get_identifier ("b"), 5253 1.1 mrg integer_type_node); 5254 1.1.1.2 mrg DECL_CONTEXT (b) = parent_fndecl; 5255 1.1 mrg /* "x" and "y" in a child frame. */ 5256 1.1 mrg tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL, 5257 1.1 mrg get_identifier ("x"), 5258 1.1 mrg integer_type_node); 5259 1.1.1.2 mrg DECL_CONTEXT (x) = child_fndecl; 5260 1.1 mrg tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL, 5261 1.1 mrg get_identifier ("y"), 5262 1.1 mrg integer_type_node); 5263 1.1.1.2 mrg DECL_CONTEXT (y) = child_fndecl; 5264 1.1 mrg 5265 1.1 mrg /* "p" global. 
*/ 5266 1.1 mrg tree p = build_global_decl ("p", ptr_type_node); 5267 1.1 mrg 5268 1.1 mrg /* "q" global. */ 5269 1.1 mrg tree q = build_global_decl ("q", ptr_type_node); 5270 1.1 mrg 5271 1.1.1.2 mrg region_model_manager mgr; 5272 1.1 mrg test_region_model_context ctxt; 5273 1.1.1.2 mrg region_model model (&mgr); 5274 1.1 mrg 5275 1.1 mrg /* Push stack frame for "parent_fn". */ 5276 1.1.1.2 mrg const region *parent_frame_reg 5277 1.1.1.2 mrg = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl), 5278 1.1.1.2 mrg NULL, &ctxt); 5279 1.1.1.2 mrg ASSERT_EQ (model.get_current_frame (), parent_frame_reg); 5280 1.1.1.2 mrg ASSERT_TRUE (model.region_exists_p (parent_frame_reg)); 5281 1.1.1.2 mrg const region *a_in_parent_reg = model.get_lvalue (a, &ctxt); 5282 1.1.1.2 mrg model.set_value (a_in_parent_reg, 5283 1.1.1.2 mrg model.get_rvalue (int_42, &ctxt), 5284 1.1.1.2 mrg &ctxt); 5285 1.1.1.2 mrg ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg); 5286 1.1.1.2 mrg 5287 1.1 mrg model.add_constraint (b, LT_EXPR, int_10, &ctxt); 5288 1.1 mrg ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt), 5289 1.1 mrg tristate (tristate::TS_TRUE)); 5290 1.1 mrg 5291 1.1 mrg /* Push stack frame for "child_fn". 
*/ 5292 1.1.1.2 mrg const region *child_frame_reg 5293 1.1 mrg = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt); 5294 1.1.1.2 mrg ASSERT_EQ (model.get_current_frame (), child_frame_reg); 5295 1.1.1.2 mrg ASSERT_TRUE (model.region_exists_p (child_frame_reg)); 5296 1.1.1.2 mrg const region *x_in_child_reg = model.get_lvalue (x, &ctxt); 5297 1.1.1.2 mrg model.set_value (x_in_child_reg, 5298 1.1.1.2 mrg model.get_rvalue (int_0, &ctxt), 5299 1.1.1.2 mrg &ctxt); 5300 1.1.1.2 mrg ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg); 5301 1.1.1.2 mrg 5302 1.1 mrg model.add_constraint (y, NE_EXPR, int_5, &ctxt); 5303 1.1 mrg ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt), 5304 1.1 mrg tristate (tristate::TS_TRUE)); 5305 1.1 mrg 5306 1.1 mrg /* Point a global pointer at a local in the child frame: p = &x. */ 5307 1.1.1.2 mrg const region *p_in_globals_reg = model.get_lvalue (p, &ctxt); 5308 1.1.1.2 mrg model.set_value (p_in_globals_reg, 5309 1.1.1.2 mrg mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg), 5310 1.1 mrg &ctxt); 5311 1.1.1.2 mrg ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL); 5312 1.1 mrg 5313 1.1 mrg /* Point another global pointer at p: q = &p. */ 5314 1.1.1.2 mrg const region *q_in_globals_reg = model.get_lvalue (q, &ctxt); 5315 1.1.1.2 mrg model.set_value (q_in_globals_reg, 5316 1.1.1.2 mrg mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg), 5317 1.1 mrg &ctxt); 5318 1.1 mrg 5319 1.1.1.2 mrg /* Test region::descendent_of_p. */ 5320 1.1.1.2 mrg ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg)); 5321 1.1.1.2 mrg ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg)); 5322 1.1.1.2 mrg ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg)); 5323 1.1 mrg 5324 1.1 mrg /* Pop the "child_fn" frame from the stack. 
*/ 5325 1.1.1.2 mrg model.pop_frame (NULL, NULL, &ctxt); 5326 1.1.1.2 mrg ASSERT_FALSE (model.region_exists_p (child_frame_reg)); 5327 1.1.1.2 mrg ASSERT_TRUE (model.region_exists_p (parent_frame_reg)); 5328 1.1 mrg 5329 1.1 mrg /* Verify that p (which was pointing at the local "x" in the popped 5330 1.1 mrg frame) has been poisoned. */ 5331 1.1.1.2 mrg const svalue *new_p_sval = model.get_rvalue (p, NULL); 5332 1.1 mrg ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED); 5333 1.1 mrg ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (), 5334 1.1 mrg POISON_KIND_POPPED_STACK); 5335 1.1 mrg 5336 1.1 mrg /* Verify that q still points to p, in spite of the region 5337 1.1 mrg renumbering. */ 5338 1.1.1.2 mrg const svalue *new_q_sval = model.get_rvalue (q, &ctxt); 5339 1.1 mrg ASSERT_EQ (new_q_sval->get_kind (), SK_REGION); 5340 1.1.1.2 mrg ASSERT_EQ (new_q_sval->maybe_get_region (), 5341 1.1 mrg model.get_lvalue (p, &ctxt)); 5342 1.1 mrg 5343 1.1 mrg /* Verify that top of stack has been updated. */ 5344 1.1.1.2 mrg ASSERT_EQ (model.get_current_frame (), parent_frame_reg); 5345 1.1 mrg 5346 1.1 mrg /* Verify locals in parent frame. */ 5347 1.1 mrg /* Verify "a" still has its value. */ 5348 1.1.1.2 mrg const svalue *new_a_sval = model.get_rvalue (a, &ctxt); 5349 1.1 mrg ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT); 5350 1.1 mrg ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (), 5351 1.1 mrg int_42); 5352 1.1 mrg /* Verify "b" still has its constraint. */ 5353 1.1 mrg ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt), 5354 1.1 mrg tristate (tristate::TS_TRUE)); 5355 1.1 mrg } 5356 1.1 mrg 5357 1.1 mrg /* Verify that get_representative_path_var works as expected, that 5358 1.1.1.2 mrg we can map from regions to parms and back within a recursive call 5359 1.1 mrg stack. 
*/

/* Verify that get_representative_path_var works as expected: that we can
   map from regions/svalues back to path_vars (decl + stack depth) within
   a recursive call stack, where the same PARM_DECL appears at several
   depths.  */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.
     Record the per-frame region for "n" and its initial svalue, so that
     we can later verify the reverse mapping at every depth.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      /* The param's region must live within the frame just pushed.  */
      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.
     Note the indexing convention: lookups via get_lvalue use the 0-based
     "depth" used when pushing, whereas the path_vars reported by
     get_representative_path_var use a stack depth of "depth + 1".  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}

/* Ensure that region_model::operator== works as expected:
   empty models compare equal, setting state breaks equality,
   and copies are equal to (but independent of) the original.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model1 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal (i.e. that store ordering doesn't affect equality).  */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  /* Same bindings, applied in the opposite order.  */
  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.
*/

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  /* The same two constraints, added in the opposite order.  */
  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  /* Explicit canonicalization is needed before comparing, since the
     constraints were added in different orders.  */
  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  /* Pull each constant into the model, then canonicalize.  There are no
     assertions; the test succeeds by not crashing (e.g. on NaN, which is
     not comparable with itself).  */
  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.
*/

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  program_point point (program_point::origin ());
  test_region_model_context ctxt;
  /* Both input models must share OUT_MERGED_MODEL's manager, so that
     their svalues/regions are comparable.  */
  region_model_manager *mgr = out_merged_model->get_manager ();
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances.
*/

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  program_point point (program_point::origin ());
  region_model_manager mgr;

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    /* The merged model must differ from both inputs (the contradictory
       constraints can't both survive).  */
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

  /* Merging (x == 42) with (x != 42) should be possible.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* ...likewise (x == 42) with (x != 42 && x == 113).  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances: constraints shared by both inputs survive, constraints
   present in only one input are dropped.  */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin ());
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  /* The constraints common to both inputs must hold...  */
  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* ...whereas "z != 5" (model1-only) and "x == y" (model0-only,
     queried here as x < y) must have been dropped.  */
  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.
*/

static void
test_widening_constraints ()
{
  program_point point (program_point::origin ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  region_model_manager mgr;
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  /* A widening of "0, then 1": the value started at 0 and was next seen
     as 1, so it is known to be >= 0 and ascending, but its upper bound
     is unknown.  The assertions below exercise each comparison op
     (LT/LE/GT/GE/EQ/NE) against -1, 0, 1 and 256.  */
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_22: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
	 [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  program_point point (program_point::origin ());

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256 (the loop guard's true edge).  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
1.1 mrg ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr); 6176 1.1 mrg } 6177 1.1 mrg 6178 1.1.1.2 mrg /* Smoketest of getting and setting the value of a variable. */ 6179 1.1.1.2 mrg 6180 1.1.1.2 mrg static void 6181 1.1.1.2 mrg test_var () 6182 1.1.1.2 mrg { 6183 1.1.1.2 mrg /* "int i;" */ 6184 1.1.1.2 mrg tree i = build_global_decl ("i", integer_type_node); 6185 1.1.1.2 mrg 6186 1.1.1.2 mrg tree int_17 = build_int_cst (integer_type_node, 17); 6187 1.1.1.2 mrg tree int_m3 = build_int_cst (integer_type_node, -3); 6188 1.1.1.2 mrg 6189 1.1.1.2 mrg region_model_manager mgr; 6190 1.1.1.2 mrg region_model model (&mgr); 6191 1.1.1.2 mrg 6192 1.1.1.2 mrg const region *i_reg = model.get_lvalue (i, NULL); 6193 1.1.1.2 mrg ASSERT_EQ (i_reg->get_kind (), RK_DECL); 6194 1.1.1.2 mrg 6195 1.1.1.2 mrg /* Reading "i" should give a symbolic "initial value". */ 6196 1.1.1.2 mrg const svalue *sval_init = model.get_rvalue (i, NULL); 6197 1.1.1.2 mrg ASSERT_EQ (sval_init->get_kind (), SK_INITIAL); 6198 1.1.1.2 mrg ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg); 6199 1.1.1.2 mrg /* ..and doing it again should give the same "initial value". */ 6200 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (i, NULL), sval_init); 6201 1.1.1.2 mrg 6202 1.1.1.2 mrg /* "i = 17;". */ 6203 1.1.1.2 mrg model.set_value (i, int_17, NULL); 6204 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (i, NULL), 6205 1.1.1.2 mrg model.get_rvalue (int_17, NULL)); 6206 1.1.1.2 mrg 6207 1.1.1.2 mrg /* "i = -3;". */ 6208 1.1.1.2 mrg model.set_value (i, int_m3, NULL); 6209 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (i, NULL), 6210 1.1.1.2 mrg model.get_rvalue (int_m3, NULL)); 6211 1.1.1.2 mrg 6212 1.1.1.2 mrg /* Verify get_offset for "i". 
*/ 6213 1.1.1.2 mrg { 6214 1.1.1.2 mrg region_offset offset = i_reg->get_offset (); 6215 1.1.1.2 mrg ASSERT_EQ (offset.get_base_region (), i_reg); 6216 1.1.1.2 mrg ASSERT_EQ (offset.get_bit_offset (), 0); 6217 1.1.1.2 mrg } 6218 1.1.1.2 mrg } 6219 1.1.1.2 mrg 6220 1.1.1.2 mrg static void 6221 1.1.1.2 mrg test_array_2 () 6222 1.1.1.2 mrg { 6223 1.1.1.2 mrg /* "int arr[10];" */ 6224 1.1.1.2 mrg tree tlen = size_int (10); 6225 1.1.1.2 mrg tree arr_type 6226 1.1.1.2 mrg = build_array_type (integer_type_node, build_index_type (tlen)); 6227 1.1.1.2 mrg tree arr = build_global_decl ("arr", arr_type); 6228 1.1.1.2 mrg 6229 1.1.1.2 mrg /* "int i;" */ 6230 1.1.1.2 mrg tree i = build_global_decl ("i", integer_type_node); 6231 1.1.1.2 mrg 6232 1.1.1.2 mrg tree int_0 = build_int_cst (integer_type_node, 0); 6233 1.1.1.2 mrg tree int_1 = build_int_cst (integer_type_node, 1); 6234 1.1.1.2 mrg 6235 1.1.1.2 mrg tree arr_0 = build4 (ARRAY_REF, integer_type_node, 6236 1.1.1.2 mrg arr, int_0, NULL_TREE, NULL_TREE); 6237 1.1.1.2 mrg tree arr_1 = build4 (ARRAY_REF, integer_type_node, 6238 1.1.1.2 mrg arr, int_1, NULL_TREE, NULL_TREE); 6239 1.1.1.2 mrg tree arr_i = build4 (ARRAY_REF, integer_type_node, 6240 1.1.1.2 mrg arr, i, NULL_TREE, NULL_TREE); 6241 1.1.1.2 mrg 6242 1.1.1.2 mrg tree int_17 = build_int_cst (integer_type_node, 17); 6243 1.1.1.2 mrg tree int_42 = build_int_cst (integer_type_node, 42); 6244 1.1.1.2 mrg tree int_m3 = build_int_cst (integer_type_node, -3); 6245 1.1.1.2 mrg 6246 1.1.1.2 mrg region_model_manager mgr; 6247 1.1.1.2 mrg region_model model (&mgr); 6248 1.1.1.2 mrg /* "arr[0] = 17;". */ 6249 1.1.1.2 mrg model.set_value (arr_0, int_17, NULL); 6250 1.1.1.2 mrg /* "arr[1] = -3;". 
*/ 6251 1.1.1.2 mrg model.set_value (arr_1, int_m3, NULL); 6252 1.1.1.2 mrg 6253 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL)); 6254 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL)); 6255 1.1.1.2 mrg 6256 1.1.1.2 mrg /* Overwrite a pre-existing binding: "arr[1] = 42;". */ 6257 1.1.1.2 mrg model.set_value (arr_1, int_42, NULL); 6258 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL)); 6259 1.1.1.2 mrg 6260 1.1.1.2 mrg /* Verify get_offset for "arr[0]". */ 6261 1.1.1.2 mrg { 6262 1.1.1.2 mrg const region *arr_0_reg = model.get_lvalue (arr_0, NULL); 6263 1.1.1.2 mrg region_offset offset = arr_0_reg->get_offset (); 6264 1.1.1.2 mrg ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL)); 6265 1.1.1.2 mrg ASSERT_EQ (offset.get_bit_offset (), 0); 6266 1.1.1.2 mrg } 6267 1.1.1.2 mrg 6268 1.1.1.2 mrg /* Verify get_offset for "arr[1]". */ 6269 1.1.1.2 mrg { 6270 1.1.1.2 mrg const region *arr_1_reg = model.get_lvalue (arr_1, NULL); 6271 1.1.1.2 mrg region_offset offset = arr_1_reg->get_offset (); 6272 1.1.1.2 mrg ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL)); 6273 1.1.1.2 mrg ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE); 6274 1.1.1.2 mrg } 6275 1.1.1.2 mrg 6276 1.1.1.2 mrg /* "arr[i] = i;" - this should remove the earlier bindings. */ 6277 1.1.1.2 mrg model.set_value (arr_i, i, NULL); 6278 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL)); 6279 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN); 6280 1.1.1.2 mrg 6281 1.1.1.2 mrg /* "arr[0] = 17;" - this should remove the arr[i] binding. 
*/ 6282 1.1.1.2 mrg model.set_value (arr_0, int_17, NULL); 6283 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL)); 6284 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN); 6285 1.1.1.2 mrg } 6286 1.1.1.2 mrg 6287 1.1.1.2 mrg /* Smoketest of dereferencing a pointer via MEM_REF. */ 6288 1.1.1.2 mrg 6289 1.1.1.2 mrg static void 6290 1.1.1.2 mrg test_mem_ref () 6291 1.1.1.2 mrg { 6292 1.1.1.2 mrg /* 6293 1.1.1.2 mrg x = 17; 6294 1.1.1.2 mrg p = &x; 6295 1.1.1.2 mrg *p; 6296 1.1.1.2 mrg */ 6297 1.1.1.2 mrg tree x = build_global_decl ("x", integer_type_node); 6298 1.1.1.2 mrg tree int_star = build_pointer_type (integer_type_node); 6299 1.1.1.2 mrg tree p = build_global_decl ("p", int_star); 6300 1.1.1.2 mrg 6301 1.1.1.2 mrg tree int_17 = build_int_cst (integer_type_node, 17); 6302 1.1.1.2 mrg tree addr_of_x = build1 (ADDR_EXPR, int_star, x); 6303 1.1.1.2 mrg tree offset_0 = build_int_cst (integer_type_node, 0); 6304 1.1.1.2 mrg tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0); 6305 1.1.1.2 mrg 6306 1.1.1.2 mrg region_model_manager mgr; 6307 1.1.1.2 mrg region_model model (&mgr); 6308 1.1.1.2 mrg 6309 1.1.1.2 mrg /* "x = 17;". */ 6310 1.1.1.2 mrg model.set_value (x, int_17, NULL); 6311 1.1.1.2 mrg 6312 1.1.1.2 mrg /* "p = &x;". */ 6313 1.1.1.2 mrg model.set_value (p, addr_of_x, NULL); 6314 1.1.1.2 mrg 6315 1.1.1.2 mrg const svalue *sval = model.get_rvalue (star_p, NULL); 6316 1.1.1.2 mrg ASSERT_EQ (sval->maybe_get_constant (), int_17); 6317 1.1.1.2 mrg } 6318 1.1.1.2 mrg 6319 1.1.1.2 mrg /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF. 
6320 1.1.1.2 mrg Analogous to this code: 6321 1.1.1.2 mrg void test_6 (int a[10]) 6322 1.1.1.2 mrg { 6323 1.1.1.2 mrg __analyzer_eval (a[3] == 42); [should be UNKNOWN] 6324 1.1.1.2 mrg a[3] = 42; 6325 1.1.1.2 mrg __analyzer_eval (a[3] == 42); [should be TRUE] 6326 1.1.1.2 mrg } 6327 1.1.1.2 mrg from data-model-1.c, which looks like this at the gimple level: 6328 1.1.1.2 mrg # __analyzer_eval (a[3] == 42); [should be UNKNOWN] 6329 1.1.1.2 mrg int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR 6330 1.1.1.2 mrg int _2 = *_1; # MEM_REF 6331 1.1.1.2 mrg _Bool _3 = _2 == 42; 6332 1.1.1.2 mrg int _4 = (int) _3; 6333 1.1.1.2 mrg __analyzer_eval (_4); 6334 1.1.1.2 mrg 6335 1.1.1.2 mrg # a[3] = 42; 6336 1.1.1.2 mrg int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR 6337 1.1.1.2 mrg *_5 = 42; # MEM_REF 6338 1.1.1.2 mrg 6339 1.1.1.2 mrg # __analyzer_eval (a[3] == 42); [should be TRUE] 6340 1.1.1.2 mrg int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR 6341 1.1.1.2 mrg int _7 = *_6; # MEM_REF 6342 1.1.1.2 mrg _Bool _8 = _7 == 42; 6343 1.1.1.2 mrg int _9 = (int) _8; 6344 1.1.1.2 mrg __analyzer_eval (_9); */ 6345 1.1.1.2 mrg 6346 1.1.1.2 mrg static void 6347 1.1.1.2 mrg test_POINTER_PLUS_EXPR_then_MEM_REF () 6348 1.1.1.2 mrg { 6349 1.1.1.2 mrg tree int_star = build_pointer_type (integer_type_node); 6350 1.1.1.2 mrg tree a = build_global_decl ("a", int_star); 6351 1.1.1.2 mrg tree offset_12 = build_int_cst (size_type_node, 12); 6352 1.1.1.2 mrg tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12); 6353 1.1.1.2 mrg tree offset_0 = build_int_cst (integer_type_node, 0); 6354 1.1.1.2 mrg tree mem_ref = build2 (MEM_REF, integer_type_node, 6355 1.1.1.2 mrg pointer_plus_expr, offset_0); 6356 1.1.1.2 mrg region_model_manager mgr; 6357 1.1.1.2 mrg region_model m (&mgr); 6358 1.1.1.2 mrg 6359 1.1.1.2 mrg tree int_42 = build_int_cst (integer_type_node, 42); 6360 1.1.1.2 mrg m.set_value (mem_ref, int_42, NULL); 6361 1.1.1.2 mrg ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), 
int_42); 6362 1.1.1.2 mrg } 6363 1.1.1.2 mrg 6364 1.1.1.2 mrg /* Verify that malloc works. */ 6365 1.1.1.2 mrg 6366 1.1.1.2 mrg static void 6367 1.1.1.2 mrg test_malloc () 6368 1.1.1.2 mrg { 6369 1.1.1.2 mrg tree int_star = build_pointer_type (integer_type_node); 6370 1.1.1.2 mrg tree p = build_global_decl ("p", int_star); 6371 1.1.1.2 mrg tree n = build_global_decl ("n", integer_type_node); 6372 1.1.1.2 mrg tree n_times_4 = build2 (MULT_EXPR, size_type_node, 6373 1.1.1.2 mrg n, build_int_cst (size_type_node, 4)); 6374 1.1.1.2 mrg 6375 1.1.1.2 mrg region_model_manager mgr; 6376 1.1.1.2 mrg test_region_model_context ctxt; 6377 1.1.1.2 mrg region_model model (&mgr); 6378 1.1.1.2 mrg 6379 1.1.1.2 mrg /* "p = malloc (n * 4);". */ 6380 1.1.1.2 mrg const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt); 6381 1.1.1.2 mrg const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt); 6382 1.1.1.2 mrg const svalue *ptr = mgr.get_ptr_svalue (int_star, reg); 6383 1.1.1.2 mrg model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt); 6384 1.1.1.2 mrg ASSERT_EQ (model.get_capacity (reg), size_sval); 6385 1.1.1.2 mrg } 6386 1.1.1.2 mrg 6387 1.1.1.2 mrg /* Verify that alloca works. 
*/ 6388 1.1.1.2 mrg 6389 1.1.1.2 mrg static void 6390 1.1.1.2 mrg test_alloca () 6391 1.1.1.2 mrg { 6392 1.1.1.2 mrg auto_vec <tree> param_types; 6393 1.1.1.2 mrg tree fndecl = make_fndecl (integer_type_node, 6394 1.1.1.2 mrg "test_fn", 6395 1.1.1.2 mrg param_types); 6396 1.1.1.2 mrg allocate_struct_function (fndecl, true); 6397 1.1.1.2 mrg 6398 1.1.1.2 mrg 6399 1.1.1.2 mrg tree int_star = build_pointer_type (integer_type_node); 6400 1.1.1.2 mrg tree p = build_global_decl ("p", int_star); 6401 1.1.1.2 mrg tree n = build_global_decl ("n", integer_type_node); 6402 1.1.1.2 mrg tree n_times_4 = build2 (MULT_EXPR, size_type_node, 6403 1.1.1.2 mrg n, build_int_cst (size_type_node, 4)); 6404 1.1.1.2 mrg 6405 1.1.1.2 mrg region_model_manager mgr; 6406 1.1.1.2 mrg test_region_model_context ctxt; 6407 1.1.1.2 mrg region_model model (&mgr); 6408 1.1.1.2 mrg 6409 1.1.1.2 mrg /* Push stack frame. */ 6410 1.1.1.2 mrg const region *frame_reg 6411 1.1.1.2 mrg = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), 6412 1.1.1.2 mrg NULL, &ctxt); 6413 1.1.1.2 mrg /* "p = alloca (n * 4);". */ 6414 1.1.1.2 mrg const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt); 6415 1.1.1.2 mrg const region *reg = model.create_region_for_alloca (size_sval, &ctxt); 6416 1.1.1.2 mrg ASSERT_EQ (reg->get_parent_region (), frame_reg); 6417 1.1.1.2 mrg const svalue *ptr = mgr.get_ptr_svalue (int_star, reg); 6418 1.1.1.2 mrg model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt); 6419 1.1.1.2 mrg ASSERT_EQ (model.get_capacity (reg), size_sval); 6420 1.1.1.2 mrg 6421 1.1.1.2 mrg /* Verify that the pointers to the alloca region are replaced by 6422 1.1.1.2 mrg poisoned values when the frame is popped. */ 6423 1.1.1.2 mrg model.pop_frame (NULL, NULL, &ctxt); 6424 1.1.1.2 mrg ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED); 6425 1.1.1.2 mrg } 6426 1.1.1.2 mrg 6427 1.1.1.2 mrg /* Verify that svalue::involves_p works. 
*/ 6428 1.1.1.2 mrg 6429 1.1.1.2 mrg static void 6430 1.1.1.2 mrg test_involves_p () 6431 1.1.1.2 mrg { 6432 1.1.1.2 mrg region_model_manager mgr; 6433 1.1.1.2 mrg tree int_star = build_pointer_type (integer_type_node); 6434 1.1.1.2 mrg tree p = build_global_decl ("p", int_star); 6435 1.1.1.2 mrg tree q = build_global_decl ("q", int_star); 6436 1.1.1.2 mrg 6437 1.1.1.2 mrg test_region_model_context ctxt; 6438 1.1.1.2 mrg region_model model (&mgr); 6439 1.1.1.2 mrg const svalue *p_init = model.get_rvalue (p, &ctxt); 6440 1.1.1.2 mrg const svalue *q_init = model.get_rvalue (q, &ctxt); 6441 1.1.1.2 mrg 6442 1.1.1.2 mrg ASSERT_TRUE (p_init->involves_p (p_init)); 6443 1.1.1.2 mrg ASSERT_FALSE (p_init->involves_p (q_init)); 6444 1.1.1.2 mrg 6445 1.1.1.2 mrg const region *star_p_reg = mgr.get_symbolic_region (p_init); 6446 1.1.1.2 mrg const region *star_q_reg = mgr.get_symbolic_region (q_init); 6447 1.1.1.2 mrg 6448 1.1.1.2 mrg const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg); 6449 1.1.1.2 mrg const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg); 6450 1.1.1.2 mrg 6451 1.1.1.2 mrg ASSERT_TRUE (init_star_p->involves_p (p_init)); 6452 1.1.1.2 mrg ASSERT_FALSE (p_init->involves_p (init_star_p)); 6453 1.1.1.2 mrg ASSERT_FALSE (init_star_p->involves_p (q_init)); 6454 1.1.1.2 mrg ASSERT_TRUE (init_star_q->involves_p (q_init)); 6455 1.1.1.2 mrg ASSERT_FALSE (init_star_q->involves_p (p_init)); 6456 1.1.1.2 mrg } 6457 1.1.1.2 mrg 6458 1.1 mrg /* Run all of the selftests within this file. 
*/ 6459 1.1 mrg 6460 1.1 mrg void 6461 1.1 mrg analyzer_region_model_cc_tests () 6462 1.1 mrg { 6463 1.1 mrg test_tree_cmp_on_constants (); 6464 1.1 mrg test_dump (); 6465 1.1.1.2 mrg test_struct (); 6466 1.1.1.2 mrg test_array_1 (); 6467 1.1 mrg test_get_representative_tree (); 6468 1.1 mrg test_unique_constants (); 6469 1.1.1.2 mrg test_unique_unknowns (); 6470 1.1.1.2 mrg test_initial_svalue_folding (); 6471 1.1.1.2 mrg test_unaryop_svalue_folding (); 6472 1.1.1.2 mrg test_binop_svalue_folding (); 6473 1.1.1.2 mrg test_sub_svalue_folding (); 6474 1.1.1.2 mrg test_descendent_of_p (); 6475 1.1.1.2 mrg test_bit_range_regions (); 6476 1.1 mrg test_assignment (); 6477 1.1 mrg test_compound_assignment (); 6478 1.1 mrg test_stack_frames (); 6479 1.1 mrg test_get_representative_path_var (); 6480 1.1.1.2 mrg test_equality_1 (); 6481 1.1 mrg test_canonicalization_2 (); 6482 1.1 mrg test_canonicalization_3 (); 6483 1.1 mrg test_canonicalization_4 (); 6484 1.1 mrg test_state_merging (); 6485 1.1 mrg test_constraint_merging (); 6486 1.1.1.2 mrg test_widening_constraints (); 6487 1.1.1.2 mrg test_iteration_1 (); 6488 1.1 mrg test_malloc_constraints (); 6489 1.1.1.2 mrg test_var (); 6490 1.1.1.2 mrg test_array_2 (); 6491 1.1.1.2 mrg test_mem_ref (); 6492 1.1.1.2 mrg test_POINTER_PLUS_EXPR_then_MEM_REF (); 6493 1.1.1.2 mrg test_malloc (); 6494 1.1.1.2 mrg test_alloca (); 6495 1.1.1.2 mrg test_involves_p (); 6496 1.1 mrg } 6497 1.1 mrg 6498 1.1 mrg } // namespace selftest 6499 1.1 mrg 6500 1.1 mrg #endif /* CHECKING_P */ 6501 1.1 mrg 6502 1.1 mrg } // namespace ana 6503 1.1 mrg 6504 1.1 mrg #endif /* #if ENABLE_ANALYZER */ 6505