Home | History | Annotate | Line # | Download | only in gcc
      1 /* Implements exception handling.
      2    Copyright (C) 1989-2022 Free Software Foundation, Inc.
      3    Contributed by Mike Stump <mrs (at) cygnus.com>.
      4 
      5 This file is part of GCC.
      6 
      7 GCC is free software; you can redistribute it and/or modify it under
      8 the terms of the GNU General Public License as published by the Free
      9 Software Foundation; either version 3, or (at your option) any later
     10 version.
     11 
     12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
     14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     15 for more details.
     16 
     17 You should have received a copy of the GNU General Public License
     18 along with GCC; see the file COPYING3.  If not see
     19 <http://www.gnu.org/licenses/>.  */
     20 
     21 
     22 /* An exception is an event that can be "thrown" from within a
     23    function.  This event can then be "caught" by the callers of
     24    the function.
     25 
     26    The representation of exceptions changes several times during
     27    the compilation process:
     28 
     29    In the beginning, in the front end, we have the GENERIC trees
     30    TRY_CATCH_EXPR, TRY_FINALLY_EXPR, EH_ELSE_EXPR, WITH_CLEANUP_EXPR,
     31    CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
     32 
     33    During initial gimplification (gimplify.cc) these are lowered to the
     34    GIMPLE_TRY, GIMPLE_CATCH, GIMPLE_EH_ELSE, and GIMPLE_EH_FILTER
     35    nodes.  The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are
     36    converted into GIMPLE_TRY_FINALLY nodes; the others are a more
     37    direct 1-1 conversion.
     38 
     39    During pass_lower_eh (tree-eh.cc) we record the nested structure
     40    of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
     41    We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
     42    regions at this time.  We can then flatten the statements within
     43    the TRY nodes to straight-line code.  Statements that had been within
     44    TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
     45    so that we may remember what action is supposed to be taken if
     46    a given statement does throw.  During this lowering process,
     47    we create an EH_LANDING_PAD node for each EH_REGION that has
     48    some code within the function that needs to be executed if a
     49    throw does happen.  We also create RESX statements that are
     50    used to transfer control from an inner EH_REGION to an outer
     51    EH_REGION.  We also create EH_DISPATCH statements as placeholders
     52    for a runtime type comparison that should be made in order to
     53    select the action to perform among different CATCH and EH_FILTER
     54    regions.
     55 
     56    During pass_lower_eh_dispatch (tree-eh.cc), which is run after
     57    all inlining is complete, we are able to run assign_filter_values,
     58    which allows us to map the set of types manipulated by all of the
     59    CATCH and EH_FILTER regions to a set of integers.  This set of integers
     60    will be how the exception runtime communicates with the code generated
     61    within the function.  We then expand the GIMPLE_EH_DISPATCH statements
     62    to a switch or conditional branches that use the argument provided by
     63    the runtime (__builtin_eh_filter) and the set of integers we computed
     64    in assign_filter_values.
     65 
     66    During pass_lower_resx (tree-eh.cc), which is run near the end
     67    of optimization, we expand RESX statements.  If the eh region
     68    that is outer to the RESX statement is a MUST_NOT_THROW, then
     69    the RESX expands to some form of abort statement.  If the eh
     70    region that is outer to the RESX statement is within the current
     71    function, then the RESX expands to a bookkeeping call
     72    (__builtin_eh_copy_values) and a goto.  Otherwise, the next
     73    handler for the exception must be within a function somewhere
     74    up the call chain, so we call back into the exception runtime
     75    (__builtin_unwind_resume).
     76 
     77    During pass_expand (cfgexpand.cc), we generate REG_EH_REGION notes
     78    that create an rtl to eh_region mapping that corresponds to the
     79    gimple to eh_region mapping that had been recorded in the
     80    THROW_STMT_TABLE.
     81 
     82    Then, via finish_eh_generation, we generate the real landing pads
     83    to which the runtime will actually transfer control.  These new
     84    landing pads perform whatever bookkeeping is needed by the target
     85    backend in order to resume execution within the current function.
     86    Each of these new landing pads falls through into the post_landing_pad
     87    label which had been used within the CFG up to this point.  All
     88    exception edges within the CFG are redirected to the new landing pads.
     89    If the target uses setjmp to implement exceptions, the various extra
     90    calls into the runtime to register and unregister the current stack
     91    frame are emitted at this time.
     92 
     93    During pass_convert_to_eh_region_ranges (except.cc), we transform
     94    the REG_EH_REGION notes attached to individual insns into
     95    non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
     96    and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
     97    same associated action within the exception region tree, meaning
     98    that (1) the exception is caught by the same landing pad within the
     99    current function, (2) the exception is blocked by the runtime with
    100    a MUST_NOT_THROW region, or (3) the exception is not handled at all
    101    within the current function.
    102 
    103    Finally, during assembly generation, we call
    104    output_function_exception_table (except.cc) to emit the tables with
    105    which the exception runtime can determine if a given stack frame
    106    handles a given exception, and if so what filter value to provide
    107    to the function when the non-local control transfer is effected.
    108    If the target uses dwarf2 unwinding to implement exceptions, then
    109    output_call_frame_info (dwarf2out.cc) emits the required unwind data.  */
    110 
    111 
    112 #include "config.h"
    113 #include "system.h"
    114 #include "coretypes.h"
    115 #include "backend.h"
    116 #include "target.h"
    117 #include "rtl.h"
    118 #include "tree.h"
    119 #include "cfghooks.h"
    120 #include "tree-pass.h"
    121 #include "memmodel.h"
    122 #include "tm_p.h"
    123 #include "stringpool.h"
    124 #include "expmed.h"
    125 #include "optabs.h"
    126 #include "emit-rtl.h"
    127 #include "cgraph.h"
    128 #include "diagnostic.h"
    129 #include "fold-const.h"
    130 #include "stor-layout.h"
    131 #include "explow.h"
    132 #include "stmt.h"
    133 #include "expr.h"
    134 #include "calls.h"
    135 #include "libfuncs.h"
    136 #include "except.h"
    137 #include "output.h"
    138 #include "dwarf2asm.h"
    139 #include "dwarf2.h"
    140 #include "common/common-target.h"
    141 #include "langhooks.h"
    142 #include "cfgrtl.h"
    143 #include "tree-pretty-print.h"
    144 #include "cfgloop.h"
    145 #include "builtins.h"
    146 #include "tree-hash-traits.h"
    147 #include "flags.h"
    148 
/* Running base added to each function's call-site indices so that values
   emitted into the unit-wide tables do not collide across functions.  */
static GTY(()) int call_site_base;

/* Maps an exception type (a tree) to the object the language runtime uses
   to identify it; populated lazily by add_type_for_runtime and read back
   by lookup_type_for_runtime.  */
static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;

/* FUNCTION_DECL for the library "setjmp", built in init_eh when
   DONT_USE_BUILTIN_SETJMP is defined.  */
static GTY(()) tree setjmp_fn;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
/* Byte offsets of the interesting fields within that structure,
   cached after layout_type in init_eh for easy access from rtl.  */
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
    162 
    163 
/* One call-site table entry: the landing pad receiving control for this
   call site (may be null), and an action-table index encoded as described
   in the comment below action_record.  */
struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
    170 
    171 /* In the following structure and associated functions,
    172    we represent entries in the action table as 1-based indices.
    173    Special cases are:
    174 
    175 	 0:	null action record, non-null landing pad; implies cleanups
    176 	-1:	null action record, null landing pad; implies no action
    177 	-2:	no call-site entry; implies must_not_throw
    178 	-3:	we have yet to process outer regions
    179 
    180    Further, no special cases apply to the "next" field of the record.
    181    For next, 0 means end of list.  */
    182 
struct action_record
{
  int offset;	/* Position of this record in the action table —
		   presumably a byte offset; TODO confirm at the
		   emission site (not visible in this chunk).  */
  int filter;	/* Filter value; see add_ttypes_entry (positive, 1-based)
		   and add_ehspec_entry (negative).  */
  int next;	/* Link to the following record; 0 means end of list
		   (see the comment above).  */
};
    189 
    190 /* Hashtable helpers.  */
    191 
/* Hash traits for deduplicating action_record entries: records compare
   equal when both their filter and next fields match.  */
struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};
    197 
    198 inline hashval_t
    199 action_record_hasher::hash (const action_record *entry)
    200 {
    201   return entry->next * 1009 + entry->filter;
    202 }
    203 
    204 inline bool
    205 action_record_hasher::equal (const action_record *entry,
    206 			     const action_record *data)
    207 {
    208   return entry->filter == data->filter && entry->next == data->next;
    209 }
    210 
    211 typedef hash_table<action_record_hasher> action_hash_type;
    212 
    213 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
    215 					   eh_landing_pad *);
    216 
    217 static void dw2_build_landing_pads (void);
    218 
    219 static int collect_one_action_chain (action_hash_type *, eh_region);
    220 static int add_call_site (rtx, int, int);
    221 
    222 static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
    223 static void push_sleb128 (vec<uchar, va_gc> **, int);
    224 static int dw2_size_of_call_site_table (int);
    225 static int sjlj_size_of_call_site_table (void);
    226 static void dw2_output_call_site_table (int, int);
    227 static void sjlj_output_call_site_table (void);
    228 
    229 
/* One-time, per-translation-unit initialization of the EH machinery:
   allocate the runtime-type map and, when the target uses setjmp/longjmp
   exceptions, build the tree type describing SjLj_Function_Context and
   cache its field offsets.  A no-op unless -fexceptions is enabled.  */
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* unwind_word __data[4];  */
      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

      /* Pick the upper bound of the __jbuf index type (element count
	 minus one), which depends on whether the runtime's setjmp or
	 the builtin is used.  */
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      /* void *__jbuf[N];  */
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);

      /* Declare the external library setjmp used when the builtin
	 cannot be: int setjmp (__jbuf-typed buffer).  */
#ifdef DONT_USE_BUILTIN_SETJMP
      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
				      NULL);
      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
			      get_identifier ("setjmp"), tmp);
      TREE_PUBLIC (setjmp_fn) = 1;
      DECL_EXTERNAL (setjmp_fn) = 1;
      DECL_ASSEMBLER_NAME (setjmp_fn);
#endif
    }
}
    352 
    353 void
    354 init_eh_for_function (void)
    355 {
    356   cfun->eh = ggc_cleared_alloc<eh_status> ();
    357 
    358   /* Make sure zero'th entries are used.  */
    359   vec_safe_push (cfun->eh->region_array, (eh_region)0);
    360   vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
    361 }
    362 
    363 /* Routines to generate the exception tree somewhat directly.
    365    These are used from tree-eh.cc when processing exception related
    366    nodes during tree optimization.  */
    367 
    368 static eh_region
    369 gen_eh_region (enum eh_region_type type, eh_region outer)
    370 {
    371   eh_region new_eh;
    372 
    373   /* Insert a new blank region as a leaf in the tree.  */
    374   new_eh = ggc_cleared_alloc<eh_region_d> ();
    375   new_eh->type = type;
    376   new_eh->outer = outer;
    377   if (outer)
    378     {
    379       new_eh->next_peer = outer->inner;
    380       outer->inner = new_eh;
    381     }
    382   else
    383     {
    384       new_eh->next_peer = cfun->eh->region_tree;
    385       cfun->eh->region_tree = new_eh;
    386     }
    387 
    388   new_eh->index = vec_safe_length (cfun->eh->region_array);
    389   vec_safe_push (cfun->eh->region_array, new_eh);
    390 
    391   /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
    392   if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    393     new_eh->use_cxa_end_cleanup = true;
    394 
    395   return new_eh;
    396 }
    397 
    398 eh_region
    399 gen_eh_region_cleanup (eh_region outer)
    400 {
    401   return gen_eh_region (ERT_CLEANUP, outer);
    402 }
    403 
    404 eh_region
    405 gen_eh_region_try (eh_region outer)
    406 {
    407   return gen_eh_region (ERT_TRY, outer);
    408 }
    409 
    410 eh_catch
    411 gen_eh_region_catch (eh_region t, tree type_or_list)
    412 {
    413   eh_catch c, l;
    414   tree type_list, type_node;
    415 
    416   gcc_assert (t->type == ERT_TRY);
    417 
    418   /* Ensure to always end up with a type list to normalize further
    419      processing, then register each type against the runtime types map.  */
    420   type_list = type_or_list;
    421   if (type_or_list)
    422     {
    423       if (TREE_CODE (type_or_list) != TREE_LIST)
    424 	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
    425 
    426       type_node = type_list;
    427       for (; type_node; type_node = TREE_CHAIN (type_node))
    428 	add_type_for_runtime (TREE_VALUE (type_node));
    429     }
    430 
    431   c = ggc_cleared_alloc<eh_catch_d> ();
    432   c->type_list = type_list;
    433   l = t->u.eh_try.last_catch;
    434   c->prev_catch = l;
    435   if (l)
    436     l->next_catch = c;
    437   else
    438     t->u.eh_try.first_catch = c;
    439   t->u.eh_try.last_catch = c;
    440 
    441   return c;
    442 }
    443 
    444 eh_region
    445 gen_eh_region_allowed (eh_region outer, tree allowed)
    446 {
    447   eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
    448   region->u.allowed.type_list = allowed;
    449 
    450   for (; allowed ; allowed = TREE_CHAIN (allowed))
    451     add_type_for_runtime (TREE_VALUE (allowed));
    452 
    453   return region;
    454 }
    455 
    456 eh_region
    457 gen_eh_region_must_not_throw (eh_region outer)
    458 {
    459   return gen_eh_region (ERT_MUST_NOT_THROW, outer);
    460 }
    461 
    462 eh_landing_pad
    463 gen_eh_landing_pad (eh_region region)
    464 {
    465   eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
    466 
    467   lp->next_lp = region->landing_pads;
    468   lp->region = region;
    469   lp->index = vec_safe_length (cfun->eh->lp_array);
    470   region->landing_pads = lp;
    471 
    472   vec_safe_push (cfun->eh->lp_array, lp);
    473 
    474   return lp;
    475 }
    476 
    477 eh_region
    478 get_eh_region_from_number_fn (struct function *ifun, int i)
    479 {
    480   return (*ifun->eh->region_array)[i];
    481 }
    482 
    483 eh_region
    484 get_eh_region_from_number (int i)
    485 {
    486   return get_eh_region_from_number_fn (cfun, i);
    487 }
    488 
    489 eh_landing_pad
    490 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
    491 {
    492   return (*ifun->eh->lp_array)[i];
    493 }
    494 
    495 eh_landing_pad
    496 get_eh_landing_pad_from_number (int i)
    497 {
    498   return get_eh_landing_pad_from_number_fn (cfun, i);
    499 }
    500 
    501 eh_region
    502 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
    503 {
    504   if (i < 0)
    505     return (*ifun->eh->region_array)[-i];
    506   else if (i == 0)
    507     return NULL;
    508   else
    509     {
    510       eh_landing_pad lp;
    511       lp = (*ifun->eh->lp_array)[i];
    512       return lp->region;
    513     }
    514 }
    515 
    516 eh_region
    517 get_eh_region_from_lp_number (int i)
    518 {
    519   return get_eh_region_from_lp_number_fn (cfun, i);
    520 }
    521 
    522 /* Returns true if the current function has exception handling regions.  */
    524 
    525 bool
    526 current_function_has_exception_handlers (void)
    527 {
    528   return cfun->eh->region_tree != NULL;
    529 }
    530 
    531 /* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
    533    Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */
    534 
struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;	/* Callback remapping labels.  */
  void *label_map_data;			/* Opaque cookie for LABEL_MAP.  */
  hash_map<void *, void *> *eh_map;	/* old region/lp -> new region/lp,
					   returned to the caller.  */
};
    541 
/* Recursively copy the EH region subtree rooted at OLD_R into the current
   function, rooting the copy at OUTER.  Records old->new mappings for both
   regions and landing pads in DATA->EH_MAP, and remaps all labels through
   DATA->LABEL_MAP.  */
static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  bool existed = data->eh_map->put (old_r, new_r);
  gcc_assert (!existed);

  /* Copy the per-kind payload of the region.  */
  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  /* Copy the region's landing pads, remapping their post-landing-pad
     labels and recording the old->new pairs.  */
  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      bool existed = data->eh_map->put (old_lp, new_lp);
      gcc_assert (!existed);

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  /* Recurse over OLD_R's children, attaching their copies below NEW_R.  */
  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
    610 
    611 /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
    612    the current function and root the tree below OUTER_REGION.
    613    The special case of COPY_REGION of NULL means all regions.
    614    Remap labels using MAP/MAP_DATA callback.  Return a pointer map
    615    that allows the caller to remap uses of both EH regions and
    616    EH landing pads.  */
    617 
    618 hash_map<void *, void *> *
    619 duplicate_eh_regions (struct function *ifun,
    620 		      eh_region copy_region, int outer_lp,
    621 		      duplicate_eh_regions_map map, void *map_data)
    622 {
    623   struct duplicate_eh_regions_data data;
    624   eh_region outer_region;
    625 
    626   if (flag_checking)
    627     verify_eh_tree (ifun);
    628 
    629   data.label_map = map;
    630   data.label_map_data = map_data;
    631   data.eh_map = new hash_map<void *, void *>;
    632 
    633   outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
    634 
    635   /* Copy all the regions in the subtree.  */
    636   if (copy_region)
    637     duplicate_eh_regions_1 (&data, copy_region, outer_region);
    638   else
    639     {
    640       eh_region r;
    641       for (r = ifun->eh->region_tree; r ; r = r->next_peer)
    642 	duplicate_eh_regions_1 (&data, r, outer_region);
    643     }
    644 
    645   if (flag_checking)
    646     verify_eh_tree (cfun);
    647 
    648   return data.eh_map;
    649 }
    650 
    651 /* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */
    652 
    653 eh_region
    654 eh_region_outermost (struct function *ifun, eh_region region_a,
    655 		     eh_region region_b)
    656 {
    657   gcc_assert (ifun->eh->region_array);
    658   gcc_assert (ifun->eh->region_tree);
    659 
    660   auto_sbitmap b_outer (ifun->eh->region_array->length ());
    661   bitmap_clear (b_outer);
    662 
    663   do
    664     {
    665       bitmap_set_bit (b_outer, region_b->index);
    666       region_b = region_b->outer;
    667     }
    668   while (region_b);
    669 
    670   do
    671     {
    672       if (bitmap_bit_p (b_outer, region_a->index))
    673 	break;
    674       region_a = region_a->outer;
    675     }
    676   while (region_a);
    677 
    678   return region_a;
    679 }
    680 
    681 void
    683 add_type_for_runtime (tree type)
    684 {
    685   /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
    686   if (TREE_CODE (type) == NOP_EXPR)
    687     return;
    688 
    689   bool existed = false;
    690   tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
    691   if (!existed)
    692     *slot = lang_hooks.eh_runtime_type (type);
    693 }
    694 
    695 tree
    696 lookup_type_for_runtime (tree type)
    697 {
    698   /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
    699   if (TREE_CODE (type) == NOP_EXPR)
    700     return type;
    701 
    702   /* We should have always inserted the data earlier.  */
    703   return *type_to_runtime_map->get (type);
    704 }
    705 
    706 
    707 /* Represent an entry in @TTypes for either catch actions
    709    or exception filter actions.  */
struct ttypes_filter {
  tree t;	/* The type (@TTypes entry) or type list (ehspec entry).  */
  int filter;	/* Assigned filter value: 1-based for types, negative
		   for exception-spec lists; see add_ttypes_entry and
		   add_ehspec_entry.  */
};
    714 
    715 /* Helper for ttypes_filter hashing.  */
    716 
/* Hash traits keying ttypes_filter entries by their type tree T, so a
   bare tree can be used as the lookup key (compare_type).  */
struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};
    723 
    724 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
    725    (a tree) for a @TTypes type node we are thinking about adding.  */
    726 
    727 inline bool
    728 ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
    729 {
    730   return entry->t == data;
    731 }
    732 
    733 inline hashval_t
    734 ttypes_filter_hasher::hash (const ttypes_filter *entry)
    735 {
    736   return TREE_HASH (entry->t);
    737 }
    738 
    739 typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
    740 
    741 
    742 /* Helper for ehspec hashing.  */
    743 
/* Hash traits for exception-specification entries, where T is a whole
   type list and equality is list-wise (see equal/hash below).  */
struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};
    749 
    750 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
    751    exception specification list we are thinking about adding.  */
    752 /* ??? Currently we use the type lists in the order given.  Someone
    753    should put these in some canonical order.  */
    754 
    755 inline bool
    756 ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
    757 {
    758   return type_list_equal (entry->t, data->t);
    759 }
    760 
    761 /* Hash function for exception specification lists.  */
    762 
    763 inline hashval_t
    764 ehspec_hasher::hash (const ttypes_filter *entry)
    765 {
    766   hashval_t h = 0;
    767   tree list;
    768 
    769   for (list = entry->t; list ; list = TREE_CHAIN (list))
    770     h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
    771   return h;
    772 }
    773 
    774 typedef hash_table<ehspec_hasher> ehspec_hash_type;
    775 
    776 
    777 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
    778    to speed up the search.  Return the filter value to be used.  */
    779 
    780 static int
    781 add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
    782 {
    783   struct ttypes_filter **slot, *n;
    784 
    785   slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
    786 					  INSERT);
    787 
    788   if ((n = *slot) == NULL)
    789     {
    790       /* Filter value is a 1 based table index.  */
    791 
    792       n = XNEW (struct ttypes_filter);
    793       n->t = type;
    794       n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
    795       *slot = n;
    796 
    797       vec_safe_push (cfun->eh->ttype_data, type);
    798     }
    799 
    800   return n->filter;
    801 }
    802 
    803 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
    804    to speed up the search.  Return the filter value to be used.  */
    805 
static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe the hash with a stack dummy so we only allocate on a miss.  */
  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      /* The spec data lives in one of two formats: a vec of trees for
	 the ARM EABI unwinder, or a uleb128 byte buffer otherwise.  */
      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      /* Terminator: a null tree / zero byte ends the spec list.  */
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
    853 
    854 /* Generate the action filter values to be used for CATCH and
    855    ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
    856    we use lots of landing pads, and so every type or list can share
    857    the same filter value, which saves table space.  */
    858 
void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  /* Allocate fresh output tables; the hash tables below exist only for
     the duration of this function.  */
  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  /* Walk every region in the function (index 0 is unused).  */
  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
		       	= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}
    928 
/* Emit SEQ into a basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *next, *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);

  /* Make sure to put the location of INSN or a subsequent instruction on SEQ
     to avoid inheriting the location of the previous instruction.  */
  next = insn;
  while (next && !NONDEBUG_INSN_P (next))
    next = NEXT_INSN (next);
  if (next)
    last = emit_insn_before_setloc (seq, insn, INSN_LOCATION (next));
  else
    last = emit_insn_before (seq, insn);
  /* If emission left a barrier at the end, don't include it in the
     new block.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  /* Mark the block; finish_eh_generation calls break_superblocks to
     clean these up afterwards.  */
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
    965 
    966 /* A subroutine of dw2_build_landing_pads, also used for edge splitting
    968    at the rtl level.  Emit the code required by the target at a landing
    969    pad for the given region.  */
    970 
    971 static void
    972 expand_dw2_landing_pad_for_region (eh_region region)
    973 {
    974   if (targetm.have_exception_receiver ())
    975     emit_insn (targetm.gen_exception_receiver ());
    976   else if (targetm.have_nonlocal_goto_receiver ())
    977     emit_insn (targetm.gen_nonlocal_goto_receiver ());
    978   else
    979     { /* Nothing */ }
    980 
    981   if (region->exc_ptr_reg)
    982     emit_move_insn (region->exc_ptr_reg,
    983 		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
    984   if (region->filter_reg)
    985     emit_move_insn (region->filter_reg,
    986 		    gen_rtx_REG (targetm.eh_return_filter_mode (),
    987 				 EH_RETURN_DATA_REGNO (1)));
    988 }
    989 
    990 /* Expand the extra code needed at landing pads for dwarf2 unwinding.  */
    991 
static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      /* Emit the landing pad label followed by the target-specific
	 receiver code for the pad's region.  */
      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      /* Place the pad in its own block, falling through to the
	 post-landing-pad block.  */
      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      bb->count = bb->next_bb->count;
      make_single_succ_edge (bb, bb->next_bb, e_flags);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}
   1040 
   1041 
/* Call-site index for each landing pad, indexed by lp->index; filled in
   by sjlj_assign_call_site_values and read by sjlj_mark_call_sites.  */
static vec<int> sjlj_lp_call_site_index;
   1044 
   1045 /* Process all active landing pads.  Assign each one a compact dispatch
   1046    index, and a call-site index.  */
   1047 
static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  /* DISP_INDEX counts the active landing pads and becomes the return
     value; it is also the dispatch value recorded in the call-site
     table entry for each pad.  */
  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}
   1086 
   1087 /* Emit code to record the current call-site index before every
   1088    insn that can throw.  */
   1089 
static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      /* Determine which call-site value this insn needs.  */
      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      /* No store needed if the value is unchanged within the extended
	 basic block.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      /* Store the call-site value into the function context.  */
      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
   1167 
   1168 /* Construct the SjLj_Function_Context.  */
   1169 
static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  /* Store the LSDA address in the function context, or zero if no call
     site in this function needs an LSDA.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      /* Call the target's regular setjmp on the jump buffer and branch
	 to DISPATCH_LABEL on a nonzero return.  */
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label,
			       profile_probability::unlikely ());
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  /* Register the function context with the SjLj runtime.  */
  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Find NOTE_INSN_FUNCTION_BEG and record whether it occurs before
     any basic block note, i.e. outside all blocks.  */
  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

#ifdef DONT_USE_BUILTIN_SETJMP
  if (dispatch_label)
    {
      /* The sequence contains a branch in the middle so we need to force
	 the creation of a new basic block by means of BB_SUPERBLOCK.  */
      if (fn_begin_outside_block)
	{
	  basic_block bb
	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  if (JUMP_P (BB_END (bb)))
	    emit_insn_before (seq, BB_END (bb));
	  else
	    emit_insn_after (seq, BB_END (bb));
	}
      else
	emit_insn_after (seq, fn_begin);

      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
      return;
    }
#endif

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}
   1271 
   1272 /* Call back from expand_function_end to know where we should put
   1273    the call to unwind_sjlj_unregister_libfunc if needed.  */
   1274 
void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  /* Remember where sjlj_emit_function_exit should place the call to
     unwind_sjlj_unregister_libfunc.  */
  crtl->eh.sjlj_exit_after = after;
}
   1280 
/* Emit the call that unregisters the SjLj function context at function
   exit, after the insn recorded by sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  /* Don't emit between a label and its block's insns.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
   1304 
/* Emit the dispatch code at DISPATCH_LABEL that receives control from
   the SjLj runtime and routes it to one of NUM_DISPATCH landing pads.  */

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
  scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	/* With multiple dispatches each pad gets a case label in the
	   switch; with a single one a plain label suffices.  */
	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	/* Copy the runtime-provided exception data into the region's
	   dedicated pseudos.  */
	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	if (current_loops)
	  {
	    class loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ???  For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      /* Switch on the call-site value stored in the function context.  */
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
   1461 
   1462 static void
   1463 sjlj_build_landing_pads (void)
   1464 {
   1465   int num_dispatch;
   1466 
   1467   num_dispatch = vec_safe_length (cfun->eh->lp_array);
   1468   if (num_dispatch == 0)
   1469     return;
   1470   sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch, true);
   1471 
   1472   num_dispatch = sjlj_assign_call_site_values ();
   1473   if (num_dispatch > 0)
   1474     {
   1475       rtx_code_label *dispatch_label = gen_label_rtx ();
   1476       int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
   1477 					TYPE_MODE (sjlj_fc_type_node),
   1478 					TYPE_ALIGN (sjlj_fc_type_node));
   1479       crtl->eh.sjlj_fc
   1480 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
   1481 			      int_size_in_bytes (sjlj_fc_type_node),
   1482 			      align);
   1483 
   1484       sjlj_mark_call_sites ();
   1485       sjlj_emit_function_enter (dispatch_label);
   1486       sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
   1487       sjlj_emit_function_exit ();
   1488     }
   1489 
   1490   /* If we do not have any landing pads, we may still need to register a
   1491      personality routine and (empty) LSDA to handle must-not-throw regions.  */
   1492   else if (function_needs_eh_personality (cfun) != eh_personality_none)
   1493     {
   1494       int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
   1495 					TYPE_MODE (sjlj_fc_type_node),
   1496 					TYPE_ALIGN (sjlj_fc_type_node));
   1497       crtl->eh.sjlj_fc
   1498 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
   1499 			      int_size_in_bytes (sjlj_fc_type_node),
   1500 			      align);
   1501 
   1502       sjlj_mark_call_sites ();
   1503       sjlj_emit_function_enter (NULL);
   1504       sjlj_emit_function_exit ();
   1505     }
   1506 
   1507   sjlj_lp_call_site_index.release ();
   1508 }
   1509 
   1510 /* Update the sjlj function context.  This function should be called
   1511    whenever we allocate or deallocate dynamic stack space.  */
   1512 
   1513 void
   1514 update_sjlj_context (void)
   1515 {
   1516   if (!flag_exceptions)
   1517     return;
   1518 
   1519   emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
   1520 }
   1521 
   1522 /* After initial rtl generation, call back to finish generating
   1523    exception support code.  */
   1524 
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  /* Pad construction marks new blocks BB_SUPERBLOCK (see
     emit_to_new_bb_before); clean those up now.  */
  break_superblocks ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      /* Find the (at most one) EH edge leaving this block.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();
}
   1573 
   1574 /* This section handles removing dead code for flow.  */
   1576 
   1577 void
   1578 remove_eh_landing_pad (eh_landing_pad lp)
   1579 {
   1580   eh_landing_pad *pp;
   1581 
   1582   for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
   1583     continue;
   1584   *pp = lp->next_lp;
   1585 
   1586   if (lp->post_landing_pad)
   1587     EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
   1588   (*cfun->eh->lp_array)[lp->index] = NULL;
   1589 }
   1590 
   1591 /* Splice the EH region at PP from the region tree.  */
   1592 
static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  /* Release all landing pads belonging to the doomed region.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      /* Re-parent REGION's children to its outer region and splice
	 them into REGION's place in the sibling chain.  */
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  /* Append REGION's following siblings after the spliced-in children
     (or directly in REGION's place if there were none).  */
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}
   1624 
   1625 /* Splice a single EH region REGION from the region tree.
   1626 
   1627    To unlink REGION, we need to find the pointer to it with a relatively
   1628    expensive search in REGION's outer region.  If you are going to
   1629    remove a number of handlers, using remove_unreachable_eh_regions may
   1630    be a better option.  */
   1631 
   1632 void
   1633 remove_eh_handler (eh_region region)
   1634 {
   1635   eh_region *pp, *pp_start, p, outer;
   1636 
   1637   outer = region->outer;
   1638   if (outer)
   1639     pp_start = &outer->inner;
   1640   else
   1641     pp_start = &cfun->eh->region_tree;
   1642   for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
   1643     continue;
   1644 
   1645   remove_eh_handler_splicer (pp);
   1646 }
   1647 
   1648 /* Worker for remove_unreachable_eh_regions.
   1649    PP is a pointer to the region to start a region tree depth-first
   1650    search from.  R_REACHABLE is the set of regions that have to be
   1651    preserved.  */
   1652 
   1653 static void
   1654 remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
   1655 {
   1656   while (*pp)
   1657     {
   1658       eh_region region = *pp;
   1659       remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
   1660       if (!bitmap_bit_p (r_reachable, region->index))
   1661 	remove_eh_handler_splicer (pp);
   1662       else
   1663 	pp = &region->next_peer;
   1664     }
   1665 }
   1666 
   1667 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   1668    Do this by traversing the EH tree top-down and splice out regions that
   1669    are not marked.  By removing regions from the leaves, we avoid costly
   1670    searches in the region tree.  */
   1671 
void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  /* Depth-first walk starting at the root of the region tree.  */
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}
   1677 
   1678 /* Invokes CALLBACK for every exception handler landing pad label.
   1679    Only used by reload hackery; should not be used by new code.  */
   1680 
   1681 void
   1682 for_each_eh_label (void (*callback) (rtx))
   1683 {
   1684   eh_landing_pad lp;
   1685   int i;
   1686 
   1687   for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
   1688     {
   1689       if (lp)
   1690 	{
   1691 	  rtx_code_label *lab = lp->landing_pad;
   1692 	  if (lab && LABEL_P (lab))
   1693 	    (*callback) (lab);
   1694 	}
   1695     }
   1696 }
   1697 
   1698 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   1700    call insn.
   1701 
   1702    At the gimple level, we use LP_NR
   1703        > 0 : The statement transfers to landing pad LP_NR
   1704        = 0 : The statement is outside any EH region
   1705        < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
   1706 
   1707    At the rtl level, we use LP_NR
   1708        > 0 : The insn transfers to landing pad LP_NR
   1709        = 0 : The insn cannot throw
   1710        < 0 : The insn is within MUST_NOT_THROW region -LP_NR
   1711        = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
   1712        missing note: The insn is outside any EH region.
   1713 
   1714   ??? This difference probably ought to be avoided.  We could stand
   1715   to record nothrow for arbitrary gimple statements, and so avoid
   1716   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   1717   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   1718   no-nonlocal-goto property should be recorded elsewhere as a bit
   1719   on the call_insn directly.  Perhaps we should make more use of
   1720   attaching the trees to call_insns (reachable via symbol_ref in
   1721   direct call cases) and just pull the data out of the trees.  */
   1722 
void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  /* ECF_NOTHROW wins over any landing pad: record "cannot throw".  */
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    /* LP_NR of 0 means "outside any EH region": no note at all.  */
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}
   1735 
   1736 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   1737    nor perform a non-local goto.  Replace the region note if it
   1738    already exists.  */
   1739 
   1740 void
   1741 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
   1742 {
   1743   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
   1744   rtx intmin = GEN_INT (INT_MIN);
   1745 
   1746   if (note != 0)
   1747     XEXP (note, 0) = intmin;
   1748   else
   1749     add_reg_note (insn, REG_EH_REGION, intmin);
   1750 }
   1751 
   1752 /* Return true if INSN could throw, assuming no REG_EH_REGION note
   1753    to the contrary.  */
   1754 
   1755 bool
   1756 insn_could_throw_p (const_rtx insn)
   1757 {
   1758   if (!flag_exceptions)
   1759     return false;
   1760   if (CALL_P (insn))
   1761     return true;
   1762   if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
   1763     return may_trap_p (PATTERN (insn));
   1764   return false;
   1765 }
   1766 
   1767 /* Copy an REG_EH_REGION note to each insn that might throw beginning
   1768    at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   1769    to look for a note, or the note itself.  */
   1770 
void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      /* Given a source insn, dig out its REG_EH_REGION note; with no
	 note there is nothing to propagate.  */
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  else if (is_a <rtx_insn *> (note_or_insn))
    /* A non-active insn (barrier, note, ...) carries no EH region.  */
    return;
  /* Propagate only the region-number operand, not the note rtx.  */
  note = XEXP (note, 0);

  /* Note that LAST itself is excluded: the walk stops when it is
     reached.  Insns that already carry a REG_EH_REGION note are left
     untouched.  */
  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
        && insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}
   1792 
   1793 /* Likewise, but iterate backward.  */
   1794 
void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      /* Given a source insn, dig out its REG_EH_REGION note; with no
	 note there is nothing to propagate.  */
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  else if (is_a <rtx_insn *> (note_or_insn))
    /* A non-active insn (barrier, note, ...) carries no EH region.  */
    return;
  /* Propagate only the region-number operand, not the note rtx.  */
  note = XEXP (note, 0);

  /* Walk from LAST back to, but not including, FIRST.  Unlike the
     forward variant, this does not first check for a pre-existing
     REG_EH_REGION note on the visited insn.  */
  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}
   1815 
   1816 
   1817 /* Extract all EH information from INSN.  Return true if the insn
   1818    was marked NOTHROW.  */
   1819 
static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  /* Non-insns carry no EH data: report not-nothrow with no region.  */
  if (! INSN_P (insn))
    goto egress;

  /* For a delay-slot SEQUENCE, the EH note lives on the first insn.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Missing note: outside any EH region.  The insn is nothrow
	 exactly when it could not have thrown anyway.  */
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  /* 0 and INT_MIN both encode "cannot throw"; see the commentary
     before make_reg_eh_region_note for the full encoding.  */
  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  /* Negative: inside MUST_NOT_THROW region -LP_NR (no landing pad).
     Positive: transfers to landing pad LP_NR.  */
  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
   1864 
   1865 /* Return the landing pad to which INSN may go, or NULL if it does not
   1866    have a reachable landing pad within this function.  */
   1867 
   1868 eh_landing_pad
   1869 get_eh_landing_pad_from_rtx (const_rtx insn)
   1870 {
   1871   eh_landing_pad lp;
   1872   eh_region r;
   1873 
   1874   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
   1875   return lp;
   1876 }
   1877 
   1878 /* Return the region to which INSN may go, or NULL if it does not
   1879    have a reachable region within this function.  */
   1880 
   1881 eh_region
   1882 get_eh_region_from_rtx (const_rtx insn)
   1883 {
   1884   eh_landing_pad lp;
   1885   eh_region r;
   1886 
   1887   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
   1888   return r;
   1889 }
   1890 
   1891 /* Return true if INSN throws and is caught by something in this function.  */
   1892 
   1893 bool
   1894 can_throw_internal (const_rtx insn)
   1895 {
   1896   return get_eh_landing_pad_from_rtx (insn) != NULL;
   1897 }
   1898 
   1899 /* Return true if INSN throws and escapes from the current function.  */
   1900 
bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  /* Non-insns cannot throw at all.  */
  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, ask about each contained insn; any one
     of them escaping makes the whole group escape.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
   1943 
   1944 /* Return true if INSN cannot throw at all.  */
   1945 
bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  /* Non-insns trivially cannot throw.  */
  if (! INSN_P (insn))
    return true;

  /* For a delay-slot SEQUENCE, every contained insn must be nothrow
     for the group to be nothrow.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (!insn_nothrow_p (seq->element (i)))
	  return false;

      return true;
    }

  /* The lookup's return value is exactly the nothrow predicate; the
     region and landing-pad outputs are not needed here.  */
  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}
   1970 
   1971 /* Return true if INSN can perform a non-local goto.  */
   1972 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
   1973 
   1974 bool
   1975 can_nonlocal_goto (const rtx_insn *insn)
   1976 {
   1977   if (nonlocal_goto_handler_labels && CALL_P (insn))
   1978     {
   1979       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
   1980       if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
   1981 	return true;
   1982     }
   1983   return false;
   1984 }
   1985 
   1986 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */
   1988 
static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  /* Start optimistic; the insn scan below may clear these flags.  */
  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    /* Both flags are now settled; no point scanning on.  */
	    return 0;
	  }
      }

  /* Propagate nothrow-ness to callers only when this very body is
     what they will end up calling (availability at least
     AVAIL_AVAILABLE).  */
  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
          >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
        e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
   2041 
namespace {

/* Metadata for the "nothrow" RTL pass.  The pass only computes
   function-wide flags, so it requires, provides, and destroys no IL
   properties and needs no TODO actions.  */
const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper around set_nothrow_function_flags.  */
class pass_set_nothrow_function_flags : public rtl_opt_pass
{
public:
  pass_set_nothrow_function_flags (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return set_nothrow_function_flags ();
    }

}; // class pass_set_nothrow_function_flags

} // anon namespace
   2073 
/* Factory entry point used by the pass manager to instantiate the
   "nothrow" pass; the caller owns the returned object.  */

rtl_opt_pass *
make_pass_set_nothrow_function_flags (gcc::context *ctxt)
{
  return new pass_set_nothrow_function_flags (ctxt);
}
   2079 
   2080 
   2081 /* Various hooks for unwind library.  */
   2083 
   2084 /* Expand the EH support builtin functions:
   2085    __builtin_eh_pointer and __builtin_eh_filter.  */
   2086 
/* Shared helper for the EH builtins below: map the region-number
   argument REGION_NR_T (a compile-time integer constant) to its
   eh_region.  */

static eh_region
expand_builtin_eh_common (tree region_nr_t)
{
  HOST_WIDE_INT region_nr;
  eh_region region;

  /* The region number must be a constant fitting a signed
     HOST_WIDE_INT; these builtins are compiler-generated.  */
  gcc_assert (tree_fits_shwi_p (region_nr_t));
  region_nr = tree_to_shwi (region_nr_t);

  region = (*cfun->eh->region_array)[region_nr];

  /* ??? We shouldn't have been able to delete a eh region without
     deleting all the code that depended on it.  */
  gcc_assert (region != NULL);

  return region;
}
   2104 
   2105 /* Expand to the exc_ptr value from the given eh region.  */
   2106 
   2107 rtx
   2108 expand_builtin_eh_pointer (tree exp)
   2109 {
   2110   eh_region region
   2111     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
   2112   if (region->exc_ptr_reg == NULL)
   2113     region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
   2114   return region->exc_ptr_reg;
   2115 }
   2116 
   2117 /* Expand to the filter value from the given eh region.  */
   2118 
   2119 rtx
   2120 expand_builtin_eh_filter (tree exp)
   2121 {
   2122   eh_region region
   2123     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
   2124   if (region->filter_reg == NULL)
   2125     region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
   2126   return region->filter_reg;
   2127 }
   2128 
   2129 /* Copy the exc_ptr and filter values from one landing pad's registers
   2130    to another.  This is used to inline the resx statement.  */
   2131 
rtx
expand_builtin_eh_copy_values (tree exp)
{
  eh_region dst
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  eh_region src
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
  scalar_int_mode fmode = targetm.eh_return_filter_mode ();

  /* Allocate any of the four pseudos lazily, exactly as the
     individual pointer/filter expanders do.  */
  if (dst->exc_ptr_reg == NULL)
    dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
  if (src->exc_ptr_reg == NULL)
    src->exc_ptr_reg = gen_reg_rtx (ptr_mode);

  if (dst->filter_reg == NULL)
    dst->filter_reg = gen_reg_rtx (fmode);
  if (src->filter_reg == NULL)
    src->filter_reg = gen_reg_rtx (fmode);

  emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
  emit_move_insn (dst->filter_reg, src->filter_reg);

  /* The builtin's value is not used; return a harmless constant.  */
  return const0_rtx;
}
   2156 
   2157 /* Do any necessary initialization to access arbitrary stack frames.
   2158    On the SPARC, this means flushing the register windows.  */
   2159 
void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  /* Let the target do any additional per-frame setup (e.g. flushing
     register windows, per the header comment).  */
  SETUP_FRAME_ADDRESSES ();
}
   2169 
   2170 /* Map a non-negative number to an eh return data register number; expands
   2171    to -1 if no return data register is associated with the input number.
   2172    At least the inputs 0 and 1 must be mapped; the target may provide more.  */
   2173 
rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a literal constant, since the builtin
     expands to a constant.  */
  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_to_uhwi (which);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  /* -1 signals "no data register for this input".  */
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by
     the unwind/debug info.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
   2199 
   2200 /* Given a value extracted from the return address register or stack slot,
   2201    return the actual address encoded in that value.  */
   2202 
rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Bring the value into Pmode if it was expanded in some other
     non-void mode.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
  rtx mask = MASK_RETURN_ADDR;
  if (mask)
    /* NOTE(review): the result rtx of expand_and is discarded; this
       relies on ADDR (passed as the target) being written — confirm.  */
    expand_and (Pmode, addr, mask, addr);

  /* Then adjust to find the real return address.  */
  if (RETURN_ADDR_OFFSET)
    addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);

  return addr;
}
   2229 
   2230 /* Given an actual address in addr_tree, do any necessary encoding
   2231    and return the value to be stored in the return address register or
   2232    stack slot so the epilogue will return to that address.  */
   2233 
rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  if (RETURN_ADDR_OFFSET)
    {
      /* Undo the adjustment that expand_builtin_extract_return_addr
	 applies when reading the address out.  */
      addr = force_reg (Pmode, addr);
      addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
    }

  return addr;
}
   2249 
   2250 /* Set up the epilogue with the magic bits we'll need to return to the
   2251    exception handler.  */
   2252 
void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Evaluate the stack adjustment into a pseudo on the first
     expansion; later expansions copy into that same pseudo.  */
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  /* Likewise for the handler address.  */
  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* The actual return sequence is emitted once, at ehr_label, by
     expand_eh_return; here we just jump to that (lazily created)
     label.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
   2281 
   2282 /* Expand __builtin_eh_return.  This exit path from the function loads up
   2283    the eh return data registers, adjusts the stack, and branches to a
   2284    given PC other than the normal return address.  */
   2285 
void
expand_eh_return (void)
{
  rtx_code_label *around_label;

  /* If no __builtin_eh_return was expanded, there is nothing to do.  */
  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  /* On the normal return path the stack adjustment is zero.  */
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal returns jump around the EH return sequence below.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  /* Prefer the target's eh_return insn pattern; otherwise fall back
     to the EH_RETURN_HANDLER_RTX location, or report that the target
     does not support the builtin at all.  */
  if (targetm.have_eh_return ())
    emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
  else
    {
      if (rtx handler = EH_RETURN_HANDLER_RTX)
	emit_move_insn (handler, crtl->eh.ehr_handler);
      else
	error ("%<__builtin_eh_return%> not supported on this target");
    }

  emit_label (around_label);
}
   2322 
   2323 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   2324    POINTERS_EXTEND_UNSIGNED and return it.  */
   2325 
rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  /* Widen to the mode the unwinder uses for word-sized values.  */
  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
   2342 
/* Return the 1-based offset of an action record with the given FILTER
   and NEXT link, creating it in AR_HASH and emitting its encoding into
   crtl->eh.action_record_data if no identical record exists yet.  */

static int
add_action_record (action_hash_type *ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = ar_hash->find_slot (&tmp, INSERT);

  /* Only emit a new record when no identical one is present.  */
  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = crtl->eh.action_record_data->length () + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= crtl->eh.action_record_data->length () + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
   2374 
/* Compute the chain of action records for REGION and its outer
   regions, creating records via add_action_record.  Returns:
     -1 : no actions and no landing pad required (top of region chain);
     -2 : a MUST_NOT_THROW region — needs an lsda but no call-site
	  entry;
      0 : cleanups only, compressed to a zero action;
     >0 : the 1-based offset of the first action record.  */

static int
collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
   2482 
   2483 static int
   2484 add_call_site (rtx landing_pad, int action, int section)
   2485 {
   2486   call_site_record record;
   2487 
   2488   record = ggc_alloc<call_site_record_d> ();
   2489   record->landing_pad = landing_pad;
   2490   record->action = action;
   2491 
   2492   vec_safe_push (crtl->eh.call_site_record_v[section], record);
   2493 
   2494   return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
   2495 }
   2496 
/* Emit a NOTE_INSN_EH_REGION_END note immediately after INSN and
   return it.  */

static rtx_note *
emit_note_eh_region_end (rtx_insn *insn)
{
  return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}
   2502 
   2503 /* Add NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section starts
   2504    with landing pad.
   2505    With landing pad being at offset 0 from the start label of the section
   2506    we would miss EH delivery because 0 is special and means no landing pad.  */
   2507 
static bool
maybe_add_nop_after_section_switch (void)
{
  /* Nothing to do unless an lsda is used and the cold section has
     call-site records.  */
  if (!crtl->uses_eh_lsda
      || !crtl->eh.call_site_record_v[1])
    return false;
  int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
  hash_set<rtx_insn *> visited;

  for (int i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs
	 = (*crtl->eh.call_site_record_v[1])[i];
      if (cs->landing_pad)
	{
	  rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
	  while (true)
	    {
	      /* Landing pads have the LABEL_PRESERVE_P flag set.  This
		 check makes sure that we do not walk past a landing pad
		 visited earlier, which would result in possible
		 quadratic behaviour.  */
	      if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
		  && visited.add (insn))
		break;

	      /* Conservatively assume that an ASM insn may be empty.  We
		 have no way to tell what it contains.  */
	      if (active_insn_p (insn)
		  && GET_CODE (PATTERN (insn)) != ASM_INPUT
		  && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
		break;

	      /* If we reached the section switch note, the landing pad
		 would sit at offset 0 from the section start, so a NOP
		 is needed.  */
	      if (GET_CODE (insn) == NOTE
		  && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
		{
		  emit_insn_after (gen_nop (), insn);
		  break;
		}

	      /* We visit only labels from the cold section.  We should
		 never hit the beginning of the insn stream here.  */
	      insn = PREV_INSN (insn);
	    }
	}
    }
  /* The return value does not indicate whether a NOP was added; it is
     always false.  */
  return false;
}
   2557 
   2558 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   2559    The new note numbers will not refer to region numbers, but
   2560    instead to call site entries.  */
   2561 
   2562 static unsigned int
   2563 convert_to_eh_region_ranges (void)
   2564 {
   2565   rtx insn;
   2566   rtx_insn *iter;
   2567   rtx_note *note;
   2568   action_hash_type ar_hash (31);
   2569   int last_action = -3;
   2570   rtx_insn *last_action_insn = NULL;
   2571   rtx last_landing_pad = NULL_RTX;
   2572   rtx_insn *first_no_action_insn = NULL;
   2573   int call_site = 0;
   2574   int cur_sec = 0;
   2575   rtx_insn *section_switch_note = NULL;
   2576   rtx_insn *first_no_action_insn_before_switch = NULL;
   2577   rtx_insn *last_no_action_insn_before_switch = NULL;
   2578   int saved_call_site_base = call_site_base;
   2579 
   2580   vec_alloc (crtl->eh.action_record_data, 64);
   2581 
   2582   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
   2583     if (INSN_P (iter))
   2584       {
   2585 	eh_landing_pad lp;
   2586 	eh_region region;
   2587 	bool nothrow;
   2588 	int this_action;
   2589 	rtx_code_label *this_landing_pad;
   2590 
   2591 	insn = iter;
   2592 	if (NONJUMP_INSN_P (insn)
   2593 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
   2594 	  insn = XVECEXP (PATTERN (insn), 0, 0);
   2595 
   2596 	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
   2597 	if (nothrow)
   2598 	  continue;
   2599 	if (region)
   2600 	  this_action = collect_one_action_chain (&ar_hash, region);
   2601 	else
   2602 	  this_action = -1;
   2603 
   2604 	/* Existence of catch handlers, or must-not-throw regions
   2605 	   implies that an lsda is needed (even if empty).  */
   2606 	if (this_action != -1)
   2607 	  crtl->uses_eh_lsda = 1;
   2608 
   2609 	/* Delay creation of region notes for no-action regions
   2610 	   until we're sure that an lsda will be required.  */
   2611 	else if (last_action == -3)
   2612 	  {
   2613 	    first_no_action_insn = iter;
   2614 	    last_action = -1;
   2615 	  }
   2616 
   2617 	if (this_action >= 0)
   2618 	  this_landing_pad = lp->landing_pad;
   2619 	else
   2620 	  this_landing_pad = NULL;
   2621 
   2622 	/* Differing actions or landing pads implies a change in call-site
   2623 	   info, which implies some EH_REGION note should be emitted.  */
   2624 	if (last_action != this_action
   2625 	    || last_landing_pad != this_landing_pad)
   2626 	  {
   2627 	    /* If there is a queued no-action region in the other section
   2628 	       with hot/cold partitioning, emit it now.  */
   2629 	    if (first_no_action_insn_before_switch)
   2630 	      {
   2631 		gcc_assert (this_action != -1
   2632 			    && last_action == (first_no_action_insn
   2633 					       ? -1 : -3));
   2634 		call_site = add_call_site (NULL_RTX, 0, 0);
   2635 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
   2636 					 first_no_action_insn_before_switch);
   2637 		NOTE_EH_HANDLER (note) = call_site;
   2638 		note
   2639 		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
   2640 		NOTE_EH_HANDLER (note) = call_site;
   2641 		gcc_assert (last_action != -3
   2642 			    || (last_action_insn
   2643 				== last_no_action_insn_before_switch));
   2644 		first_no_action_insn_before_switch = NULL;
   2645 		last_no_action_insn_before_switch = NULL;
   2646 		call_site_base++;
   2647 	      }
   2648 	    /* If we'd not seen a previous action (-3) or the previous
   2649 	       action was must-not-throw (-2), then we do not need an
   2650 	       end note.  */
   2651 	    if (last_action >= -1)
   2652 	      {
   2653 		/* If we delayed the creation of the begin, do it now.  */
   2654 		if (first_no_action_insn)
   2655 		  {
   2656 		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
   2657 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
   2658 					     first_no_action_insn);
   2659 		    NOTE_EH_HANDLER (note) = call_site;
   2660 		    first_no_action_insn = NULL;
   2661 		  }
   2662 
   2663 		note = emit_note_eh_region_end (last_action_insn);
   2664 		NOTE_EH_HANDLER (note) = call_site;
   2665 	      }
   2666 
   2667 	    /* If the new action is must-not-throw, then no region notes
   2668 	       are created.  */
   2669 	    if (this_action >= -1)
   2670 	      {
   2671 		call_site = add_call_site (this_landing_pad,
   2672 					   this_action < 0 ? 0 : this_action,
   2673 					   cur_sec);
   2674 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
   2675 		NOTE_EH_HANDLER (note) = call_site;
   2676 	      }
   2677 
   2678 	    last_action = this_action;
   2679 	    last_landing_pad = this_landing_pad;
   2680 	  }
   2681 	last_action_insn = iter;
   2682       }
   2683     else if (NOTE_P (iter)
   2684 	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
   2685       {
   2686 	gcc_assert (section_switch_note == NULL_RTX);
   2687 	gcc_assert (flag_reorder_blocks_and_partition);
   2688 	section_switch_note = iter;
   2689 	if (first_no_action_insn)
   2690 	  {
   2691 	    first_no_action_insn_before_switch = first_no_action_insn;
   2692 	    last_no_action_insn_before_switch = last_action_insn;
   2693 	    first_no_action_insn = NULL;
   2694 	    gcc_assert (last_action == -1);
   2695 	    last_action = -3;
   2696 	  }
   2697 	/* Force closing of current EH region before section switch and
   2698 	   opening a new one afterwards.  */
   2699 	else if (last_action != -3)
   2700 	  last_landing_pad = pc_rtx;
   2701 	if (crtl->eh.call_site_record_v[cur_sec])
   2702 	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
   2703 	cur_sec++;
   2704 	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
   2705 	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
   2706       }
   2707 
   2708   if (last_action >= -1 && ! first_no_action_insn)
   2709     {
   2710       note = emit_note_eh_region_end (last_action_insn);
   2711       NOTE_EH_HANDLER (note) = call_site;
   2712     }
   2713 
   2714   call_site_base = saved_call_site_base;
   2715 
   2716   return 0;
   2717 }
   2718 
   2719 namespace {
   2720 
/* Descriptor for the "eh_ranges" RTL pass defined below.  The pass has
   no timevar of its own and neither requires nor provides any IL
   properties.  */
const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
   2733 
/* RTL pass that runs convert_to_eh_region_ranges and then, for
   hot/cold-partitioned functions, maybe_add_nop_after_section_switch.  */

class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      /* Capture the return value before the section-switch fixup so
	 the pass reports what the conversion itself returned.  */
      int ret = convert_to_eh_region_ranges ();
      maybe_add_nop_after_section_switch ();
      return ret;
    }

}; // class pass_convert_to_eh_region_ranges
   2751 
   2752 bool
   2753 pass_convert_to_eh_region_ranges::gate (function *)
   2754 {
   2755   /* Nothing to do for SJLJ exceptions or if no regions created.  */
   2756   if (cfun->eh->region_tree == NULL)
   2757     return false;
   2758   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
   2759     return false;
   2760   return true;
   2761 }
   2762 
   2763 } // anon namespace
   2764 
   2765 rtl_opt_pass *
   2766 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
   2767 {
   2768   return new pass_convert_to_eh_region_ranges (ctxt);
   2769 }
   2770 
   2771 static void
   2773 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
   2774 {
   2775   do
   2776     {
   2777       unsigned char byte = value & 0x7f;
   2778       value >>= 7;
   2779       if (value)
   2780 	byte |= 0x80;
   2781       vec_safe_push (*data_area, byte);
   2782     }
   2783   while (value);
   2784 }
   2785 
   2786 static void
   2787 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
   2788 {
   2789   unsigned char byte;
   2790   int more;
   2791 
   2792   do
   2793     {
   2794       byte = value & 0x7f;
   2795       value >>= 7;
   2796       more = ! ((value == 0 && (byte & 0x40) == 0)
   2797 		|| (value == -1 && (byte & 0x40) != 0));
   2798       if (more)
   2799 	byte |= 0x80;
   2800       vec_safe_push (*data_area, byte);
   2801     }
   2802   while (more);
   2803 }
   2804 
   2805 
   2806 static int
   2808 dw2_size_of_call_site_table (int section)
   2809 {
   2810   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
   2811   int size = n * (4 + 4 + 4);
   2812   int i;
   2813 
   2814   for (i = 0; i < n; ++i)
   2815     {
   2816       struct call_site_record_d *cs =
   2817 	(*crtl->eh.call_site_record_v[section])[i];
   2818       size += size_of_uleb128 (cs->action);
   2819     }
   2820 
   2821   return size;
   2822 }
   2823 
   2824 static int
   2825 sjlj_size_of_call_site_table (void)
   2826 {
   2827   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
   2828   int size = 0;
   2829   int i;
   2830 
   2831   for (i = 0; i < n; ++i)
   2832     {
   2833       struct call_site_record_d *cs =
   2834 	(*crtl->eh.call_site_record_v[0])[i];
   2835       size += size_of_uleb128 (INTVAL (cs->landing_pad));
   2836       size += size_of_uleb128 (cs->action);
   2837     }
   2838 
   2839   return size;
   2840 }
   2841 
/* Emit SECTION's call-site table in the DWARF2 layout, encoding each
   offset per CS_FORMAT (uleb128 or udata4).  Region start and landing
   pad are emitted relative to a base label chosen for the section;
   call_site_base is advanced past the emitted records so labels stay
   globally unique across sections.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  /* Pick the base label that offsets are computed against: the
     function start for the primary section, otherwise the label that
     begins the other (hot or cold) partition.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      /* LEHB/LEHE labels were emitted around each call site earlier;
	 regenerate their names from the running call-site index.  */
      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
   2902 
   2903 static void
   2904 sjlj_output_call_site_table (void)
   2905 {
   2906   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
   2907   int i;
   2908 
   2909   for (i = 0; i < n; ++i)
   2910     {
   2911       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
   2912 
   2913       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
   2914 				   "region %d landing pad", i);
   2915       dw2_asm_output_data_uleb128 (cs->action, "action");
   2916     }
   2917 
   2918   call_site_base += n;
   2919 }
   2920 
   2921 /* Switch to the section that should be used for exception tables.  */
   2922 
static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  /* FNNAME is only read inside the HAVE_LD_EH_GC_SECTIONS branch
     below, hence the ARG_UNUSED marker.  */
  if (exception_section)
    s = exception_section;
  else
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  /* With PIC, absolute or aligned pointers in the table would
	     need runtime relocation, forcing the section writable.  */
	  int tt_format =
	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	  flags = ((! flag_pic
		    || ((tt_format & 0x70) != DW_EH_PE_absptr
			&& (tt_format & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections
	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
	    {
	      /* Per-function table section, so the linker can garbage
		 collect it together with the function's code section.
		 Not cached: the name depends on FNNAME.  */
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      /* The EH table must match the code section, so only mark
		 it linkonce if we have COMDAT groups to tie them together.  */
	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
		flags |= SECTION_LINKONCE;
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, current_function_decl);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
   2975 
   2976 /* Output a reference from an exception table to the type_info object TYPE.
   2977    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   2978    the value.  */
   2979 
static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  /* A NULL entry is a catch-all; emit it as a zero.  */
  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (VAR_P (type))
	    is_public = TREE_PUBLIC (type);
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  /* absptr/aligned entries are plain integers; anything else goes
     through the DWARF2 encoded-address machinery.  */
  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
   3024 
   3025 /* Output an exception table for the current function according to SECTION.
   3026 
   3027    If the function has been partitioned into hot and cold parts, value 0 for
   3028    SECTION refers to the table associated with the hot part while value 1
   3029    refers to the table associated with the cold part.  If the function has
   3030    not been partitioned, value 0 refers to the single exception table.  */
   3031 
static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
  int call_site_len;
  int have_tt_data;
  int tt_format_size = 0;

  /* The @TType table is needed if we have type entries or an
     exception-specification table to emit.  */
  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
		  || (targetm.arm_eabi_unwinder
		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
		      : vec_safe_length (cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
      if (HAVE_AS_LEB128)
	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
				     section ? "LLSDATTC" : "LLSDATT",
				     current_function_funcdef_no);

      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

  /* Without assembler .uleb128 support we must compute the call-site
     table length ourselves, since we emit it as an explicit uleb128
     below rather than as a label difference.  */
  if (!HAVE_AS_LEB128)
    {
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	call_site_len = sjlj_size_of_call_site_table ();
      else
	call_site_len = dw2_size_of_call_site_table (section);
    }

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
      if (HAVE_AS_LEB128)
	{
	  char ttype_after_disp_label[32];
	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
				       section ? "LLSDATTDC" : "LLSDATTD",
				       current_function_funcdef_no);
	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
					"@TType base offset");
	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
	}
      else
	{
	  /* Ug.  Alignment queers things.  */
	  unsigned int before_disp, after_disp, last_disp, disp;

	  /* The displacement depends on the alignment padding, which
	     depends on the size of the uleb128 encoding of the
	     displacement itself; iterate until it stabilizes.  */
	  before_disp = 1 + 1;
	  after_disp = (1 + size_of_uleb128 (call_site_len)
			+ call_site_len
			+ vec_safe_length (crtl->eh.action_record_data)
			+ (vec_safe_length (cfun->eh->ttype_data)
			   * tt_format_size));

	  disp = after_disp;
	  do
	    {
	      unsigned int disp_size, pad;

	      last_disp = disp;
	      disp_size = size_of_uleb128 (disp);
	      pad = before_disp + disp_size + after_disp;
	      if (pad % tt_format_size)
		pad = tt_format_size - (pad % tt_format_size);
	      else
		pad = 0;
	      disp = after_disp + pad;
	    }
	  while (disp != last_disp);

	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
	}
    }

  /* Indicate the format of the call-site offsets.  */
  if (HAVE_AS_LEB128)
    cs_format = DW_EH_PE_uleb128;
  else
    cs_format = DW_EH_PE_udata4;

  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

  if (HAVE_AS_LEB128)
    {
      /* Let the assembler compute the table length as the difference
	 of two labels bracketing the table.  */
      ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
				   section ? "LLSDACSBC" : "LLSDACSB",
				   current_function_funcdef_no);
      ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
				   section ? "LLSDACSEC" : "LLSDACSE",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				    "Call-site table length");
      ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	sjlj_output_call_site_table ();
      else
	dw2_output_call_site_table (cs_format, section);
      ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
    }
  else
    {
      dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	sjlj_output_call_site_table ();
      else
	dw2_output_call_site_table (cs_format, section);
    }

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* The @TType entries are emitted in reverse order of the vector.  */
  i = vec_safe_length (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = (*cfun->eh->ttype_data)[i];
      output_ttype (type, tt_format, tt_format_size);
    }

  if (HAVE_AS_LEB128 && have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      tree type;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
	output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      uchar uc;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
	dw2_asm_output_data (1, uc,
			     i ? NULL : "Exception specification table");
    }
}
   3209 
   3210 /* Output an exception table for the current function according to SECTION,
   3211    switching back and forth from the function section appropriately.
   3212 
   3213    If the function has been partitioned into hot and cold parts, value 0 for
   3214    SECTION refers to the table associated with the hot part while value 1
   3215    refers to the table associated with the cold part.  If the function has
   3216    not been partitioned, value 0 refers to the single exception table.  */
   3217 
void
output_function_exception_table (int section)
{
  const char *fnname = get_fnname_from_decl (current_function_decl);
  rtx personality = get_personality_function (current_function_decl);

  /* Not all functions need anything.  */
  if (!crtl->uses_eh_lsda
      || targetm_common.except_unwind_info (&global_options) == UI_NONE)
    return;

  /* No need to emit any boilerplate stuff for the cold part.  */
  if (section == 1 && !crtl->eh.call_site_record_v[1])
    return;

  if (personality)
    {
      /* Make the personality routine visible to the assembler, and
	 let the target emit any extra reference it needs.  */
      assemble_external_libcall (personality);

      if (targetm.asm_out.emit_except_personality)
	targetm.asm_out.emit_except_personality (personality);
    }

  switch_to_exception_section (fnname);

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.emit_except_table_label (asm_out_file);

  /* Do the real work.  */
  output_one_function_exception_table (section);

  /* Return to the function's own section.  */
  switch_to_section (current_function_section ());
}
   3251 
/* Install TABLE as FUN's throw-statement table.  Ownership semantics
   follow the garbage-collected hash_map; the previous table, if any,
   is simply replaced.  */

void
set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
  fun->eh->throw_stmt_table = table;
}
   3257 
/* Return FUN's throw-statement table, or NULL if none has been set.  */

hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
   3263 
   3264 /* Determine if the function needs an EH personality function.  */
   3266 
enum eh_personality_kind
function_needs_eh_personality (struct function *fn)
{
  enum eh_personality_kind kind = eh_personality_none;
  eh_region i;

  /* Scan every region; TRY, ALLOWED_EXCEPTIONS and MUST_NOT_THROW
     force the language personality immediately, while CLEANUP only
     raises the requirement to "any" and the scan continues in case a
     stronger requirement appears later.  No default case: -Wswitch
     will flag any new region type added to the enum.  */
  FOR_ALL_EH_REGION_FN (i, fn)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	  /* Can do with any personality including the generic C one.  */
	  kind = eh_personality_any;
	  break;

	case ERT_TRY:
	case ERT_ALLOWED_EXCEPTIONS:
	  /* Always needs a EH personality function.  The generic C
	     personality doesn't handle these even for empty type lists.  */
	  return eh_personality_lang;

	case ERT_MUST_NOT_THROW:
	  /* Always needs a EH personality function.  The language may specify
	     what abort routine that must be used, e.g. std::terminate.  */
	  return eh_personality_lang;
	}
    }

  return kind;
}
   3297 
   3298 /* Dump EH information to OUT.  */
   3300 
void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  /* Pre-order walk over the region tree, indenting two columns per
     nesting level.  */
  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);

      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      /* At the GIMPLE stage landing pads are identified by
		 their post-landing-pad label tree.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      /* At the RTL stage, print insn UIDs instead, marking
		 labels that have been turned into deleted notes.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      /* Print region-type-specific details.  */
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
   3415 
/* Dump the EH tree for FN on stderr.  Convenience wrapper around
   dump_eh_tree, intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
   3423 
   3424 /* Verify invariants on EH datastructures.  */
   3425 
DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* First pass: check that region_array entries record their own
     index, and count the live entries.  Index 0 is skipped (slot 0 is
     not iterated).  */
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("%<region_array%> is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Same check for the landing-pad array.  */
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("%<lp_array%> is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Second pass: walk the region tree itself (same traversal shape as
     dump_eh_tree) verifying array back-pointers, outer links, and
     nesting depth, while counting visited regions and landing pads.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("%<region_array%> is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("%<lp_array%> is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  /* The visit counts must match the array counts, otherwise the tree
     and the arrays disagree about which regions/pads exist.  */
  if (count_r != nvisited_r)
    {
      error ("%<region_array%> does not match %<region_tree%>");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("%<lp_array%> does not match %<region_tree%>");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("%qs failed", __func__);
    }
}
   3542 
   3543 #include "gt-except.h"
   3545