Home | History | Annotate | Line # | Download | only in gcc
      1  1.1  mrg /* Default target hook functions.
      2  1.1  mrg    Copyright (C) 2003-2022 Free Software Foundation, Inc.
      3  1.1  mrg 
      4  1.1  mrg This file is part of GCC.
      5  1.1  mrg 
      6  1.1  mrg GCC is free software; you can redistribute it and/or modify it under
      7  1.1  mrg the terms of the GNU General Public License as published by the Free
      8  1.1  mrg Software Foundation; either version 3, or (at your option) any later
      9  1.1  mrg version.
     10  1.1  mrg 
     11  1.1  mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
     12  1.1  mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
     13  1.1  mrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
     14  1.1  mrg for more details.
     15  1.1  mrg 
     16  1.1  mrg You should have received a copy of the GNU General Public License
     17  1.1  mrg along with GCC; see the file COPYING3.  If not see
     18  1.1  mrg <http://www.gnu.org/licenses/>.  */
     19  1.1  mrg 
     20  1.1  mrg /* The migration of target macros to target hooks works as follows:
     21  1.1  mrg 
     22  1.1  mrg    1. Create a target hook that uses the existing target macros to
     23  1.1  mrg       implement the same functionality.
     24  1.1  mrg 
     25  1.1  mrg    2. Convert all the MI files to use the hook instead of the macro.
     26  1.1  mrg 
     27  1.1  mrg    3. Repeat for a majority of the remaining target macros.  This will
     28  1.1  mrg       take some time.
     29  1.1  mrg 
     30  1.1  mrg    4. Tell target maintainers to start migrating.
     31  1.1  mrg 
     32  1.1  mrg    5. Eventually convert the backends to override the hook instead of
     33  1.1  mrg       defining the macros.  This will take some time too.
     34  1.1  mrg 
     35  1.1  mrg    6. TBD when, poison the macros.  Unmigrated targets will break at
     36  1.1  mrg       this point.
     37  1.1  mrg 
     38  1.1  mrg    Note that we expect steps 1-3 to be done by the people that
     39  1.1  mrg    understand what the MI does with each macro, and step 5 to be done
     40  1.1  mrg    by the target maintainers for their respective targets.
     41  1.1  mrg 
     42  1.1  mrg    Note that steps 1 and 2 don't have to be done together, but no
     43  1.1  mrg    target can override the new hook until step 2 is complete for it.
     44  1.1  mrg 
     45  1.1  mrg    Once the macros are poisoned, we will revert to the old migration
     46  1.1  mrg    rules - migrate the macro, callers, and targets all at once.  This
     47  1.1  mrg    comment can thus be removed at that point.  */
     48  1.1  mrg 
     49  1.1  mrg #include "config.h"
     50  1.1  mrg #include "system.h"
     51  1.1  mrg #include "coretypes.h"
     52  1.1  mrg #include "target.h"
     53  1.1  mrg #include "function.h"
     54  1.1  mrg #include "rtl.h"
     55  1.1  mrg #include "tree.h"
     56  1.1  mrg #include "tree-ssa-alias.h"
     57  1.1  mrg #include "gimple-expr.h"
     58  1.1  mrg #include "memmodel.h"
     59  1.1  mrg #include "backend.h"
     60  1.1  mrg #include "emit-rtl.h"
     61  1.1  mrg #include "df.h"
     62  1.1  mrg #include "tm_p.h"
     63  1.1  mrg #include "stringpool.h"
     64  1.1  mrg #include "tree-vrp.h"
     65  1.1  mrg #include "tree-ssanames.h"
     66  1.1  mrg #include "profile-count.h"
     67  1.1  mrg #include "optabs.h"
     68  1.1  mrg #include "regs.h"
     69  1.1  mrg #include "recog.h"
     70  1.1  mrg #include "diagnostic-core.h"
     71  1.1  mrg #include "fold-const.h"
     72  1.1  mrg #include "stor-layout.h"
     73  1.1  mrg #include "varasm.h"
     74  1.1  mrg #include "flags.h"
     75  1.1  mrg #include "explow.h"
     76  1.1  mrg #include "expmed.h"
     77  1.1  mrg #include "calls.h"
     78  1.1  mrg #include "expr.h"
     79  1.1  mrg #include "output.h"
     80  1.1  mrg #include "common/common-target.h"
     81  1.1  mrg #include "reload.h"
     82  1.1  mrg #include "intl.h"
     83  1.1  mrg #include "opts.h"
     84  1.1  mrg #include "gimplify.h"
     85  1.1  mrg #include "predict.h"
     86  1.1  mrg #include "real.h"
     87  1.1  mrg #include "langhooks.h"
     88  1.1  mrg #include "sbitmap.h"
     89  1.1  mrg #include "function-abi.h"
     90  1.1  mrg #include "attribs.h"
     91  1.1  mrg #include "asan.h"
     92  1.1  mrg #include "emit-rtl.h"
     93  1.1  mrg #include "gimple.h"
     94  1.1  mrg #include "cfgloop.h"
     95  1.1  mrg #include "tree-vectorizer.h"
     96  1.1  mrg 
     97  1.1  mrg bool
     98  1.1  mrg default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
     99  1.1  mrg 			      rtx addr ATTRIBUTE_UNUSED,
    100  1.1  mrg 			      bool strict ATTRIBUTE_UNUSED)
    101  1.1  mrg {
    102  1.1  mrg #ifdef GO_IF_LEGITIMATE_ADDRESS
    103  1.1  mrg   /* Defer to the old implementation using a goto.  */
    104  1.1  mrg   if (strict)
    105  1.1  mrg     return strict_memory_address_p (mode, addr);
    106  1.1  mrg   else
    107  1.1  mrg     return memory_address_p (mode, addr);
    108  1.1  mrg #else
    109  1.1  mrg   gcc_unreachable ();
    110  1.1  mrg #endif
    111  1.1  mrg }
    112  1.1  mrg 
/* Default implementation of TARGET_ASM_EXTERNAL_LIBCALL: if the
   target defines ASM_OUTPUT_EXTERNAL_LIBCALL, emit the assembler
   declaration for the external libcall FUN; otherwise do nothing.  */
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
    120  1.1  mrg 
    121  1.1  mrg int
    122  1.1  mrg default_unspec_may_trap_p (const_rtx x, unsigned flags)
    123  1.1  mrg {
    124  1.1  mrg   int i;
    125  1.1  mrg 
    126  1.1  mrg   /* Any floating arithmetic may trap.  */
    127  1.1  mrg   if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    128  1.1  mrg     return 1;
    129  1.1  mrg 
    130  1.1  mrg   for (i = 0; i < XVECLEN (x, 0); ++i)
    131  1.1  mrg     {
    132  1.1  mrg       if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
    133  1.1  mrg 	return 1;
    134  1.1  mrg     }
    135  1.1  mrg 
    136  1.1  mrg   return 0;
    137  1.1  mrg }
    138  1.1  mrg 
    139  1.1  mrg int
    140  1.1  mrg default_bitfield_may_trap_p (const_rtx x, unsigned flags)
    141  1.1  mrg {
    142  1.1  mrg   return 0;
    143  1.1  mrg }
    144  1.1  mrg 
    145  1.1  mrg machine_mode
    146  1.1  mrg default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
    147  1.1  mrg 			       machine_mode mode,
    148  1.1  mrg 			       int *punsignedp ATTRIBUTE_UNUSED,
    149  1.1  mrg 			       const_tree funtype ATTRIBUTE_UNUSED,
    150  1.1  mrg 			       int for_return ATTRIBUTE_UNUSED)
    151  1.1  mrg {
    152  1.1  mrg   if (type != NULL_TREE && for_return == 2)
    153  1.1  mrg     return promote_mode (type, mode, punsignedp);
    154  1.1  mrg   return mode;
    155  1.1  mrg }
    156  1.1  mrg 
/* Variant of TARGET_PROMOTE_FUNCTION_MODE that promotes every
   argument and return value, exactly as promote_mode would promote a
   variable of TYPE.  *PUNSIGNEDP is updated accordingly; FUNTYPE and
   FOR_RETURN are ignored.  */
machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
    166  1.1  mrg 
    167  1.1  mrg machine_mode
    168  1.1  mrg default_cc_modes_compatible (machine_mode m1, machine_mode m2)
    169  1.1  mrg {
    170  1.1  mrg   if (m1 == m2)
    171  1.1  mrg     return m1;
    172  1.1  mrg   return VOIDmode;
    173  1.1  mrg }
    174  1.1  mrg 
    175  1.1  mrg bool
    176  1.1  mrg default_return_in_memory (const_tree type,
    177  1.1  mrg 			  const_tree fntype ATTRIBUTE_UNUSED)
    178  1.1  mrg {
    179  1.1  mrg   return (TYPE_MODE (type) == BLKmode);
    180  1.1  mrg }
    181  1.1  mrg 
/* Default TARGET_LEGITIMIZE_ADDRESS: perform no transformation and
   hand the address X back unchanged.  ORIG_X and MODE are ignored.  */
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}
    188  1.1  mrg 
/* Default TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT: report that no
   base/offset splitting of a displacement is available on this
   target.  */
bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
					 machine_mode)
{
  return false;
}
    195  1.1  mrg 
    196  1.1  mrg bool
    197  1.1  mrg default_const_not_ok_for_debug_p (rtx x)
    198  1.1  mrg {
    199  1.1  mrg   if (GET_CODE (x) == UNSPEC)
    200  1.1  mrg     return true;
    201  1.1  mrg   return false;
    202  1.1  mrg }
    203  1.1  mrg 
/* Default TARGET_EXPAND_BUILTIN_SAVEREGS: the builtin is unsupported;
   report an error and return a harmless constant so that expansion
   can continue.  */
rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}
    210  1.1  mrg 
/* Default TARGET_SETUP_INCOMING_VARARGS: do nothing.  Targets that
   must spill argument registers for variadic functions override
   this.  */
void
default_setup_incoming_varargs (cumulative_args_t,
				const function_arg_info &, int *, int)
{
}
    216  1.1  mrg 
/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE:
   save the virtual stack-variables register as the frame value.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}
    224  1.1  mrg 
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns
   false, for hooks whose default answer is a constant "no".  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}
    232  1.1  mrg 
/* Default TARGET_PRETEND_OUTGOING_VARARGS_NAMED: treating trailing
   anonymous arguments as named only matters when the target actually
   sets up incoming varargs, i.e. when it has overridden
   TARGET_SETUP_INCOMING_VARARGS away from the no-op default.  */
bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}
    239  1.1  mrg 
/* Default TARGET_EH_RETURN_FILTER_MODE: use the same mode as the
   unwinder word.  */
scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}
    245  1.1  mrg 
/* Default TARGET_LIBGCC_CMP_RETURN_MODE: libgcc comparison routines
   return a word.  */
scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}
    251  1.1  mrg 
/* Default TARGET_LIBGCC_SHIFT_COUNT_MODE: libgcc shift routines take
   a word-mode shift count.  */
scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}
    257  1.1  mrg 
/* Default TARGET_UNWIND_WORD_MODE: unwind data uses the natural word
   mode.  */
scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}
    263  1.1  mrg 
    264  1.1  mrg /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */
    265  1.1  mrg 
    266  1.1  mrg unsigned HOST_WIDE_INT
    267  1.1  mrg default_shift_truncation_mask (machine_mode mode)
    268  1.1  mrg {
    269  1.1  mrg   return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
    270  1.1  mrg }
    271  1.1  mrg 
    272  1.1  mrg /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */
    273  1.1  mrg 
    274  1.1  mrg unsigned int
    275  1.1  mrg default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
    276  1.1  mrg {
    277  1.1  mrg   return have_insn_for (DIV, mode) ? 3 : 2;
    278  1.1  mrg }
    279  1.1  mrg 
/* The default implementation of TARGET_MODE_REP_EXTENDED: make no
   claim about how values of the narrower mode are represented in the
   wider one.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}
    287  1.1  mrg 
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns
   true, for hooks whose default answer is a constant "yes".  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}
    295  1.1  mrg 
/* Return machine mode for non-standard constant literal suffix
   SUFFIX, or VOIDmode if non-standard suffixes are unsupported (the
   default).  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
    303  1.1  mrg 
/* Default TARGET_CXX_GUARD_TYPE: the generic C++ ABI specifies the
   guard variable for one-time initialization is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}
    310  1.1  mrg 
    311  1.1  mrg /* Returns the size of the cookie to use when allocating an array
    312  1.1  mrg    whose elements have the indicated TYPE.  Assumes that it is already
    313  1.1  mrg    known that a cookie is needed.  */
    314  1.1  mrg 
    315  1.1  mrg tree
    316  1.1  mrg default_cxx_get_cookie_size (tree type)
    317  1.1  mrg {
    318  1.1  mrg   tree cookie_size;
    319  1.1  mrg 
    320  1.1  mrg   /* We need to allocate an additional max (sizeof (size_t), alignof
    321  1.1  mrg      (true_type)) bytes.  */
    322  1.1  mrg   tree sizetype_size;
    323  1.1  mrg   tree type_align;
    324  1.1  mrg 
    325  1.1  mrg   sizetype_size = size_in_bytes (sizetype);
    326  1.1  mrg   type_align = size_int (TYPE_ALIGN_UNIT (type));
    327  1.1  mrg   if (tree_int_cst_lt (type_align, sizetype_size))
    328  1.1  mrg     cookie_size = sizetype_size;
    329  1.1  mrg   else
    330  1.1  mrg     cookie_size = type_align;
    331  1.1  mrg 
    332  1.1  mrg   return cookie_size;
    333  1.1  mrg }
    334  1.1  mrg 
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK:
   anything forced onto the stack is instead passed by reference.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
					   const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg);
}
    344  1.1  mrg 
/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments and false for
   anonymous (variadic) ones.  */

bool
hook_callee_copies_named (cumulative_args_t, const function_arg_info &arg)
{
  return arg.named;
}
    353  1.1  mrg 
/* Emit to STREAM the assembler syntax for insn operand X, modified by
   the letter CODE, by deferring to the old PRINT_OPERAND macro.
   Targets without the macro must override the hook.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}
    366  1.1  mrg 
/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X, by deferring to the old PRINT_OPERAND_ADDRESS macro
   (which does not take a mode).  Targets without the macro must
   override the hook.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       machine_mode /*mode*/,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}
    381  1.1  mrg 
/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook, deferring to the old
   PRINT_OPERAND_PUNCT_VALID_P macro when the target defines it and
   rejecting all punctuation otherwise.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
    394  1.1  mrg 
/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  Strip
   any target-specific encoding from NAME and, unless NAME began with
   '*' (meaning "emit verbatim"), prepend the user label prefix;
   return the result as an identifier tree.  */
tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  /* A leading '*' is skipped before stripping and suppresses the
     user label prefix below.  */
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
    405  1.1  mrg 
/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE:
   perform no translation, returning MODE as-is.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}
    413  1.1  mrg 
    414  1.1  mrg /* True if MODE is valid for the target.  By "valid", we mean able to
    415  1.1  mrg    be manipulated in non-trivial ways.  In particular, this means all
    416  1.1  mrg    the arithmetic is supported.
    417  1.1  mrg 
    418  1.1  mrg    By default we guess this means that any C type is supported.  If
    419  1.1  mrg    we can't map the mode back to a type that would be available in C,
    420  1.1  mrg    then reject it.  Special case, here, is the double-word arithmetic
    421  1.1  mrg    supported by optabs.cc.  */
    422  1.1  mrg 
    423  1.1  mrg bool
    424  1.1  mrg default_scalar_mode_supported_p (scalar_mode mode)
    425  1.1  mrg {
    426  1.1  mrg   int precision = GET_MODE_PRECISION (mode);
    427  1.1  mrg 
    428  1.1  mrg   switch (GET_MODE_CLASS (mode))
    429  1.1  mrg     {
    430  1.1  mrg     case MODE_PARTIAL_INT:
    431  1.1  mrg     case MODE_INT:
    432  1.1  mrg       if (precision == CHAR_TYPE_SIZE)
    433  1.1  mrg 	return true;
    434  1.1  mrg       if (precision == SHORT_TYPE_SIZE)
    435  1.1  mrg 	return true;
    436  1.1  mrg       if (precision == INT_TYPE_SIZE)
    437  1.1  mrg 	return true;
    438  1.1  mrg       if (precision == LONG_TYPE_SIZE)
    439  1.1  mrg 	return true;
    440  1.1  mrg       if (precision == LONG_LONG_TYPE_SIZE)
    441  1.1  mrg 	return true;
    442  1.1  mrg       if (precision == 2 * BITS_PER_WORD)
    443  1.1  mrg 	return true;
    444  1.1  mrg       return false;
    445  1.1  mrg 
    446  1.1  mrg     case MODE_FLOAT:
    447  1.1  mrg       if (precision == FLOAT_TYPE_SIZE)
    448  1.1  mrg 	return true;
    449  1.1  mrg       if (precision == DOUBLE_TYPE_SIZE)
    450  1.1  mrg 	return true;
    451  1.1  mrg       if (precision == LONG_DOUBLE_TYPE_SIZE)
    452  1.1  mrg 	return true;
    453  1.1  mrg       return false;
    454  1.1  mrg 
    455  1.1  mrg     case MODE_DECIMAL_FLOAT:
    456  1.1  mrg     case MODE_FRACT:
    457  1.1  mrg     case MODE_UFRACT:
    458  1.1  mrg     case MODE_ACCUM:
    459  1.1  mrg     case MODE_UACCUM:
    460  1.1  mrg       return false;
    461  1.1  mrg 
    462  1.1  mrg     default:
    463  1.1  mrg       gcc_unreachable ();
    464  1.1  mrg     }
    465  1.1  mrg }
    466  1.1  mrg 
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  The default set is exactly the
   modes libgcc's soft-float routines cover, each guarded by whether
   the target defines the mode at all.  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case E_SFmode:
#endif
#ifdef HAVE_DFmode
    case E_DFmode:
#endif
#ifdef HAVE_XFmode
    case E_XFmode:
#endif
#ifdef HAVE_TFmode
    case E_TFmode:
#endif
      return true;

    default:
      return false;
    }
}
    493  1.1  mrg 
/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  A candidate mode is only used when it exists, has the
   required IEEE interchange width, and both the compiler and libgcc
   support it.  */
opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      /* _FloatNx: pick the first candidate mode strictly wider than
	 N IEEE bits.  CAND1/CAND2 stay empty unless set below.  */
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;
      switch (n)
	{
	case 32:
#ifdef HAVE_DFmode
	  cand1 = DFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_XFmode
	  cand1 = XFmode;
#endif
#ifdef HAVE_TFmode
	  cand2 = TFmode;
#endif
	  break;

	case 128:
	  /* No default candidate wider than 128 bits.  */
	  break;

	default:
	  /* Those are the only valid _FloatNx types.  */
	  gcc_unreachable ();
	}
      if (cand1.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand1;
      if (cand2.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand2;
    }
  else
    {
      /* _FloatN: the candidate must match N IEEE bits exactly.  */
      opt_scalar_float_mode cand;
      scalar_float_mode mode;
      switch (n)
	{
	case 16:
	  /* Always enable _Float16 if we have basic support for the mode.
	     Targets can control the range and precision of operations on
	     the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
#ifdef HAVE_HFmode
	  cand = HFmode;
#endif
	  break;

	case 32:
#ifdef HAVE_SFmode
	  cand = SFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_DFmode
	  cand = DFmode;
#endif
	  break;

	case 128:
#ifdef HAVE_TFmode
	  cand = TFmode;
#endif
	  break;

	default:
	  break;
	}
      if (cand.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits == n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand;
    }
  return opt_scalar_float_mode ();
}
    583  1.1  mrg 
    584  1.1  mrg /* Define this to return true if the _Floatn and _Floatnx built-in functions
    585  1.1  mrg    should implicitly enable the built-in function without the __builtin_ prefix
    586  1.1  mrg    in addition to the normal built-in function with the __builtin_ prefix.  The
    587  1.1  mrg    default is to only enable built-in functions without the __builtin_ prefix
    588  1.1  mrg    for the GNU C langauge.  The argument FUNC is the enum builtin_in_function
    589  1.1  mrg    id of the function to be enabled.  */
    590  1.1  mrg 
    591  1.1  mrg bool
    592  1.1  mrg default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
    593  1.1  mrg {
    594  1.1  mrg   static bool first_time_p = true;
    595  1.1  mrg   static bool c_or_objective_c;
    596  1.1  mrg 
    597  1.1  mrg   if (first_time_p)
    598  1.1  mrg     {
    599  1.1  mrg       first_time_p = false;
    600  1.1  mrg       c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    601  1.1  mrg     }
    602  1.1  mrg 
    603  1.1  mrg   return c_or_objective_c;
    604  1.1  mrg }
    605  1.1  mrg 
/* Make some target macros useable by target-independent code: expose
   WORDS_BIG_ENDIAN as a hook.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}
    612  1.1  mrg 
/* Expose FLOAT_WORDS_BIG_ENDIAN to target-independent code as a
   hook.  */
bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}
    618  1.1  mrg 
/* True if the target supports floating-point exceptions and rounding
   modes.  As a proxy, check whether hardware double addition exists;
   without it, floating point is presumably emulated by libgcc, which
   does not support exceptions or rounding modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}
    631  1.1  mrg 
/* True if the target supports decimal floating point, as configured
   at build time.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}
    639  1.1  mrg 
/* True if the target supports fixed-point arithmetic, as configured
   at build time.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}
    647  1.1  mrg 
/* True if the target supports GNU indirect functions, as configured
   at build time.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
    655  1.1  mrg 
/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}
    668  1.1  mrg 
/* Default TARGET_PREFERRED_DOLOOP_MODE: just use the input MODE
   itself, applying no preference.  */

machine_mode
default_preferred_doloop_mode (machine_mode mode)
{
  return mode;
}
    676  1.1  mrg 
/* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL. A called function may clobber any special registers required
   for low-overhead looping. Additionally, some targets (eg, PPC) use the count
   register for branch on table instructions. We reject the doloop pattern in
   these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  /* Calls may clobber the special loop registers.  */
  if (CALL_P (insn))
    return "Function call in loop.";

  /* Computed jumps (including branch-on-table) may need the same
     register the doloop uses for its counter.  */
  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}
    698  1.1  mrg 
/* Mapping of builtin functions to vectorized variants: by default
   there are none, so return NULL_TREE.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}
    706  1.1  mrg 
/* Mapping of target builtin functions to vectorized variants: by
   default there are none, so return NULL_TREE.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}
    714  1.1  mrg 
/* Default vectorizer cost model values: return the relative cost of a
   statement of kind TYPE_OF_COST operating on vector type VECTYPE.
   MISALIGN is ignored by the default model.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
                                    tree vectype,
                                    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
      /* Most operations cost one unit.  */
      case scalar_stmt:
      case scalar_load:
      case scalar_store:
      case vector_stmt:
      case vector_load:
      case vector_store:
      case vec_to_scalar:
      case scalar_to_vec:
      case cond_branch_not_taken:
      case vec_perm:
      case vec_promote_demote:
        return 1;

      /* Unaligned memory accesses are assumed twice as expensive.  */
      case unaligned_load:
      case unaligned_store:
        return 2;

      case cond_branch_taken:
        return 3;

      /* Building a vector element-by-element needs one insert per
	 element after the first.  */
      case vec_construct:
	return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

      default:
        gcc_unreachable ();
    }
}
    751  1.1  mrg 
/* Reciprocal: by default no builtin has a reciprocal variant, so
   return NULL_TREE.  */

tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}
    759  1.1  mrg 
/* Generic hook over (CUMULATIVE_ARGS, function_arg_info) that always
   answers false.  */
bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
					  const function_arg_info &)
{
  return false;
}
    766  1.1  mrg 
/* Generic hook over (CUMULATIVE_ARGS, function_arg_info) that always
   answers true.  */
bool
hook_bool_CUMULATIVE_ARGS_arg_info_true (cumulative_args_t,
					 const function_arg_info &)
{
  return true;
}
    773  1.1  mrg 
/* Generic hook over (CUMULATIVE_ARGS, function_arg_info) that always
   answers 0.  */
int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
				     const function_arg_info &)
{
  return 0;
}
    780  1.1  mrg 
/* Generic no-op hook taking a cumulative_args_t and a tree; both
   arguments are ignored.  */

void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
				tree ATTRIBUTE_UNUSED)
{
}
    786  1.1  mrg 
    787  1.1  mrg /* Default implementation of TARGET_PUSH_ARGUMENT.  */
    788  1.1  mrg 
/* Default implementation of TARGET_PUSH_ARGUMENT.  Push instructions
   are usable only when the target defines PUSH_ROUNDING and outgoing
   argument space is not pre-allocated (!ACCUMULATE_OUTGOING_ARGS).
   The NPUSH size argument is ignored by this default.  */

bool
default_push_argument (unsigned int)
{
#ifdef PUSH_ROUNDING
  return !ACCUMULATE_OUTGOING_ARGS;
#else
  return false;
#endif
}
    798  1.1  mrg 
/* Default for TARGET_FUNCTION_ARG_ADVANCE: there is no generic
   fallback, so any target that passes arguments must override this
   hook; reaching it is a compiler bug.  */

void
default_function_arg_advance (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}
    804  1.1  mrg 
    805  1.1  mrg /* Default implementation of TARGET_FUNCTION_ARG_OFFSET.  */
    806  1.1  mrg 
/* Default implementation of TARGET_FUNCTION_ARG_OFFSET: arguments get
   no extra offset beyond their natural stack slot.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}
    812  1.1  mrg 
    813  1.1  mrg /* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
    814  1.1  mrg    upward, but pad short args downward on big-endian machines.  */
    815  1.1  mrg 
    816  1.1  mrg pad_direction
    817  1.1  mrg default_function_arg_padding (machine_mode mode, const_tree type)
    818  1.1  mrg {
    819  1.1  mrg   if (!BYTES_BIG_ENDIAN)
    820  1.1  mrg     return PAD_UPWARD;
    821  1.1  mrg 
    822  1.1  mrg   unsigned HOST_WIDE_INT size;
    823  1.1  mrg   if (mode == BLKmode)
    824  1.1  mrg     {
    825  1.1  mrg       if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    826  1.1  mrg 	return PAD_UPWARD;
    827  1.1  mrg       size = int_size_in_bytes (type);
    828  1.1  mrg     }
    829  1.1  mrg   else
    830  1.1  mrg     /* Targets with variable-sized modes must override this hook
    831  1.1  mrg        and handle variable-sized modes explicitly.  */
    832  1.1  mrg     size = GET_MODE_SIZE (mode).to_constant ();
    833  1.1  mrg 
    834  1.1  mrg   if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
    835  1.1  mrg     return PAD_DOWNWARD;
    836  1.1  mrg 
    837  1.1  mrg   return PAD_UPWARD;
    838  1.1  mrg }
    839  1.1  mrg 
/* Default for TARGET_FUNCTION_ARG: no generic fallback exists, so any
   target that passes arguments must override this hook; reaching it is
   a compiler bug.  */

rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}
    845  1.1  mrg 
/* Default for TARGET_FUNCTION_INCOMING_ARG: like default_function_arg,
   there is no generic fallback; reaching it is a compiler bug.  */

rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}
    851  1.1  mrg 
/* Default for TARGET_FUNCTION_ARG_BOUNDARY: every argument is aligned
   to the target's PARM_BOUNDARY regardless of mode or type.  */

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
    858  1.1  mrg 
/* Default for TARGET_FUNCTION_ARG_ROUND_BOUNDARY: argument sizes are
   rounded up to PARM_BOUNDARY regardless of mode or type.  */

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
    865  1.1  mrg 
/* Generic no-op hook taking a bitmap; the argument is ignored.  */

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}
    870  1.1  mrg 
/* Generic hook for TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN: returning
   NULL means no argument is ever considered invalid when calling an
   unprototyped function.  A target override would return an error
   message string instead.  */

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}
    879  1.1  mrg 
    880  1.1  mrg /* Initialize the stack protection decls.  */
    881  1.1  mrg 
    882  1.1  mrg /* Stack protection related decls living in libgcc.  */
/* Cached decl for the "__stack_chk_guard" libgcc/libc variable;
   GTY-marked so it survives garbage collection.  */
static GTY(()) tree stack_chk_guard_decl;

/* Default for TARGET_STACK_PROTECT_GUARD: build (once) and return a
   VAR_DECL for the external, volatile pointer-sized variable
   "__stack_chk_guard" that holds the canary value.  */

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      /* External, public, volatile pointer-sized variable; artificial
	 and ignored for debug purposes.  */
      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      /* Cache for subsequent calls.  */
      stack_chk_guard_decl = t;
    }

  return t;
}
    915  1.1  mrg 
/* Cached decl for the stack-check failure function; shared between the
   external and hidden variants below, GTY-marked to survive GC.  */
static GTY(()) tree stack_chk_fail_decl;

/* Default for TARGET_STACK_PROTECT_FAIL: build (once) a decl for the
   external "__stack_chk_fail" function (void -> void, nothrow) and
   return a CALL_EXPR invoking it with no arguments.  */

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      /* void __stack_chk_fail (void), external, public, nothrow.  */
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}
    944  1.1  mrg 
/* Default for TARGET_STACK_PROTECT_FAIL when a hidden local variant is
   preferred: for PIC code with a hidden-symbol-capable assembler, call
   "__stack_chk_fail_local" instead of the external "__stack_chk_fail";
   otherwise fall back to the external variant.  Returns a CALL_EXPR.
   NOTE(review): this shares the stack_chk_fail_decl cache with
   default_external_stack_protect_fail, so only one of the two names is
   ever built per translation unit.  */

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  /* Assembler cannot emit hidden symbols; use the external entry.  */
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  /* The local variant only matters for PIC code.  */
  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      /* void __stack_chk_fail_local (void), external, public,
	 nothrow.  */
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
#if 1
      /*
       * This is a hack:
       * It appears that our gas does not generate @PLT for hidden
       * symbols. It could be that we need a newer version, or that
       * this local function is handled differently on linux.
       */
      /* NOTE(review): local patch — upstream uses VISIBILITY_HIDDEN
	 here; confirm whether the assembler limitation still
	 applies.  */
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
#else
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
#endif

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}
    988  1.1  mrg 
/* Generic hook answering "is X commutative?" by testing the RTX code's
   COMMUTATIVE_P predicate; the outer code argument is ignored.  */

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}
    995  1.1  mrg 
/* Default for TARGET_FUNCTION_VALUE: delegate to the legacy
   FUNCTION_VALUE macro if the target defines one, otherwise this hook
   must be overridden.  FN_DECL_OR_TYPE may be a decl, a type, or
   NULL.  */

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}
   1012  1.1  mrg 
/* Default for TARGET_LIBCALL_VALUE: delegate to the legacy
   LIBCALL_VALUE macro if the target defines one, otherwise this hook
   must be overridden.  The FUN rtx is ignored by the macro form.  */

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (MACRO_MODE (mode));
#else
  gcc_unreachable ();
#endif
}
   1023  1.1  mrg 
   1024  1.1  mrg /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */
   1025  1.1  mrg 
/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P: delegate to the
   legacy FUNCTION_VALUE_REGNO_P macro if defined, otherwise the hook
   must be overridden.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}
   1035  1.1  mrg 
   1036  1.1  mrg /* Choose the mode and rtx to use to zero REGNO, storing tem in PMODE and
   1037  1.1  mrg    PREGNO_RTX and returning TRUE if successful, otherwise returning FALSE.  If
   1038  1.1  mrg    the natural mode for REGNO doesn't work, attempt to group it with subsequent
   1039  1.1  mrg    adjacent registers set in TOZERO.  */
   1040  1.1  mrg 
/* Choose the mode and rtx to use to zero REGNO, storing tem in PMODE and
   PREGNO_RTX and returning TRUE if successful, otherwise returning FALSE.  If
   the natural mode for REGNO doesn't work, attempt to group it with subsequent
   adjacent registers set in TOZERO.  */

static inline bool
zcur_select_mode_rtx (unsigned int regno, machine_mode *pmode,
		      rtx *pregno_rtx, HARD_REG_SET tozero)
{
  rtx regno_rtx = regno_reg_rtx[regno];
  machine_mode mode = GET_MODE (regno_rtx);

  /* If the natural mode doesn't work, try some wider mode.  */
  if (!targetm.hard_regno_mode_ok (regno, mode))
    {
      bool found = false;
      /* Grow NREGS while every extra register in the group is also
	 slated to be zeroed (so zeroing the group zeroes only wanted
	 registers) and stays within the hard register file.  */
      for (int nregs = 2;
	   !found && nregs <= hard_regno_max_nregs
	     && regno + nregs <= FIRST_PSEUDO_REGISTER
	     && TEST_HARD_REG_BIT (tozero,
				   regno + nregs - 1);
	   nregs++)
	{
	  /* E_VOIDmode means no mode covers exactly NREGS registers
	     starting at REGNO; try a wider group.  */
	  mode = choose_hard_reg_mode (regno, nregs, 0);
	  if (mode == E_VOIDmode)
	    continue;
	  gcc_checking_assert (targetm.hard_regno_mode_ok (regno, mode));
	  regno_rtx = gen_rtx_REG (mode, regno);
	  found = true;
	}
      if (!found)
	return false;
    }

  *pmode = mode;
  *pregno_rtx = regno_rtx;
  return true;
}
   1074  1.1  mrg 
   1075  1.1  mrg /* The default hook for TARGET_ZERO_CALL_USED_REGS.  */
   1076  1.1  mrg 
/* The default hook for TARGET_ZERO_CALL_USED_REGS.  Zero every register
   in NEED_ZEROED_HARDREGS, first by loading immediate zeros, then by
   copying from registers that were successfully zeroed, iterating until
   no further progress is possible.  Returns the set of registers the
   caller should consider zeroed (here: the full input set).  */

HARD_REG_SET
default_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
{
  gcc_assert (!hard_reg_set_empty_p (need_zeroed_hardregs));

  /* Registers we could not zero in the current round.  */
  HARD_REG_SET failed;
  CLEAR_HARD_REG_SET (failed);
  bool progress = false;

  /* First, try to zero each register in need_zeroed_hardregs by
     loading a zero into it, taking note of any failures in
     FAILED.  */
  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
      {
	rtx_insn *last_insn = get_last_insn ();
	rtx regno_rtx;
	machine_mode mode;

	if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				   need_zeroed_hardregs))
	  {
	    SET_HARD_REG_BIT (failed, regno);
	    continue;
	  }

	rtx zero = CONST0_RTX (mode);
	rtx_insn *insn = emit_move_insn (regno_rtx, zero);
	if (!valid_insn_p (insn))
	  {
	    /* The move didn't produce a recognizable insn; undo it and
	       leave REGNO for the copy-based retry below.  */
	    SET_HARD_REG_BIT (failed, regno);
	    delete_insns_since (last_insn);
	  }
	else
	  {
	    progress = true;
	    /* MODE may cover several hard registers; skip past them.  */
	    regno += hard_regno_nregs (regno, mode) - 1;
	  }
      }

  /* Now retry with copies from zeroed registers, as long as we've
     made some PROGRESS, and registers remain to be zeroed in
     FAILED.  */
  while (progress && !hard_reg_set_empty_p (failed))
    {
      HARD_REG_SET retrying = failed;

      CLEAR_HARD_REG_SET (failed);
      progress = false;

      for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (TEST_HARD_REG_BIT (retrying, regno))
	  {
	    rtx regno_rtx;
	    machine_mode mode;

	    /* This might select registers we've already zeroed.  If grouping
	       with them is what it takes to get regno zeroed, so be it.  */
	    if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				       need_zeroed_hardregs))
	      {
		SET_HARD_REG_BIT (failed, regno);
		continue;
	      }

	    bool success = false;
	    /* Look for a source.  */
	    for (unsigned int src = 0; src < FIRST_PSEUDO_REGISTER; src++)
	      {
		/* If SRC hasn't been zeroed (yet?), skip it.  */
		if (! TEST_HARD_REG_BIT (need_zeroed_hardregs, src))
		  continue;
		if (TEST_HARD_REG_BIT (retrying, src))
		  continue;

		/* Check that SRC can hold MODE, and that any other
		   registers needed to hold MODE in SRC have also been
		   zeroed.  */
		if (!targetm.hard_regno_mode_ok (src, mode))
		  continue;
		unsigned n = targetm.hard_regno_nregs (src, mode);
		bool ok = true;
		for (unsigned i = 1; ok && i < n; i++)
		  ok = (TEST_HARD_REG_BIT (need_zeroed_hardregs, src + i)
			&& !TEST_HARD_REG_BIT (retrying, src + i));
		if (!ok)
		  continue;

		/* SRC is usable, try to copy from it.  */
		rtx_insn *last_insn = get_last_insn ();
		rtx src_rtx = gen_rtx_REG (mode, src);
		rtx_insn *insn = emit_move_insn (regno_rtx, src_rtx);
		if (!valid_insn_p (insn))
		  /* It didn't work, remove any inserts.  We'll look
		     for another SRC.  */
		  delete_insns_since (last_insn);
		else
		  {
		    /* We're done for REGNO.  */
		    success = true;
		    break;
		  }
	      }

	    /* If nothing worked for REGNO this round, mark it to be
	       retried if we get another round.  */
	    if (!success)
	      SET_HARD_REG_BIT (failed, regno);
	    else
	      {
		/* Take note so as to enable another round if needed.  */
		progress = true;
		regno += hard_regno_nregs (regno, mode) - 1;
	      }
	  }
    }

  /* If any register remained, report it.  */
  if (!progress)
    {
      /* Emit the sorry () diagnostic only once per compilation.  */
      static bool issued_error;
      if (!issued_error)
	{
	  issued_error = true;
	  sorry ("%qs not supported on this target",
		 "-fzero-call-used-regs");
	}
    }

  return need_zeroed_hardregs;
}
   1208  1.1  mrg 
   1209  1.1  mrg rtx
   1210  1.1  mrg default_internal_arg_pointer (void)
   1211  1.1  mrg {
   1212  1.1  mrg   /* If the reg that the virtual arg pointer will be translated into is
   1213  1.1  mrg      not a fixed reg or is the stack pointer, make a copy of the virtual
   1214  1.1  mrg      arg pointer, and address parms via the copy.  The frame pointer is
   1215  1.1  mrg      considered fixed even though it is not marked as such.  */
   1216  1.1  mrg   if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
   1217  1.1  mrg        || ! (fixed_regs[ARG_POINTER_REGNUM]
   1218  1.1  mrg 	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
   1219  1.1  mrg     return copy_to_reg (virtual_incoming_args_rtx);
   1220  1.1  mrg   else
   1221  1.1  mrg     return virtual_incoming_args_rtx;
   1222  1.1  mrg }
   1223  1.1  mrg 
/* Default for TARGET_STATIC_CHAIN: return the register holding the
   static chain, using the legacy STATIC_CHAIN_INCOMING_REGNUM /
   STATIC_CHAIN_REGNUM macros.  If neither is defined, nested functions
   are unsupported; issue a sorry () once and return a harmless MEM.  */

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
      /* No incoming-specific register: fall through to the generic
	 STATIC_CHAIN_REGNUM below.  */
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    /* Report lack of nested-function support only once.  */
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long at it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}
   1251  1.1  mrg 
/* Default for TARGET_TRAMPOLINE_INIT: trampolines are unsupported
   unless the target overrides this hook; issue a sorry ().  */

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}
   1258  1.1  mrg 
/* Default for TARGET_RETURN_POPS_ARGS: the callee pops no argument
   bytes on return.  */

poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}
   1264  1.1  mrg 
/* Default for TARGET_IRA_CHANGE_PSEUDO_ALLOCNO_CLASS: keep the class
   IRA chose (CL) unchanged.  */

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl,
					 reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}
   1272  1.1  mrg 
/* Default for TARGET_LRA_P: use LRA rather than reload.  */

extern bool
default_lra_p (void)
{
  return true;
}
   1278  1.1  mrg 
/* Default for TARGET_REGISTER_PRIORITY: all hard registers have equal
   allocation priority.  */

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}
   1284  1.1  mrg 
/* Default for TARGET_REGISTER_USAGE_LEVELING_P: do not try to level
   register usage.  */

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}
   1290  1.1  mrg 
/* Default for TARGET_DIFFERENT_ADDR_DISPLACEMENT_P: displacement
   ranges do not differ between address registers.  */

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}
   1296  1.1  mrg 
/* Default for TARGET_SECONDARY_RELOAD: implement the hook in terms of
   the legacy SECONDARY_INPUT_RELOAD_CLASS / SECONDARY_OUTPUT_RELOAD_CLASS
   macros and the reload_in/reload_out optabs.  IN_P says whether this is
   an input (load) or output (store) reload of X in RELOAD_MODE for a
   register of class RELOAD_CLASS_I.  Returns the class of the needed
   intermediate register, or NO_REGS if none (in which case SRI->icode
   may name an insn that performs the reload directly).  */

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  /* A previous round already picked a tertiary icode; reuse it.  */
  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
					   MACRO_MODE (reload_mode), x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
					    MACRO_MODE (reload_mode), x);
#endif
  if (rclass != NO_REGS)
    {
      /* See whether a dedicated reload_in/reload_out pattern can do the
	 job instead of a plain secondary register.  */
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  /* Pattern operands: 0/1 are dest/source, 2 is the scratch.  */
	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  /* Output operand of an input reload must be
		     write-only ('=').  */
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      /* The pattern itself can take the reloaded value; only
		 the scratch is needed, so no secondary class.  */
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

        }
      /* Record the pattern as the direct (icode) or tertiary (t_icode)
	 reload insn depending on whether a class is still required.  */
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}
   1380  1.1  mrg 
/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.
   Under reload (not LRA), sub-word integral values are widened to a
   full word for the memory round-trip; otherwise MODE is used
   directly.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}
   1392  1.1  mrg 
   1393  1.1  mrg /* By default, if flag_pic is true, then neither local nor global relocs
   1394  1.1  mrg    should be placed in readonly memory.  */
   1395  1.1  mrg 
   1396  1.1  mrg int
   1397  1.1  mrg default_reloc_rw_mask (void)
   1398  1.1  mrg {
   1399  1.1  mrg   return flag_pic ? 3 : 0;
   1400  1.1  mrg }
   1401  1.1  mrg 
   1402  1.1  mrg /* By default, address diff vectors are generated
   1403  1.1  mrg for jump tables when flag_pic is true.  */
   1404  1.1  mrg 
/* Default for TARGET_GENERATE_PIC_ADDR_DIFF_VEC: emit address-difference
   jump-table vectors exactly when compiling position-independent
   code.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}
   1410  1.1  mrg 
   1411  1.1  mrg /* By default, do no modification. */
/* Default for TARGET_MANGLE_DECL_ASSEMBLER_NAME: perform no extra
   mangling; return the identifier ID unchanged.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
   return id;
}
   1417  1.1  mrg 
   1418  1.1  mrg /* The default implementation of TARGET_STATIC_RTX_ALIGNMENT.  */
   1419  1.1  mrg 
/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT: use the
   natural alignment of MODE.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}
   1425  1.1  mrg 
   1426  1.1  mrg /* The default implementation of TARGET_CONSTANT_ALIGNMENT.  */
   1427  1.1  mrg 
/* The default implementation of TARGET_CONSTANT_ALIGNMENT: keep the
   alignment the middle end already computed.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}
   1433  1.1  mrg 
   1434  1.1  mrg /* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
   1435  1.1  mrg    to at least BITS_PER_WORD but otherwise makes no changes.  */
   1436  1.1  mrg 
   1437  1.1  mrg HOST_WIDE_INT
   1438  1.1  mrg constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
   1439  1.1  mrg {
   1440  1.1  mrg   if (TREE_CODE (exp) == STRING_CST)
   1441  1.1  mrg     return MAX (align, BITS_PER_WORD);
   1442  1.1  mrg   return align;
   1443  1.1  mrg }
   1444  1.1  mrg 
   1445  1.1  mrg /* Default to natural alignment for vector types, bounded by
   1446  1.1  mrg    MAX_OFILE_ALIGNMENT.  */
   1447  1.1  mrg 
   1448  1.1  mrg HOST_WIDE_INT
   1449  1.1  mrg default_vector_alignment (const_tree type)
   1450  1.1  mrg {
   1451  1.1  mrg   unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
   1452  1.1  mrg   tree size = TYPE_SIZE (type);
   1453  1.1  mrg   if (tree_fits_uhwi_p (size))
   1454  1.1  mrg     align = tree_to_uhwi (size);
   1455  1.1  mrg   if (align >= MAX_OFILE_ALIGNMENT)
   1456  1.1  mrg     return MAX_OFILE_ALIGNMENT;
   1457  1.1  mrg   return MAX (align, GET_MODE_ALIGNMENT (TYPE_MODE (type)));
   1458  1.1  mrg }
   1459  1.1  mrg 
   1460  1.1  mrg /* The default implementation of
   1461  1.1  mrg    TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT.  */
   1462  1.1  mrg 
/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT: the type's own
   alignment.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}
   1468  1.1  mrg 
   1469  1.1  mrg /* By default assume vectors of element TYPE require a multiple of the natural
   1470  1.1  mrg    alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}
   1476  1.1  mrg 
   1477  1.1  mrg /* By default, assume that a target supports any factor of misalignment
   1478  1.1  mrg    memory access if it supports movmisalign patten.
   1479  1.1  mrg    is_packed is true if the memory access is defined in a packed struct.  */
   1480  1.1  mrg bool
   1481  1.1  mrg default_builtin_support_vector_misalignment (machine_mode mode,
   1482  1.1  mrg 					     const_tree type
   1483  1.1  mrg 					     ATTRIBUTE_UNUSED,
   1484  1.1  mrg 					     int misalignment
   1485  1.1  mrg 					     ATTRIBUTE_UNUSED,
   1486  1.1  mrg 					     bool is_packed
   1487  1.1  mrg 					     ATTRIBUTE_UNUSED)
   1488  1.1  mrg {
   1489  1.1  mrg   if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
   1490  1.1  mrg     return true;
   1491  1.1  mrg   return false;
   1492  1.1  mrg }
   1493  1.1  mrg 
   1494  1.1  mrg /* By default, only attempt to parallelize bitwise operations, and
   1495  1.1  mrg    possibly adds/subtracts using bit-twiddling.  */
   1496  1.1  mrg 
/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling: use word_mode as the
   "vector" mode.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}
   1502  1.1  mrg 
   1503  1.1  mrg /* By default do not split reductions further.  */
   1504  1.1  mrg 
/* By default do not split reductions further: return MODE itself.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}
   1510  1.1  mrg 
   1511  1.1  mrg /* By default only the preferred vector mode is tried.  */
   1512  1.1  mrg 
/* Default for TARGET_VECTORIZE_AUTOVECTORIZE_VECTOR_MODES: add no
   extra modes, so only the preferred vector mode is tried; the
   returned flags are 0.  */

unsigned int
default_autovectorize_vector_modes (vector_modes *, bool)
{
  return 0;
}
   1518  1.1  mrg 
   1519  1.1  mrg /* The default implementation of TARGET_VECTORIZE_RELATED_MODE.  */
   1520  1.1  mrg 
/* The default implementation of TARGET_VECTORIZE_RELATED_MODE: find a
   vector mode with element mode ELEMENT_MODE and NUNITS elements
   (deriving NUNITS from VECTOR_MODE's byte size when NUNITS is 0),
   provided the target supports it; otherwise return no mode.  */

opt_machine_mode
default_vectorize_related_mode (machine_mode vector_mode,
				scalar_mode element_mode,
				poly_uint64 nunits)
{
  machine_mode result_mode;
  /* NUNITS == 0 means "same total size as VECTOR_MODE": derive the
     element count from the size ratio.  */
  if ((maybe_ne (nunits, 0U)
       || multiple_p (GET_MODE_SIZE (vector_mode),
		      GET_MODE_SIZE (element_mode), &nunits))
      && mode_for_vector (element_mode, nunits).exists (&result_mode)
      && VECTOR_MODE_P (result_mode)
      && targetm.vector_mode_supported_p (result_mode))
    return result_mode;

  return opt_machine_mode ();
}
   1537  1.1  mrg 
   1538  1.1  mrg /* By default a vector of integers is used as a mask.  */
   1539  1.1  mrg 
opt_machine_mode
default_get_mask_mode (machine_mode mode)
{
  /* Masks are integer vectors with the same shape as MODE.  */
  return related_int_vector_mode (mode);
}
   1545  1.1  mrg 
   1546  1.1  mrg /* By default consider masked stores to be expensive.  */
   1547  1.1  mrg 
bool
default_empty_mask_is_expensive (unsigned ifn)
{
  /* Of the masked internal functions, only masked stores are treated
     as expensive when the mask turns out to be empty.  */
  return ifn == IFN_MASK_STORE;
}
   1553  1.1  mrg 
   1554  1.1  mrg /* By default, the cost model accumulates three separate costs (prologue,
   1555  1.1  mrg    loop body, and epilogue) for a vectorized loop or block.  So allocate an
   1556  1.1  mrg    array of three unsigned ints, set it to zero, and return its address.  */
   1557  1.1  mrg 
vector_costs *
default_vectorize_create_costs (vec_info *vinfo, bool costing_for_scalar)
{
  /* Heap-allocate the generic cost structure; the caller owns and
     deletes the returned object.  */
  return new vector_costs (vinfo, costing_for_scalar);
}
   1563  1.1  mrg 
   1564  1.1  mrg /* Determine whether or not a pointer mode is valid. Assume defaults
   1565  1.1  mrg    of ptr_mode or Pmode - can be overridden.  */
   1566  1.1  mrg bool
   1567  1.1  mrg default_valid_pointer_mode (scalar_int_mode mode)
   1568  1.1  mrg {
   1569  1.1  mrg   return (mode == ptr_mode || mode == Pmode);
   1570  1.1  mrg }
   1571  1.1  mrg 
   1572  1.1  mrg /* Determine whether the memory reference specified by REF may alias
   1573  1.1  mrg    the C libraries errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location declaration
     is never defined in the current compilation unit and may not be
     aliased by a local variable.  */
  if (DECL_P (base)
      && DECL_EXTERNAL (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      /* An indirect int access may reach errno unless points-to
	 information proves the pointer targets only known local
	 memory; with no info, or "anything"/"nonlocal" points-to,
	 answer conservatively.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
   1600  1.1  mrg 
   1601  1.1  mrg /* Return the mode for a pointer to a given ADDRSPACE,
   1602  1.1  mrg    defaulting to ptr_mode for all address spaces.  */
   1603  1.1  mrg 
scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Every address space shares the generic pointer mode.  */
  return ptr_mode;
}
   1609  1.1  mrg 
   1610  1.1  mrg /* Return the mode for an address in a given ADDRSPACE,
   1611  1.1  mrg    defaulting to Pmode for all address spaces.  */
   1612  1.1  mrg 
scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Every address space shares the generic address mode.  */
  return Pmode;
}
   1618  1.1  mrg 
   1619  1.1  mrg /* Named address space version of valid_pointer_mode.
   1620  1.1  mrg    To match the above, the same modes apply to all address spaces.  */
   1621  1.1  mrg 
bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Delegate to the generic hook, ignoring the address space.  */
  return targetm.valid_pointer_mode (mode);
}
   1628  1.1  mrg 
   1629  1.1  mrg /* Some places still assume that all pointer or address modes are the
   1630  1.1  mrg    standard Pmode and ptr_mode.  These optimizations become invalid if
   1631  1.1  mrg    the target actually supports multiple different modes.  For now,
   1632  1.1  mrg    we disable such optimizations on such targets, using this function.  */
   1633  1.1  mrg 
   1634  1.1  mrg bool
   1635  1.1  mrg target_default_pointer_address_modes_p (void)
   1636  1.1  mrg {
   1637  1.1  mrg   if (targetm.addr_space.address_mode != default_addr_space_address_mode)
   1638  1.1  mrg     return false;
   1639  1.1  mrg   if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
   1640  1.1  mrg     return false;
   1641  1.1  mrg 
   1642  1.1  mrg   return true;
   1643  1.1  mrg }
   1644  1.1  mrg 
   1645  1.1  mrg /* Named address space version of legitimate_address_p.
   1646  1.1  mrg    By default, all address spaces have the same form.  */
   1647  1.1  mrg 
bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict,
					 addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Delegate to the generic hook, ignoring the address space.  */
  return targetm.legitimate_address_p (mode, mem, strict);
}
   1655  1.1  mrg 
   1656  1.1  mrg /* Named address space version of LEGITIMIZE_ADDRESS.
   1657  1.1  mrg    By default, all address spaces have the same form.  */
   1658  1.1  mrg 
rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Delegate to the generic hook, ignoring the address space.  */
  return targetm.legitimize_address (x, oldx, mode);
}
   1665  1.1  mrg 
   1666  1.1  mrg /* The default hook for determining if one named address space is a subset of
   1667  1.1  mrg    another and to return which address space to use as the common address
   1668  1.1  mrg    space.  */
   1669  1.1  mrg 
bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  /* With no target-specific relationships, an address space is only a
     subset of itself.  */
  return (subset == superset);
}
   1675  1.1  mrg 
   1676  1.1  mrg /* The default hook for determining if 0 within a named address
   1677  1.1  mrg    space is a valid address.  */
   1678  1.1  mrg 
bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Address 0 is assumed invalid (the usual null-pointer convention).  */
  return false;
}
   1684  1.1  mrg 
   1685  1.1  mrg /* The default hook for debugging the address space is to return the
   1686  1.1  mrg    address space number to indicate DW_AT_address_class.  */
int
default_addr_space_debug (addr_space_t as)
{
  /* Use the raw address-space number as the DW_AT_address_class value.  */
  return as;
}
   1692  1.1  mrg 
   1693  1.1  mrg /* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   1694  1.1  mrg    Don't complain about any address space.  */
   1695  1.1  mrg 
void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
  /* Intentionally empty: no address space is diagnosed by default.  */
}
   1700  1.1  mrg 
   1701  1.1  mrg 
   1702  1.1  mrg /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
   1703  1.1  mrg    called for targets with only a generic address space.  */
   1704  1.1  mrg 
rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  /* With only the generic address space there is nothing to convert
     between, so reaching this hook indicates a front/middle-end bug.  */
  gcc_unreachable ();
}
   1712  1.1  mrg 
/* The default implementation of TARGET_HARD_REGNO_NREGS.  */
   1714  1.1  mrg 
unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* One hard register per word of MODE, rounding up.  Targets with
     variable-sized modes must provide their own definition of this
     hook (to_constant requires a compile-time-constant size).  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}
   1722  1.1  mrg 
/* By default, any hard register may be used as a scratch register.  */

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}
   1728  1.1  mrg 
   1729  1.1  mrg /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */
   1730  1.1  mrg 
bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Assume no address's validity depends on the mode it is used in.  */
  return false;
}
   1737  1.1  mrg 
/* NOTE(review): this declaration's parameter list (rtx, rtx) does not
   match the definition below (rtx, rtx_insn *, rtx); in C++ it merely
   declares a separate, never-defined overload — confirm whether it is
   stale and can be removed.  */
extern bool default_new_address_profitable_p (rtx, rtx);


/* The default implementation of TARGET_NEW_ADDRESS_PROFITABLE_P.  */

bool
default_new_address_profitable_p (rtx memref ATTRIBUTE_UNUSED,
				  rtx_insn *insn ATTRIBUTE_UNUSED,
				  rtx new_addr ATTRIBUTE_UNUSED)
{
  /* Assume any replacement address is as profitable as the original.  */
  return true;
}
   1750  1.1  mrg 
/* Targets without "target" attribute support warn and reject it.  */

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}
   1762  1.1  mrg 
bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_options" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "%<#pragma GCC target%> is not supported for this machine");

  return false;
}
   1776  1.1  mrg 
   1777  1.1  mrg bool
   1778  1.1  mrg default_target_can_inline_p (tree caller, tree callee)
   1779  1.1  mrg {
   1780  1.1  mrg   tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
   1781  1.1  mrg   tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
   1782  1.1  mrg   if (! callee_opts)
   1783  1.1  mrg     callee_opts = target_option_default_node;
   1784  1.1  mrg   if (! caller_opts)
   1785  1.1  mrg     caller_opts = target_option_default_node;
   1786  1.1  mrg 
   1787  1.1  mrg   /* If both caller and callee have attributes, assume that if the
   1788  1.1  mrg      pointer is different, the two functions have different target
   1789  1.1  mrg      options since build_target_option_node uses a hash table for the
   1790  1.1  mrg      options.  */
   1791  1.1  mrg   return callee_opts == caller_opts;
   1792  1.1  mrg }
   1793  1.1  mrg 
   1794  1.1  mrg /* By default, return false to not need to collect any target information
   1795  1.1  mrg    for inlining.  Target maintainer should re-define the hook if the
   1796  1.1  mrg    target want to take advantage of it.  */
   1797  1.1  mrg 
bool
default_need_ipa_fn_target_info (const_tree, unsigned int &)
{
  /* No per-function target info is collected for inlining by default.  */
  return false;
}
   1803  1.1  mrg 
/* Companion of the hook above: with no info collected, there is
   nothing to update per statement either.  */

bool
default_update_ipa_fn_target_info (unsigned int &, const gimple *)
{
  return false;
}
   1809  1.1  mrg 
   1810  1.1  mrg /* If the machine does not have a case insn that compares the bounds,
   1811  1.1  mrg    this means extra overhead for dispatch tables, which raises the
   1812  1.1  mrg    threshold for using them.  */
   1813  1.1  mrg 
   1814  1.1  mrg unsigned int
   1815  1.1  mrg default_case_values_threshold (void)
   1816  1.1  mrg {
   1817  1.1  mrg   return (targetm.have_casesi () ? 4 : 5);
   1818  1.1  mrg }
   1819  1.1  mrg 
bool
default_have_conditional_execution (void)
{
  /* Reflect whether the machine description defines conditional
     execution (HAVE_conditional_execution).  */
  return HAVE_conditional_execution;
}
   1825  1.1  mrg 
   1826  1.1  mrg /* By default we assume that c99 functions are present at the runtime,
   1827  1.1  mrg    but sincos is not.  */
   1828  1.1  mrg bool
   1829  1.1  mrg default_libc_has_function (enum function_class fn_class,
   1830  1.1  mrg 			   tree type ATTRIBUTE_UNUSED)
   1831  1.1  mrg {
   1832  1.1  mrg   if (fn_class == function_c94
   1833  1.1  mrg       || fn_class == function_c99_misc
   1834  1.1  mrg       || fn_class == function_c99_math_complex)
   1835  1.1  mrg     return true;
   1836  1.1  mrg 
   1837  1.1  mrg   return false;
   1838  1.1  mrg }
   1839  1.1  mrg 
/* By default, assume that libc does not provide a fast implementation.  */
   1841  1.1  mrg 
bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  /* Assume no builtin has a fast libc implementation.  */
  return false;
}
   1847  1.1  mrg 
/* glibc is assumed to provide every queried function class.  */

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
		       tree type ATTRIBUTE_UNUSED)
{
  return true;
}
   1854  1.1  mrg 
/* For runtimes lacking C99 support: assume no function class exists.  */

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return false;
}
   1861  1.1  mrg 
   1862  1.1  mrg /* Assume some c99 functions are present at the runtime including sincos.  */
   1863  1.1  mrg bool
   1864  1.1  mrg bsd_libc_has_function (enum function_class fn_class,
   1865  1.1  mrg 		       tree type ATTRIBUTE_UNUSED)
   1866  1.1  mrg {
   1867  1.1  mrg   if (fn_class == function_c94
   1868  1.1  mrg       || fn_class == function_c99_misc
   1869  1.1  mrg       || fn_class == function_sincos)
   1870  1.1  mrg     return true;
   1871  1.1  mrg 
   1872  1.1  mrg   return false;
   1873  1.1  mrg }
   1874  1.1  mrg 
   1875  1.1  mrg 
tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  /* No transactional-memory load/store builtin by default.  */
  return NULL_TREE;
}
   1881  1.1  mrg 
   1882  1.1  mrg /* Compute cost of moving registers to/from memory.  */
   1883  1.1  mrg 
int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
    /* Baseline of 4 plus whatever secondary reloads would cost.  */
    return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
    /* Honor the legacy target macro while it is still defined.  */
    return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}
   1895  1.1  mrg 
   1896  1.1  mrg /* Compute cost of moving data from a register of class FROM to one of
   1897  1.1  mrg    TO, using MODE.  */
   1898  1.1  mrg 
int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                            reg_class_t from ATTRIBUTE_UNUSED,
                            reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  /* 2 is the traditional cost of a simple register-register move.  */
  return 2;
#else
  /* Honor the legacy target macro while it is still defined.  */
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
			     (enum reg_class) from, (enum reg_class) to);
#endif
}
   1911  1.1  mrg 
   1912  1.1  mrg /* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.  */
   1913  1.1  mrg 
bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  /* Unaligned accesses are considered slow exactly when the target
     requires strict alignment.  */
  return STRICT_ALIGNMENT;
}
   1919  1.1  mrg 
   1920  1.1  mrg /* The default implementation of TARGET_ESTIMATED_POLY_VALUE.  */
   1921  1.1  mrg 
HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x, poly_value_estimate_kind)
{
  /* Estimate a runtime-variable poly_int by its constant term.  */
  return x.coeffs[0];
}
   1927  1.1  mrg 
   1928  1.1  mrg /* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   1929  1.1  mrg    behavior.  SPEED_P is true if we are compiling for speed.  */
   1930  1.1  mrg 
unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  /* Prefer the target's legacy macro when it exists.  */
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  /* A cpymem pattern exists, so favor it over by-pieces moves.  */
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
   1946  1.1  mrg 
   1947  1.1  mrg /* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   1948  1.1  mrg    used; return FALSE if the cpymem/setmem optab should be expanded, or
   1949  1.1  mrg    a call to memcpy emitted.  */
   1950  1.1  mrg 
bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  /* Per-operation piece-size limit and insn-count ratio; left 0 if OP
     is somehow unhandled, which makes the comparison below false.  */
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      /* Stores reuse the move ratio rather than SET_RATIO.  */
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  /* Use by-pieces only when it needs strictly fewer insns than RATIO.  */
  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}
   1987  1.1  mrg 
   1988  1.1  mrg /* This hook controls code generation for expanding a memcmp operation by
   1989  1.1  mrg    pieces.  Return 1 for the normal pattern of compare/jump after each pair
   1990  1.1  mrg    of loads, or a higher number to reduce the number of branches.  */
   1991  1.1  mrg 
int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  /* Branch after every pair of loads (the normal pattern).  */
  return 1;
}
   1997  1.1  mrg 
   1998  1.1  mrg /* Helper for default_print_patchable_function_entry and other
   1999  1.1  mrg    print_patchable_function_entry hook implementations.  */
   2000  1.1  mrg 
void
default_print_patchable_function_entry_1 (FILE *file,
					  unsigned HOST_WIDE_INT
					  patch_area_size,
					  bool record_p,
					  unsigned int flags)
{
  const char *nop_templ = 0;
  int code_num;
  /* Build a throwaway NOP insn just to obtain its assembly template.  */
  rtx_insn *my_nop = make_insn_raw (gen_nop ());

  /* We use the template alone, relying on the (currently sane) assumption
     that the NOP template does not have variable operands.  */
  code_num = recog_memoized (my_nop);
  nop_templ = get_insn_template (code_num, my_nop);

  if (record_p && targetm_common.have_named_sections)
    {
      char buf[256];
      /* Counter shared across calls so each patch area gets a unique
	 LPFE label.  */
      static int patch_area_number;
      section *previous_section = in_section;
      const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);

      gcc_assert (asm_op != NULL);
      patch_area_number++;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", patch_area_number);

      /* Emit a pointer-sized reference to the label into the recording
	 section, then return to the text section and define the label
	 itself just before the NOPs.  */
      switch_to_section (get_section ("__patchable_function_entries",
				      flags, current_function_decl));
      assemble_align (POINTER_SIZE);
      fputs (asm_op, file);
      assemble_name_raw (file, buf);
      fputc ('\n', file);

      switch_to_section (previous_section);
      ASM_OUTPUT_LABEL (file, buf);
    }

  /* Output PATCH_AREA_SIZE NOPs.  */
  unsigned i;
  for (i = 0; i < patch_area_size; ++i)
    output_asm_insn (nop_templ, NULL);
}
   2043  1.1  mrg 
   2044  1.1  mrg /* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   2045  1.1  mrg    entry.  If RECORD_P is true and the target supports named sections,
   2046  1.1  mrg    the location of the NOPs will be recorded in a special object section
   2047  1.1  mrg    called "__patchable_function_entries".  This routine may be called
   2048  1.1  mrg    twice per function to put NOPs before and after the function
   2049  1.1  mrg    entry.  */
   2050  1.1  mrg 
void
default_print_patchable_function_entry (FILE *file,
					unsigned HOST_WIDE_INT patch_area_size,
					bool record_p)
{
  /* The recording section is writable and relro; add link-order
     tracking when the assembler supports it, so the section can be
     garbage-collected with its function.  */
  unsigned int flags = SECTION_WRITE | SECTION_RELRO;
  if (HAVE_GAS_SECTION_LINK_ORDER)
    flags |= SECTION_LINK_ORDER;
  default_print_patchable_function_entry_1 (file, patch_area_size, record_p,
					    flags);
}
   2062  1.1  mrg 
/* Whether the profiling call is emitted before the prologue, as
   selected by the target's PROFILE_BEFORE_PROLOGUE macro.  */

bool
default_profile_before_prologue (void)
{
  bool before_prologue = false;
#ifdef PROFILE_BEFORE_PROLOGUE
  before_prologue = true;
#endif
  return before_prologue;
}
   2072  1.1  mrg 
   2073  1.1  mrg /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */
   2074  1.1  mrg 
reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
			        reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  /* Honor the legacy target macro while it is still defined.  */
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  /* Otherwise any class is as good as any other.  */
  return rclass;
#endif
}
   2085  1.1  mrg 
   2086  1.1  mrg /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.  */
   2087  1.1  mrg 
reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  /* No preference: keep the class the caller suggested.  */
  return rclass;
}
   2094  1.1  mrg 
   2095  1.1  mrg /* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  /* NO_REGS signals "no preference" to the register renamer.  */
  return NO_REGS;
}
   2101  1.1  mrg 
   2102  1.1  mrg /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */
   2103  1.1  mrg 
   2104  1.1  mrg bool
   2105  1.1  mrg default_class_likely_spilled_p (reg_class_t rclass)
   2106  1.1  mrg {
   2107  1.1  mrg   return (reg_class_size[(int) rclass] == 1);
   2108  1.1  mrg }
   2109  1.1  mrg 
   2110  1.1  mrg /* The default implementation of TARGET_CLASS_MAX_NREGS.  */
   2111  1.1  mrg 
   2112  1.1  mrg unsigned char
   2113  1.1  mrg default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
   2114  1.1  mrg 			 machine_mode mode ATTRIBUTE_UNUSED)
   2115  1.1  mrg {
   2116  1.1  mrg #ifdef CLASS_MAX_NREGS
   2117  1.1  mrg   return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
   2118  1.1  mrg 					  MACRO_MODE (mode));
   2119  1.1  mrg #else
   2120  1.1  mrg   /* Targets with variable-sized modes must provide their own definition
   2121  1.1  mrg      of this hook.  */
   2122  1.1  mrg   unsigned int size = GET_MODE_SIZE (mode).to_constant ();
   2123  1.1  mrg   return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
   2124  1.1  mrg #endif
   2125  1.1  mrg }
   2126  1.1  mrg 
   2127  1.1  mrg /* Determine the debugging unwind mechanism for the target.  */
   2128  1.1  mrg 
enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (dwarf_debuginfo_p ())
    return UI_DWARF2;
#endif

  /* No unwind info for debugging.  */
  return UI_NONE;
}
   2147  1.1  mrg 
   2148  1.1  mrg /* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
   2149  1.1  mrg    must define this hook.  */
   2150  1.1  mrg 
unsigned int
default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
{
  /* Only reachable on targets with NUM_POLY_INT_COEFFS > 1, which
     are required to override this hook.  */
  gcc_unreachable ();
}
   2156  1.1  mrg 
   2157  1.1  mrg /* Determine the correct mode for a Dwarf frame register that represents
   2158  1.1  mrg    register REGNO.  */
   2159  1.1  mrg 
machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  /* If the EH-edge ABI only preserves part of the register in its raw
     mode, fall back to the widest mode that is fully call-saved.  */
  if (targetm.hard_regno_call_part_clobbered (eh_edge_abi.id (),
					      regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, &eh_edge_abi);
  return save_mode;
}
   2170  1.1  mrg 
   2171  1.1  mrg /* To be used by targets where reg_raw_mode doesn't return the right
   2172  1.1  mrg    mode for registers used in apply_builtin_return and apply_builtin_arg.  */
   2173  1.1  mrg 
fixed_size_mode
default_get_reg_raw_mode (int regno)
{
  /* Targets must override this hook if the underlying register is
     variable-sized (the as_a cast asserts the mode is fixed-size).  */
  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
}
   2181  1.1  mrg 
   2182  1.1  mrg /* Return true if a leaf function should stay leaf even with profiling
   2183  1.1  mrg    enabled.  */
   2184  1.1  mrg 
bool
default_keep_leaf_when_profiled ()
{
  /* Profiling is assumed to break leaf-ness by default.  */
  return false;
}
   2190  1.1  mrg 
   2191  1.1  mrg /* Return true if the state of option OPTION should be stored in PCH files
   2192  1.1  mrg    and checked by default_pch_valid_p.  Store the option's current state
   2193  1.1  mrg    in STATE if so.  */
   2194  1.1  mrg 
   2195  1.1  mrg static inline bool
   2196  1.1  mrg option_affects_pch_p (int option, struct cl_option_state *state)
   2197  1.1  mrg {
   2198  1.1  mrg   if ((cl_options[option].flags & CL_TARGET) == 0)
   2199  1.1  mrg     return false;
   2200  1.1  mrg   if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
   2201  1.1  mrg     return false;
   2202  1.1  mrg   if (option_flag_var (option, &global_options) == &target_flags)
   2203  1.1  mrg     if (targetm.check_pch_target_flags)
   2204  1.1  mrg       return false;
   2205  1.1  mrg   return get_option_state (&global_options, option, state);
   2206  1.1  mrg }
   2207  1.1  mrg 
   2208  1.1  mrg /* Default version of get_pch_validity.
   2209  1.1  mrg    By default, every flag difference is fatal; that will be mostly right for
   2210  1.1  mrg    most targets, but completely right for very few.  */
   2211  1.1  mrg 
void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  /* First pass: compute the blob size.  Layout (must stay in sync with
     default_pch_valid_p): flag_pic byte, flag_pie byte, optional
     target_flags copy, then the state of each PCH-relevant option.  */
  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  /* Second pass: fill the blob in the same order.  */
  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
   2245  1.1  mrg 
   2246  1.1  mrg /* Return a message which says that a PCH file was created with a different
   2247  1.1  mrg    setting of OPTION.  */
   2248  1.1  mrg 
   2249  1.1  mrg static const char *
   2250  1.1  mrg pch_option_mismatch (const char *option)
   2251  1.1  mrg {
   2252  1.1  mrg   return xasprintf (_("created and used with differing settings of '%s'"),
   2253  1.1  mrg 		    option);
   2254  1.1  mrg }
   2255  1.1  mrg 
   2256  1.1  mrg /* Default version of pch_valid_p.  */
   2257  1.1  mrg 
const char *
default_pch_valid_p (const void *data_p, size_t len ATTRIBUTE_UNUSED)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* DATA was produced by default_get_pch_validity; walk it in exactly
     the order it was written.  Return NULL if the PCH is usable, or a
     message explaining why it is not.  */

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  /* Any difference in the remaining recorded option state is fatal.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
      }

  return NULL;
}
   2295  1.1  mrg 
   2296  1.1  mrg /* Default version of cstore_mode.  */
   2297  1.1  mrg 
scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  /* The result mode of a cstore pattern is the mode of its output
     operand (operand 0); it is required to be a scalar integer mode.  */
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}
   2303  1.1  mrg 
   2304  1.1  mrg /* Default version of member_type_forces_blk.  */
   2305  1.1  mrg 
bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  /* By default, no member type forces the containing aggregate into
     BLKmode.  */
  return false;
}
   2311  1.1  mrg 
   2312  1.1  mrg /* Default version of canonicalize_comparison.  */
   2313  1.1  mrg 
void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
  /* By default the comparison is left untouched; targets override this
     hook to rewrite comparisons into a preferred form.  */
}
   2318  1.1  mrg 
   2319  1.1  mrg /* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */
   2320  1.1  mrg 
void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
  /* By default, emit nothing: the target has no floating-point
     environment handling for atomic compound assignment.  */
}
   2325  1.1  mrg 
   2326  1.1  mrg #ifndef PAD_VARARGS_DOWN
   2327  1.1  mrg #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
   2328  1.1  mrg #endif
   2329  1.1  mrg 
   2330  1.1  mrg /* Build an indirect-ref expression over the given TREE, which represents a
   2331  1.1  mrg    piece of a va_arg() expansion.  */
   2332  1.1  mrg tree
   2333  1.1  mrg build_va_arg_indirect_ref (tree addr)
   2334  1.1  mrg {
   2335  1.1  mrg   addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
   2336  1.1  mrg   return addr;
   2337  1.1  mrg }
   2338  1.1  mrg 
   2339  1.1  mrg /* The "standard" implementation of va_arg: read the value from the
   2340  1.1  mrg    current (padded) address and increment by the (padded) size.  */
   2341  1.1  mrg 
tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  /* Arguments passed by reference are stored as a pointer in the va
     area; fetch the pointer here and dereference it at the end.  */
  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  /* Complex arguments the target splits are fetched as two separate
     halves and recombined into a COMPLEX_EXPR.  */
  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
					    TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
					    TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
   }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, emitted
	 as two separate assignments so each stays a valid GIMPLE
	 statement.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments ADDR currently points at a pointer to
     the real value; strip one level of indirection.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
   2452  1.1  mrg 
   2453  1.1  mrg /* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   2454  1.1  mrg    not support nested low-overhead loops.  */
   2455  1.1  mrg 
   2456  1.1  mrg bool
   2457  1.1  mrg can_use_doloop_if_innermost (const widest_int &, const widest_int &,
   2458  1.1  mrg 			     unsigned int loop_depth, bool)
   2459  1.1  mrg {
   2460  1.1  mrg   return loop_depth == 1;
   2461  1.1  mrg }
   2462  1.1  mrg 
   2463  1.1  mrg /* Default implementation of TARGET_OPTAB_SUPPORTED_P.  */
   2464  1.1  mrg 
bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  /* By default every optab is considered supported, regardless of what
     we are optimizing for.  */
  return true;
}
   2470  1.1  mrg 
   2471  1.1  mrg /* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST.  */
   2472  1.1  mrg 
   2473  1.1  mrg unsigned int
   2474  1.1  mrg default_max_noce_ifcvt_seq_cost (edge e)
   2475  1.1  mrg {
   2476  1.1  mrg   bool predictable_p = predictable_edge_p (e);
   2477  1.1  mrg 
   2478  1.1  mrg   if (predictable_p)
   2479  1.1  mrg     {
   2480  1.1  mrg       if (OPTION_SET_P (param_max_rtl_if_conversion_predictable_cost))
   2481  1.1  mrg 	return param_max_rtl_if_conversion_predictable_cost;
   2482  1.1  mrg     }
   2483  1.1  mrg   else
   2484  1.1  mrg     {
   2485  1.1  mrg       if (OPTION_SET_P (param_max_rtl_if_conversion_unpredictable_cost))
   2486  1.1  mrg 	return param_max_rtl_if_conversion_unpredictable_cost;
   2487  1.1  mrg     }
   2488  1.1  mrg 
   2489  1.1  mrg   return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
   2490  1.1  mrg }
   2491  1.1  mrg 
   2492  1.1  mrg /* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */
   2493  1.1  mrg 
   2494  1.1  mrg unsigned int
   2495  1.1  mrg default_min_arithmetic_precision (void)
   2496  1.1  mrg {
   2497  1.1  mrg   return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
   2498  1.1  mrg }
   2499  1.1  mrg 
   2500  1.1  mrg /* Default implementation of TARGET_C_EXCESS_PRECISION.  */
   2501  1.1  mrg 
enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  /* By default, promote float operations to be evaluated in (at least)
     float, whatever excess-precision style was requested.  */
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}
   2507  1.1  mrg 
   2508  1.1  mrg /* Default implementation for
   2509  1.1  mrg   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  /* Return 0: no special alloca probe range is requested by default.  */
  return 0;
}
   2515  1.1  mrg 
   2516  1.1  mrg /* The default implementation of TARGET_EARLY_REMAT_MODES.  */
   2517  1.1  mrg 
void
default_select_early_remat_modes (sbitmap)
{
  /* By default, leave the bitmap untouched: no modes are selected for
     early rematerialization.  */
}
   2522  1.1  mrg 
   2523  1.1  mrg /* The default implementation of TARGET_PREFERRED_ELSE_VALUE.  */
   2524  1.1  mrg 
tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  /* By default prefer a zero constant of TYPE as the else value.  */
  return build_zero_cst (type);
}
   2530  1.1  mrg 
   2531  1.1  mrg /* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.  */
   2532  1.1  mrg bool
   2533  1.1  mrg default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
   2534  1.1  mrg {
   2535  1.1  mrg #ifdef HAVE_speculation_barrier
   2536  1.1  mrg   return active ? HAVE_speculation_barrier : true;
   2537  1.1  mrg #else
   2538  1.1  mrg   return false;
   2539  1.1  mrg #endif
   2540  1.1  mrg }
   2541  1.1  mrg /* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   2542  1.1  mrg    that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  /* The hook "succeeds" precisely when no active mitigation was
     requested.  */
  if (active)
    return false;
  return true;
}
   2548  1.1  mrg 
   2549  1.1  mrg /* Default implementation of the speculation-safe-load builtin.  This
   2550  1.1  mrg    implementation simply copies val to result and generates a
   2551  1.1  mrg    speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
				rtx result, rtx val,
				rtx failval ATTRIBUTE_UNUSED)
{
  /* Copy VAL into RESULT unconditionally; FAILVAL is not used by this
     default implementation.  */
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}
   2569  1.1  mrg 
   2570  1.1  mrg /* How many bits to shift in order to access the tag bits.
   2571  1.1  mrg    The default is to store the tag in the top 8 bits of a 64 bit pointer, hence
   2572  1.1  mrg    shifting 56 bits will leave just the tag.  */
   2573  1.1  mrg #define HWASAN_SHIFT (GET_MODE_PRECISION (Pmode) - 8)
   2574  1.1  mrg #define HWASAN_SHIFT_RTX GEN_INT (HWASAN_SHIFT)
   2575  1.1  mrg 
bool
default_memtag_can_tag_addresses ()
{
  /* By default assume the target cannot carry tags in address bits.  */
  return false;
}
   2581  1.1  mrg 
uint8_t
default_memtag_tag_size ()
{
  /* Default tag width in bits (one byte, matching HWASAN_SHIFT above,
     which reserves the top 8 bits of a pointer for the tag).  */
  const uint8_t tag_bits = 8;
  return tag_bits;
}
   2587  1.1  mrg 
uint8_t
default_memtag_granule_size ()
{
  /* Default number of bytes covered by one memory tag.  */
  const uint8_t granule_bytes = 16;
  return granule_bytes;
}
   2593  1.1  mrg 
   2594  1.1  mrg /* The default implementation of TARGET_MEMTAG_INSERT_RANDOM_TAG.  */
rtx
default_memtag_insert_random_tag (rtx untagged, rtx target)
{
  gcc_assert (param_hwasan_instrument_stack);
  if (param_hwasan_random_frame_tag)
    {
      /* Ask the HWASAN runtime for a fresh random tag (returned in
	 QImode) and combine it with the untagged pointer.  */
      rtx fn = init_one_libfunc ("__hwasan_generate_tag");
      rtx new_tag = emit_library_call_value (fn, NULL_RTX, LCT_NORMAL, QImode);
      return targetm.memtag.set_tag (untagged, new_tag, target);
    }
  else
    {
      /* NOTE: The kernel API does not have __hwasan_generate_tag exposed.
	 In the future we may add the option emit random tags with inline
	 instrumentation instead of function calls.  This would be the same
	 between the kernel and userland.  */
      return untagged;
    }
}
   2614  1.1  mrg 
   2615  1.1  mrg /* The default implementation of TARGET_MEMTAG_ADD_TAG.  */
rtx
default_memtag_add_tag (rtx base, poly_int64 offset, uint8_t tag_offset)
{
  /* Need to look into what the most efficient code sequence is.
     This is a code sequence that would be emitted *many* times, so we
     want it as small as possible.

     There are two places where tag overflow is a question:
       - Tagging the shadow stack.
	  (both tagging and untagging).
       - Tagging addressable pointers.

     We need to ensure both behaviors are the same (i.e. that the tag that
     ends up in a pointer after "overflowing" the tag bits with a tag addition
     is the same that ends up in the shadow space).

     The aim is that the behavior of tag addition should follow modulo
     wrapping in both instances.

     The libhwasan code doesn't have any path that increments a pointer's tag,
     which means it has no opinion on what happens when a tag increment
     overflows (and hence we can choose our own behavior).  */

  /* Fold the tag increment into the constant offset so a single
     plus_constant covers both the byte offset and the tag bits.  */
  offset += ((uint64_t)tag_offset << HWASAN_SHIFT);
  return plus_constant (Pmode, base, offset);
}
   2642  1.1  mrg 
   2643  1.1  mrg /* The default implementation of TARGET_MEMTAG_SET_TAG.  */
   2644  1.1  mrg rtx
   2645  1.1  mrg default_memtag_set_tag (rtx untagged, rtx tag, rtx target)
   2646  1.1  mrg {
   2647  1.1  mrg   gcc_assert (GET_MODE (untagged) == Pmode && GET_MODE (tag) == QImode);
   2648  1.1  mrg   tag = expand_simple_binop (Pmode, ASHIFT, tag, HWASAN_SHIFT_RTX, NULL_RTX,
   2649  1.1  mrg 			     /* unsignedp = */1, OPTAB_WIDEN);
   2650  1.1  mrg   rtx ret = expand_simple_binop (Pmode, IOR, untagged, tag, target,
   2651  1.1  mrg 				 /* unsignedp = */1, OPTAB_DIRECT);
   2652  1.1  mrg   gcc_assert (ret);
   2653  1.1  mrg   return ret;
   2654  1.1  mrg }
   2655  1.1  mrg 
   2656  1.1  mrg /* The default implementation of TARGET_MEMTAG_EXTRACT_TAG.  */
   2657  1.1  mrg rtx
   2658  1.1  mrg default_memtag_extract_tag (rtx tagged_pointer, rtx target)
   2659  1.1  mrg {
   2660  1.1  mrg   rtx tag = expand_simple_binop (Pmode, LSHIFTRT, tagged_pointer,
   2661  1.1  mrg 				 HWASAN_SHIFT_RTX, target,
   2662  1.1  mrg 				 /* unsignedp = */0,
   2663  1.1  mrg 				 OPTAB_DIRECT);
   2664  1.1  mrg   rtx ret = gen_lowpart (QImode, tag);
   2665  1.1  mrg   gcc_assert (ret);
   2666  1.1  mrg   return ret;
   2667  1.1  mrg }
   2668  1.1  mrg 
   2669  1.1  mrg /* The default implementation of TARGET_MEMTAG_UNTAGGED_POINTER.  */
   2670  1.1  mrg rtx
   2671  1.1  mrg default_memtag_untagged_pointer (rtx tagged_pointer, rtx target)
   2672  1.1  mrg {
   2673  1.1  mrg   rtx tag_mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_SHIFT) - 1, Pmode);
   2674  1.1  mrg   rtx untagged_base = expand_simple_binop (Pmode, AND, tagged_pointer,
   2675  1.1  mrg 					   tag_mask, target, true,
   2676  1.1  mrg 					   OPTAB_DIRECT);
   2677  1.1  mrg   gcc_assert (untagged_base);
   2678  1.1  mrg   return untagged_base;
   2679  1.1  mrg }
   2680  1.1  mrg 
   2681  1.1  mrg /* The default implementation of TARGET_GCOV_TYPE_SIZE.  */
   2682  1.1  mrg HOST_WIDE_INT
   2683  1.1  mrg default_gcov_type_size (void)
   2684  1.1  mrg {
   2685  1.1  mrg   return TYPE_PRECISION (long_long_integer_type_node) > 32 ? 64 : 32;
   2686  1.1  mrg }
   2687  1.1  mrg 
   2688  1.1  mrg #include "gt-targhooks.h"
   2689