Home | History | Annotate | Line # | Download | only in loongarch
      1  1.1  mrg /* Subroutines used for LoongArch code generation.
      2  1.1  mrg    Copyright (C) 2021-2022 Free Software Foundation, Inc.
      3  1.1  mrg    Contributed by Loongson Ltd.
      4  1.1  mrg    Based on MIPS and RISC-V target for GNU compiler.
      5  1.1  mrg 
      6  1.1  mrg This file is part of GCC.
      7  1.1  mrg 
      8  1.1  mrg GCC is free software; you can redistribute it and/or modify
      9  1.1  mrg it under the terms of the GNU General Public License as published by
     10  1.1  mrg the Free Software Foundation; either version 3, or (at your option)
     11  1.1  mrg any later version.
     12  1.1  mrg 
     13  1.1  mrg GCC is distributed in the hope that it will be useful,
     14  1.1  mrg but WITHOUT ANY WARRANTY; without even the implied warranty of
     15  1.1  mrg MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     16  1.1  mrg GNU General Public License for more details.
     17  1.1  mrg 
     18  1.1  mrg You should have received a copy of the GNU General Public License
     19  1.1  mrg along with GCC; see the file COPYING3.  If not see
     20  1.1  mrg <http://www.gnu.org/licenses/>.  */
     21  1.1  mrg 
     22  1.1  mrg #define IN_TARGET_CODE 1
     23  1.1  mrg 
     24  1.1  mrg #include "config.h"
     25  1.1  mrg #include "system.h"
     26  1.1  mrg #include "coretypes.h"
     27  1.1  mrg #include "backend.h"
     28  1.1  mrg #include "target.h"
     29  1.1  mrg #include "rtl.h"
     30  1.1  mrg #include "tree.h"
     31  1.1  mrg #include "memmodel.h"
     32  1.1  mrg #include "gimple.h"
     33  1.1  mrg #include "cfghooks.h"
     34  1.1  mrg #include "df.h"
     35  1.1  mrg #include "tm_p.h"
     36  1.1  mrg #include "stringpool.h"
     37  1.1  mrg #include "attribs.h"
     38  1.1  mrg #include "optabs.h"
     39  1.1  mrg #include "regs.h"
     40  1.1  mrg #include "emit-rtl.h"
     41  1.1  mrg #include "recog.h"
     42  1.1  mrg #include "cgraph.h"
     43  1.1  mrg #include "diagnostic.h"
     44  1.1  mrg #include "insn-attr.h"
     45  1.1  mrg #include "output.h"
     46  1.1  mrg #include "alias.h"
     47  1.1  mrg #include "fold-const.h"
     48  1.1  mrg #include "varasm.h"
     49  1.1  mrg #include "stor-layout.h"
     50  1.1  mrg #include "calls.h"
     51  1.1  mrg #include "explow.h"
     52  1.1  mrg #include "expr.h"
     53  1.1  mrg #include "libfuncs.h"
     54  1.1  mrg #include "reload.h"
     55  1.1  mrg #include "common/common-target.h"
     56  1.1  mrg #include "langhooks.h"
     57  1.1  mrg #include "cfgrtl.h"
     58  1.1  mrg #include "cfganal.h"
     59  1.1  mrg #include "sched-int.h"
     60  1.1  mrg #include "gimplify.h"
     61  1.1  mrg #include "target-globals.h"
     62  1.1  mrg #include "tree-pass.h"
     63  1.1  mrg #include "context.h"
     64  1.1  mrg #include "builtins.h"
     65  1.1  mrg #include "rtl-iter.h"
     66  1.1  mrg 
     67  1.1  mrg /* This file should be included last.  */
     68  1.1  mrg #include "target-def.h"
     69  1.1  mrg 
/* True if X is an UNSPEC wrapper around a SYMBOL_REF or LABEL_REF.
   The wrapper's UNSPEC number encodes a loongarch_symbol_type relative
   to UNSPEC_ADDRESS_FIRST.  */
#define UNSPEC_ADDRESS_P(X)					\
  (GET_CODE (X) == UNSPEC					\
   && XINT (X, 1) >= UNSPEC_ADDRESS_FIRST			\
   && XINT (X, 1) < UNSPEC_ADDRESS_FIRST + NUM_SYMBOL_TYPES)

/* Extract the symbol or label from UNSPEC wrapper X.  */
#define UNSPEC_ADDRESS(X) XVECEXP (X, 0, 0)

/* Extract the symbol type from UNSPEC wrapper X.  */
#define UNSPEC_ADDRESS_TYPE(X) \
  ((enum loongarch_symbol_type) (XINT (X, 1) - UNSPEC_ADDRESS_FIRST))

/* True if INSN is a loongarch.md pattern or asm statement.  */
/* ???	This test exists through the compiler, perhaps it should be
   moved to rtl.h.  */
#define USEFUL_INSN_P(INSN)						\
  (NONDEBUG_INSN_P (INSN)						\
   && GET_CODE (PATTERN (INSN)) != USE					\
   && GET_CODE (PATTERN (INSN)) != CLOBBER)

/* True if bit BIT is set in VALUE.
   NOTE(review): the shift is done in plain int, so BIT must be < 31
   to avoid signed-shift overflow — confirm all callers satisfy this.  */
#define BITSET_P(VALUE, BIT) (((VALUE) & (1 << (BIT))) != 0)
     93  1.1  mrg 
/* Classifies an address.

   ADDRESS_REG
       A natural register + offset address.  The register satisfies
       loongarch_valid_base_register_p and the offset is a const_arith_operand.

   ADDRESS_REG_REG
       A base register indexed by (optionally scaled) register.

   ADDRESS_CONST_INT
       A signed 16-bit constant address.

   ADDRESS_SYMBOLIC
       A constant symbolic address.  */
enum loongarch_address_type
{
  ADDRESS_REG,
  ADDRESS_REG_REG,
  ADDRESS_CONST_INT,
  ADDRESS_SYMBOLIC
};
    115  1.1  mrg 
    116  1.1  mrg 
/* Information about an address described by loongarch_address_type.

   ADDRESS_CONST_INT
       No fields are used.

   ADDRESS_REG
       REG is the base register and OFFSET is the constant offset.

   ADDRESS_REG_REG
       A base register indexed by (optionally scaled) register.

   ADDRESS_SYMBOLIC
       SYMBOL_TYPE is the type of symbol that the address references.  */
struct loongarch_address_info
{
  /* Which of the address forms above this is.  */
  enum loongarch_address_type type;
  /* Base register, for the forms that use one.  */
  rtx reg;
  /* Constant offset, for the forms that use one.  */
  rtx offset;
  /* Symbol classification, for ADDRESS_SYMBOLIC only.  */
  enum loongarch_symbol_type symbol_type;
};
    137  1.1  mrg 
/* Method of loading immediate numbers:

   METHOD_NORMAL:
     Load bits 0-31 of the immediate number.

   METHOD_LU32I:
     Load bits 32-51 of the immediate number.

   METHOD_LU52I:
     Load bits 52-63 of the immediate number.

   METHOD_INSV:
     For an immediate like 0xfff00000fffffxxx, where the middle bits
     can be filled by a bit-insert from the low part.  */
enum loongarch_load_imm_method
{
  METHOD_NORMAL,
  METHOD_LU32I,
  METHOD_LU52I,
  METHOD_INSV
};
    159  1.1  mrg 
/* One step in a sequence of operations that synthesizes an integer
   constant in a register.  */
struct loongarch_integer_op
{
  /* The rtx operation to apply at this step.  */
  enum rtx_code code;
  /* The constant operand for CODE.  */
  HOST_WIDE_INT value;
  /* Which immediate-loading instruction realizes this step.  */
  enum loongarch_load_imm_method method;
};
    166  1.1  mrg 
/* The largest number of operations needed to load an integer constant.
   The worst accepted case for 64-bit constants is LU12I.W,LU32I.D,LU52I.D,ORI
   or LU12I.W,LU32I.D,LU52I.D,ADDI.D.  */
#define LARCH_MAX_INTEGER_OPS 4

/* Arrays that map GCC register numbers to debugger register numbers.  */
int loongarch_dwarf_regno[FIRST_PSEUDO_REGISTER];

/* Index [M][R] is true if register R is allowed to hold a value of mode M.  */
static bool loongarch_hard_regno_mode_ok_p[MAX_MACHINE_MODE]
					  [FIRST_PSEUDO_REGISTER];

/* Index C is true if character C is a valid PRINT_OPERAND punctuation
   character.  */
static bool loongarch_print_operand_punct[256];

/* Cached value of can_issue_more.  This is cached in loongarch_variable_issue
   hook and returned from loongarch_sched_reorder2.  */
static int cached_can_issue_more;
    186  1.1  mrg 
/* Index R is the smallest register class that contains register R.
   Layout: 32 general registers, 32 floating-point registers,
   8 condition-flag registers, then the two frame-related registers.  */
const enum reg_class loongarch_regno_to_class[FIRST_PSEUDO_REGISTER] = {
    GR_REGS,	     GR_REGS,	      GR_REGS,	       GR_REGS,
    JIRL_REGS,       JIRL_REGS,       JIRL_REGS,       JIRL_REGS,
    JIRL_REGS,       JIRL_REGS,       JIRL_REGS,       JIRL_REGS,
    SIBCALL_REGS,    JIRL_REGS,       SIBCALL_REGS,    SIBCALL_REGS,
    SIBCALL_REGS,    SIBCALL_REGS,    SIBCALL_REGS,    SIBCALL_REGS,
    SIBCALL_REGS,    GR_REGS,	      GR_REGS,	       JIRL_REGS,
    JIRL_REGS,       JIRL_REGS,       JIRL_REGS,       JIRL_REGS,
    JIRL_REGS,       JIRL_REGS,       JIRL_REGS,       JIRL_REGS,

    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
    FCC_REGS,	FCC_REGS,	FCC_REGS,	FCC_REGS,
    FCC_REGS,	FCC_REGS,	FCC_REGS,	FCC_REGS,
    FRAME_REGS,	FRAME_REGS
};
    210  1.1  mrg 
/* Which cost information to use.  NOTE(review): presumably assigned from
   the selected tuning target during option processing — the assignment is
   not visible in this part of the file.  */
static const struct loongarch_rtx_cost_data *loongarch_cost;

/* Information about a single argument, filled in by
   loongarch_get_arg_info.  */
struct loongarch_arg_info
{
  /* True if the argument is at least partially passed on the stack.  */
  bool stack_p;

  /* The number of integer registers allocated to this argument.  */
  unsigned int num_gprs;

  /* The offset of the first register used, provided num_gprs is nonzero.
     If passed entirely on the stack, the value is MAX_ARGS_IN_REGISTERS.  */
  unsigned int gpr_offset;

  /* The number of floating-point registers allocated to this argument.  */
  unsigned int num_fprs;

  /* The offset of the first register used, provided num_fprs is nonzero.  */
  unsigned int fpr_offset;
};
    233  1.1  mrg 
/* Invoke MACRO (COND) for each fcmp.cond.{s/d} condition.  */
#define LARCH_FP_CONDITIONS(MACRO) \
  MACRO (f),	\
  MACRO (un),	\
  MACRO (eq),	\
  MACRO (ueq),	\
  MACRO (olt),	\
  MACRO (ult),	\
  MACRO (ole),	\
  MACRO (ule),	\
  MACRO (sf),	\
  MACRO (ngle),	\
  MACRO (seq),	\
  MACRO (ngl),	\
  MACRO (lt),	\
  MACRO (nge),	\
  MACRO (le),	\
  MACRO (ngt)

/* Enumerates the codes above as LARCH_FP_COND_<X>.  */
#define DECLARE_LARCH_COND(X) LARCH_FP_COND_##X
enum loongarch_fp_condition
{
  LARCH_FP_CONDITIONS (DECLARE_LARCH_COND)
};
#undef DECLARE_LARCH_COND

/* Index X provides the string representation of LARCH_FP_COND_<X>.
   The array size matches the 16 conditions listed above.  */
#define STRINGIFY(X) #X
const char *const
loongarch_fp_conditions[16]= {LARCH_FP_CONDITIONS (STRINGIFY)};
#undef STRINGIFY
    266  1.1  mrg 
    267  1.1  mrg /* Implement TARGET_FUNCTION_ARG_BOUNDARY.  Every parameter gets at
    268  1.1  mrg    least PARM_BOUNDARY bits of alignment, but will be given anything up
    269  1.1  mrg    to PREFERRED_STACK_BOUNDARY bits if the type requires it.  */
    270  1.1  mrg 
    271  1.1  mrg static unsigned int
    272  1.1  mrg loongarch_function_arg_boundary (machine_mode mode, const_tree type)
    273  1.1  mrg {
    274  1.1  mrg   unsigned int alignment;
    275  1.1  mrg 
    276  1.1  mrg   /* Use natural alignment if the type is not aggregate data.  */
    277  1.1  mrg   if (type && !AGGREGATE_TYPE_P (type))
    278  1.1  mrg     alignment = TYPE_ALIGN (TYPE_MAIN_VARIANT (type));
    279  1.1  mrg   else
    280  1.1  mrg     alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
    281  1.1  mrg 
    282  1.1  mrg   return MIN (PREFERRED_STACK_BOUNDARY, MAX (PARM_BOUNDARY, alignment));
    283  1.1  mrg }
    284  1.1  mrg 
    285  1.1  mrg /* If MODE represents an argument that can be passed or returned in
    286  1.1  mrg    floating-point registers, return the number of registers, else 0.  */
    287  1.1  mrg 
    288  1.1  mrg static unsigned
    289  1.1  mrg loongarch_pass_mode_in_fpr_p (machine_mode mode)
    290  1.1  mrg {
    291  1.1  mrg   if (GET_MODE_UNIT_SIZE (mode) <= UNITS_PER_FP_ARG)
    292  1.1  mrg     {
    293  1.1  mrg       if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    294  1.1  mrg 	return 1;
    295  1.1  mrg 
    296  1.1  mrg       if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
    297  1.1  mrg 	return 2;
    298  1.1  mrg     }
    299  1.1  mrg 
    300  1.1  mrg   return 0;
    301  1.1  mrg }
    302  1.1  mrg 
/* A scalar subfield of an aggregate, produced by flattening: the field's
   type and its byte offset from the start of the outermost aggregate.  */
typedef struct
{
  const_tree type;
  HOST_WIDE_INT offset;
} loongarch_aggregate_field;
    308  1.1  mrg 
/* Identify subfields of aggregates that are candidates for passing in
   floating-point registers.

   TYPE is the type being flattened, FIELDS the output array (at most
   two entries), N the number of entries already filled in, and OFFSET
   the byte offset of TYPE within the outermost aggregate.  Return the
   new number of filled-in entries, or -1 if TYPE cannot be flattened
   into at most two scalar fields.  */

static int
loongarch_flatten_aggregate_field (const_tree type,
				   loongarch_aggregate_field fields[2], int n,
				   HOST_WIDE_INT offset)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      /* Can't handle incomplete types nor sizes that are not fixed.  */
      if (!COMPLETE_TYPE_P (type)
	  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	  || !tree_fits_uhwi_p (TYPE_SIZE (type)))
	return -1;

      /* Recurse into each member, accumulating into FIELDS.  */
      for (tree f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	if (TREE_CODE (f) == FIELD_DECL)
	  {
	    if (!TYPE_P (TREE_TYPE (f)))
	      return -1;

	    /* Zero-sized fields occupy no storage; skip them.  */
	    if (DECL_SIZE (f) && integer_zerop (DECL_SIZE (f)))
	      continue;

	    HOST_WIDE_INT pos = offset + int_byte_position (f);
	    n = loongarch_flatten_aggregate_field (TREE_TYPE (f), fields, n,
						   pos);
	    if (n < 0)
	      return -1;
	  }
      return n;

    case ARRAY_TYPE:
      {
	HOST_WIDE_INT n_elts;
	loongarch_aggregate_field subfields[2];
	tree index = TYPE_DOMAIN (type);
	tree elt_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	/* Flatten one element first; the result is replicated below.  */
	int n_subfields = loongarch_flatten_aggregate_field (TREE_TYPE (type),
							     subfields, 0,
							     offset);

	/* Can't handle incomplete types nor sizes that are not fixed.  */
	if (n_subfields <= 0
	    || !COMPLETE_TYPE_P (type)
	    || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	    || !index
	    || !TYPE_MAX_VALUE (index)
	    || !tree_fits_uhwi_p (TYPE_MAX_VALUE (index))
	    || !TYPE_MIN_VALUE (index)
	    || !tree_fits_uhwi_p (TYPE_MIN_VALUE (index))
	    || !tree_fits_uhwi_p (elt_size))
	  return -1;

	n_elts = 1 + tree_to_uhwi (TYPE_MAX_VALUE (index))
		 - tree_to_uhwi (TYPE_MIN_VALUE (index));
	gcc_assert (n_elts >= 0);

	/* Replicate the element's subfields for every array element,
	   adjusting each copy's offset.  More than two total fields
	   disqualifies the aggregate.  */
	for (HOST_WIDE_INT i = 0; i < n_elts; i++)
	  for (int j = 0; j < n_subfields; j++)
	    {
	      if (n >= 2)
		return -1;

	      fields[n] = subfields[j];
	      fields[n++].offset += i * tree_to_uhwi (elt_size);
	    }

	return n;
      }

    case COMPLEX_TYPE:
      {
	/* A complex value consumes both fields, so N must be 0.  */
	if (n != 0)
	  return -1;

	HOST_WIDE_INT elt_size = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)));

	/* Real and imaginary parts each become one field, laid out
	   consecutively.  */
	if (elt_size <= UNITS_PER_FP_ARG)
	  {
	    fields[0].type = TREE_TYPE (type);
	    fields[0].offset = offset;
	    fields[1].type = TREE_TYPE (type);
	    fields[1].offset = offset + elt_size;

	    return 2;
	  }

	return -1;
      }

    default:
      /* Scalar leaf: accept a float that fits in an FPR or an integer
	 that fits in a word, provided there is room in FIELDS.  */
      if (n < 2
	  && ((SCALAR_FLOAT_TYPE_P (type)
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_FP_ARG)
	      || (INTEGRAL_TYPE_P (type)
		  && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD)))
	{
	  fields[n].type = type;
	  fields[n].offset = offset;
	  return n + 1;
	}
      else
	return -1;
    }
}
    418  1.1  mrg 
    419  1.1  mrg /* Identify candidate aggregates for passing in floating-point registers.
    420  1.1  mrg    Candidates have at most two fields after flattening.  */
    421  1.1  mrg 
    422  1.1  mrg static int
    423  1.1  mrg loongarch_flatten_aggregate_argument (const_tree type,
    424  1.1  mrg 				      loongarch_aggregate_field fields[2])
    425  1.1  mrg {
    426  1.1  mrg   if (!type || TREE_CODE (type) != RECORD_TYPE)
    427  1.1  mrg     return -1;
    428  1.1  mrg 
    429  1.1  mrg   return loongarch_flatten_aggregate_field (type, fields, 0, 0);
    430  1.1  mrg }
    431  1.1  mrg 
    432  1.1  mrg /* See whether TYPE is a record whose fields should be returned in one or
    433  1.1  mrg    two floating-point registers.  If so, populate FIELDS accordingly.  */
    434  1.1  mrg 
    435  1.1  mrg static unsigned
    436  1.1  mrg loongarch_pass_aggregate_num_fpr (const_tree type,
    437  1.1  mrg 					loongarch_aggregate_field fields[2])
    438  1.1  mrg {
    439  1.1  mrg   int n = loongarch_flatten_aggregate_argument (type, fields);
    440  1.1  mrg 
    441  1.1  mrg   for (int i = 0; i < n; i++)
    442  1.1  mrg     if (!SCALAR_FLOAT_TYPE_P (fields[i].type))
    443  1.1  mrg       return 0;
    444  1.1  mrg 
    445  1.1  mrg   return n > 0 ? n : 0;
    446  1.1  mrg }
    447  1.1  mrg 
    448  1.1  mrg /* See whether TYPE is a record whose fields should be returned in one
    449  1.1  mrg    floating-point register and one integer register.  If so, populate
    450  1.1  mrg    FIELDS accordingly.  */
    451  1.1  mrg 
    452  1.1  mrg static bool
    453  1.1  mrg loongarch_pass_aggregate_in_fpr_and_gpr_p (const_tree type,
    454  1.1  mrg 					   loongarch_aggregate_field fields[2])
    455  1.1  mrg {
    456  1.1  mrg   unsigned num_int = 0, num_float = 0;
    457  1.1  mrg   int n = loongarch_flatten_aggregate_argument (type, fields);
    458  1.1  mrg 
    459  1.1  mrg   for (int i = 0; i < n; i++)
    460  1.1  mrg     {
    461  1.1  mrg       num_float += SCALAR_FLOAT_TYPE_P (fields[i].type);
    462  1.1  mrg       num_int += INTEGRAL_TYPE_P (fields[i].type);
    463  1.1  mrg     }
    464  1.1  mrg 
    465  1.1  mrg   return num_int == 1 && num_float == 1;
    466  1.1  mrg }
    467  1.1  mrg 
    468  1.1  mrg /* Return the representation of an argument passed or returned in an FPR
    469  1.1  mrg    when the value has mode VALUE_MODE and the type has TYPE_MODE.  The
    470  1.1  mrg    two modes may be different for structures like:
    471  1.1  mrg 
    472  1.1  mrg    struct __attribute__((packed)) foo { float f; }
    473  1.1  mrg 
    474  1.1  mrg    where the SFmode value "f" is passed in REGNO but the struct itself
    475  1.1  mrg    has mode BLKmode.  */
    476  1.1  mrg 
    477  1.1  mrg static rtx
    478  1.1  mrg loongarch_pass_fpr_single (machine_mode type_mode, unsigned regno,
    479  1.1  mrg 			   machine_mode value_mode,
    480  1.1  mrg 			   HOST_WIDE_INT offset)
    481  1.1  mrg {
    482  1.1  mrg   rtx x = gen_rtx_REG (value_mode, regno);
    483  1.1  mrg 
    484  1.1  mrg   if (type_mode != value_mode)
    485  1.1  mrg     {
    486  1.1  mrg       x = gen_rtx_EXPR_LIST (VOIDmode, x, GEN_INT (offset));
    487  1.1  mrg       x = gen_rtx_PARALLEL (type_mode, gen_rtvec (1, x));
    488  1.1  mrg     }
    489  1.1  mrg   return x;
    490  1.1  mrg }
    491  1.1  mrg 
    492  1.1  mrg /* Pass or return a composite value in the FPR pair REGNO and REGNO + 1.
    493  1.1  mrg    MODE is the mode of the composite.  MODE1 and OFFSET1 are the mode and
    494  1.1  mrg    byte offset for the first value, likewise MODE2 and OFFSET2 for the
    495  1.1  mrg    second value.  */
    496  1.1  mrg 
    497  1.1  mrg static rtx
    498  1.1  mrg loongarch_pass_fpr_pair (machine_mode mode, unsigned regno1,
    499  1.1  mrg 			 machine_mode mode1, HOST_WIDE_INT offset1,
    500  1.1  mrg 			 unsigned regno2, machine_mode mode2,
    501  1.1  mrg 			 HOST_WIDE_INT offset2)
    502  1.1  mrg {
    503  1.1  mrg   return gen_rtx_PARALLEL (
    504  1.1  mrg     mode, gen_rtvec (2,
    505  1.1  mrg 		     gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (mode1, regno1),
    506  1.1  mrg 					GEN_INT (offset1)),
    507  1.1  mrg 		     gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (mode2, regno2),
    508  1.1  mrg 					GEN_INT (offset2))));
    509  1.1  mrg }
    510  1.1  mrg 
/* Fill INFO with information about a single argument, and return an
   RTL pattern to pass or return the argument.  CUM is the cumulative
   state for earlier arguments.  MODE is the mode of this argument and
   TYPE is its type (if known).  NAMED is true if this is a named
   (fixed) argument rather than a variable one.  RETURN_P is true if
   returning the argument, or false if passing the argument.

   Named arguments are tried against the FPR rules in order; when none
   applies, control falls through to the GPR/stack partitioning at the
   bottom, which is also the only path for unnamed arguments.  */

static rtx
loongarch_get_arg_info (struct loongarch_arg_info *info,
			const CUMULATIVE_ARGS *cum, machine_mode mode,
			const_tree type, bool named, bool return_p)
{
  unsigned num_bytes, num_words;
  unsigned fpr_base = return_p ? FP_RETURN : FP_ARG_FIRST;
  unsigned gpr_base = return_p ? GP_RETURN : GP_ARG_FIRST;
  unsigned alignment = loongarch_function_arg_boundary (mode, type);

  memset (info, 0, sizeof (*info));
  info->gpr_offset = cum->num_gprs;
  info->fpr_offset = cum->num_fprs;

  if (named)
    {
      loongarch_aggregate_field fields[2];
      unsigned fregno = fpr_base + info->fpr_offset;
      unsigned gregno = gpr_base + info->gpr_offset;

      /* Pass one- or two-element floating-point aggregates in FPRs.  */
      if ((info->num_fprs
	   = loongarch_pass_aggregate_num_fpr (type, fields))
	  && info->fpr_offset + info->num_fprs <= MAX_ARGS_IN_REGISTERS)
	switch (info->num_fprs)
	  {
	  case 1:
	    return loongarch_pass_fpr_single (mode, fregno,
					      TYPE_MODE (fields[0].type),
					      fields[0].offset);

	  case 2:
	    return loongarch_pass_fpr_pair (mode, fregno,
					    TYPE_MODE (fields[0].type),
					    fields[0].offset,
					    fregno + 1,
					    TYPE_MODE (fields[1].type),
					    fields[1].offset);

	  default:
	    gcc_unreachable ();
	  }

      /* Pass real and complex floating-point numbers in FPRs.  */
      if ((info->num_fprs = loongarch_pass_mode_in_fpr_p (mode))
	  && info->fpr_offset + info->num_fprs <= MAX_ARGS_IN_REGISTERS)
	switch (GET_MODE_CLASS (mode))
	  {
	  case MODE_FLOAT:
	    return gen_rtx_REG (mode, fregno);

	  case MODE_COMPLEX_FLOAT:
	    return loongarch_pass_fpr_pair (mode, fregno,
					    GET_MODE_INNER (mode), 0,
					    fregno + 1, GET_MODE_INNER (mode),
					    GET_MODE_UNIT_SIZE (mode));

	  default:
	    gcc_unreachable ();
	  }

      /* Pass structs with one float and one integer in an FPR and a GPR.  */
      if (loongarch_pass_aggregate_in_fpr_and_gpr_p (type, fields)
	  && info->gpr_offset < MAX_ARGS_IN_REGISTERS
	  && info->fpr_offset < MAX_ARGS_IN_REGISTERS)
	{
	  info->num_gprs = 1;
	  info->num_fprs = 1;

	  /* fields[0] goes in FREGNO by default; swap if the float is
	     actually the second field.  */
	  if (!SCALAR_FLOAT_TYPE_P (fields[0].type))
	    std::swap (fregno, gregno);

	  return loongarch_pass_fpr_pair (mode, fregno,
					  TYPE_MODE (fields[0].type),
					  fields[0].offset, gregno,
					  TYPE_MODE (fields[1].type),
					  fields[1].offset);
	}
    }

  /* Work out the size of the argument.  */
  num_bytes = type ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  num_words = (num_bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Doubleword-aligned varargs start on an even register boundary.  */
  if (!named && num_bytes != 0 && alignment > BITS_PER_WORD)
    info->gpr_offset += info->gpr_offset & 1;

  /* Partition the argument between registers and stack.  */
  info->num_fprs = 0;
  info->num_gprs = MIN (num_words, MAX_ARGS_IN_REGISTERS - info->gpr_offset);
  info->stack_p = (num_words - info->num_gprs) != 0;

  if (info->num_gprs || return_p)
    return gen_rtx_REG (mode, gpr_base + info->gpr_offset);

  return NULL_RTX;
}
    616  1.1  mrg 
    617  1.1  mrg /* Implement TARGET_FUNCTION_ARG.  */
    618  1.1  mrg 
    619  1.1  mrg static rtx
    620  1.1  mrg loongarch_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
    621  1.1  mrg {
    622  1.1  mrg   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
    623  1.1  mrg   struct loongarch_arg_info info;
    624  1.1  mrg 
    625  1.1  mrg   if (arg.end_marker_p ())
    626  1.1  mrg     return NULL;
    627  1.1  mrg 
    628  1.1  mrg   return loongarch_get_arg_info (&info, cum, arg.mode, arg.type, arg.named,
    629  1.1  mrg 				 false);
    630  1.1  mrg }
    631  1.1  mrg 
    632  1.1  mrg /* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
    633  1.1  mrg 
    634  1.1  mrg static void
    635  1.1  mrg loongarch_function_arg_advance (cumulative_args_t cum_v,
    636  1.1  mrg 				const function_arg_info &arg)
    637  1.1  mrg {
    638  1.1  mrg   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
    639  1.1  mrg   struct loongarch_arg_info info;
    640  1.1  mrg 
    641  1.1  mrg   loongarch_get_arg_info (&info, cum, arg.mode, arg.type, arg.named, false);
    642  1.1  mrg 
    643  1.1  mrg   /* Advance the register count.  This has the effect of setting
    644  1.1  mrg      num_gprs to MAX_ARGS_IN_REGISTERS if a doubleword-aligned
    645  1.1  mrg      argument required us to skip the final GPR and pass the whole
    646  1.1  mrg      argument on the stack.  */
    647  1.1  mrg   cum->num_fprs = info.fpr_offset + info.num_fprs;
    648  1.1  mrg   cum->num_gprs = info.gpr_offset + info.num_gprs;
    649  1.1  mrg }
    650  1.1  mrg 
    651  1.1  mrg /* Implement TARGET_ARG_PARTIAL_BYTES.  */
    652  1.1  mrg 
    653  1.1  mrg static int
    654  1.1  mrg loongarch_arg_partial_bytes (cumulative_args_t cum,
    655  1.1  mrg 			     const function_arg_info &generic_arg)
    656  1.1  mrg {
    657  1.1  mrg   struct loongarch_arg_info arg;
    658  1.1  mrg 
    659  1.1  mrg   loongarch_get_arg_info (&arg, get_cumulative_args (cum), generic_arg.mode,
    660  1.1  mrg 			  generic_arg.type, generic_arg.named, false);
    661  1.1  mrg   return arg.stack_p ? arg.num_gprs * UNITS_PER_WORD : 0;
    662  1.1  mrg }
    663  1.1  mrg 
/* Implement FUNCTION_VALUE and LIBCALL_VALUE.  For normal calls,
   VALTYPE is the return type and MODE is VOIDmode.  For libcalls,
   VALTYPE is null and MODE is the mode of the return value.  */

static rtx
loongarch_function_value_1 (const_tree type, const_tree func,
			    machine_mode mode)
{
  struct loongarch_arg_info info;
  CUMULATIVE_ARGS args;

  if (type)
    {
      int unsigned_p = TYPE_UNSIGNED (type);

      mode = TYPE_MODE (type);

      /* Since TARGET_PROMOTE_FUNCTION_MODE unconditionally promotes
	 return values, promote the mode here too.  */
      mode = promote_function_mode (type, mode, &unsigned_p, func, 1);
    }

  /* The return value is located as if it were the first named argument
     of a call with a fresh (all-zero) register state.  */
  memset (&args, 0, sizeof (args));
  return loongarch_get_arg_info (&info, &args, mode, type, true, true);
}
    689  1.1  mrg 
    690  1.1  mrg 
/* Implement TARGET_FUNCTION_VALUE.  Normal-call case: pass the return
   type through and let the worker derive the mode from it.  */

static rtx
loongarch_function_value (const_tree valtype, const_tree fn_decl_or_type,
			  bool outgoing ATTRIBUTE_UNUSED)
{
  return loongarch_function_value_1 (valtype, fn_decl_or_type, VOIDmode);
}
    699  1.1  mrg 
/* Implement TARGET_LIBCALL_VALUE.  Libcall case: there is no tree type,
   only the machine mode of the returned value.  */

static rtx
loongarch_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return loongarch_function_value_1 (NULL_TREE, NULL_TREE, mode);
}
    707  1.1  mrg 
    708  1.1  mrg 
    709  1.1  mrg /* Implement TARGET_PASS_BY_REFERENCE.  */
    710  1.1  mrg 
    711  1.1  mrg static bool
    712  1.1  mrg loongarch_pass_by_reference (cumulative_args_t cum_v,
    713  1.1  mrg 			     const function_arg_info &arg)
    714  1.1  mrg {
    715  1.1  mrg   HOST_WIDE_INT size = arg.type_size_in_bytes ();
    716  1.1  mrg   struct loongarch_arg_info info;
    717  1.1  mrg   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
    718  1.1  mrg 
    719  1.1  mrg   /* ??? std_gimplify_va_arg_expr passes NULL for cum.  Fortunately, we
    720  1.1  mrg      never pass variadic arguments in floating-point registers, so we can
    721  1.1  mrg      avoid the call to loongarch_get_arg_info in this case.  */
    722  1.1  mrg   if (cum != NULL)
    723  1.1  mrg     {
    724  1.1  mrg       /* Don't pass by reference if we can use a floating-point register.  */
    725  1.1  mrg       loongarch_get_arg_info (&info, cum, arg.mode, arg.type, arg.named,
    726  1.1  mrg 			      false);
    727  1.1  mrg       if (info.num_fprs)
    728  1.1  mrg 	return false;
    729  1.1  mrg     }
    730  1.1  mrg 
    731  1.1  mrg   /* Pass by reference if the data do not fit in two integer registers.  */
    732  1.1  mrg   return !IN_RANGE (size, 0, 2 * UNITS_PER_WORD);
    733  1.1  mrg }
    734  1.1  mrg 
    735  1.1  mrg /* Implement TARGET_RETURN_IN_MEMORY.  */
    736  1.1  mrg 
    737  1.1  mrg static bool
    738  1.1  mrg loongarch_return_in_memory (const_tree type,
    739  1.1  mrg 			    const_tree fndecl ATTRIBUTE_UNUSED)
    740  1.1  mrg {
    741  1.1  mrg   CUMULATIVE_ARGS args;
    742  1.1  mrg   cumulative_args_t cum = pack_cumulative_args (&args);
    743  1.1  mrg 
    744  1.1  mrg   /* The rules for returning in memory are the same as for passing the
    745  1.1  mrg      first named argument by reference.  */
    746  1.1  mrg   memset (&args, 0, sizeof (args));
    747  1.1  mrg   function_arg_info arg (const_cast<tree> (type), /*named=*/true);
    748  1.1  mrg   return loongarch_pass_by_reference (cum, arg);
    749  1.1  mrg }
    750  1.1  mrg 
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Spill any argument GPRs
   that remain after the named arguments to the stack, so that variadic
   callees see all their arguments contiguously in memory.  */

static void
loongarch_setup_incoming_varargs (cumulative_args_t cum,
				  const function_arg_info &arg,
				  int *pretend_size ATTRIBUTE_UNUSED,
				  int no_rtl)
{
  CUMULATIVE_ARGS local_cum;
  int gp_saved;

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  local_cum = *get_cumulative_args (cum);
  loongarch_function_arg_advance (pack_cumulative_args (&local_cum), arg);

  /* Find out how many registers we need to save.  */
  gp_saved = MAX_ARGS_IN_REGISTERS - local_cum.num_gprs;

  if (!no_rtl && gp_saved > 0)
    {
      /* The save area sits just below the incoming stack arguments,
	 addressed relative to the virtual incoming-args pointer.  */
      rtx ptr = plus_constant (Pmode, virtual_incoming_args_rtx,
			       REG_PARM_STACK_SPACE (cfun->decl)
				 - gp_saved * UNITS_PER_WORD);
      rtx mem = gen_frame_mem (BLKmode, ptr);
      set_mem_alias_set (mem, get_varargs_alias_set ());

      move_block_from_reg (local_cum.num_gprs + GP_ARG_FIRST, mem, gp_saved);
    }
  if (REG_PARM_STACK_SPACE (cfun->decl) == 0)
    cfun->machine->varargs_size = gp_saved * UNITS_PER_WORD;
}
    784  1.1  mrg 
    785  1.1  mrg /* Make the last instruction frame-related and note that it performs
    786  1.1  mrg    the operation described by FRAME_PATTERN.  */
    787  1.1  mrg 
    788  1.1  mrg static void
    789  1.1  mrg loongarch_set_frame_expr (rtx frame_pattern)
    790  1.1  mrg {
    791  1.1  mrg   rtx insn;
    792  1.1  mrg 
    793  1.1  mrg   insn = get_last_insn ();
    794  1.1  mrg   RTX_FRAME_RELATED_P (insn) = 1;
    795  1.1  mrg   REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR, frame_pattern,
    796  1.1  mrg 				      REG_NOTES (insn));
    797  1.1  mrg }
    798  1.1  mrg 
    799  1.1  mrg /* Return a frame-related rtx that stores REG at MEM.
    800  1.1  mrg    REG must be a single register.  */
    801  1.1  mrg 
    802  1.1  mrg static rtx
    803  1.1  mrg loongarch_frame_set (rtx mem, rtx reg)
    804  1.1  mrg {
    805  1.1  mrg   rtx set = gen_rtx_SET (mem, reg);
    806  1.1  mrg   RTX_FRAME_RELATED_P (set) = 1;
    807  1.1  mrg   return set;
    808  1.1  mrg }
    809  1.1  mrg 
    810  1.1  mrg /* Return true if the current function must save register REGNO.  */
    811  1.1  mrg 
    812  1.1  mrg static bool
    813  1.1  mrg loongarch_save_reg_p (unsigned int regno)
    814  1.1  mrg {
    815  1.1  mrg   bool call_saved = !global_regs[regno] && !call_used_regs[regno];
    816  1.1  mrg   bool might_clobber
    817  1.1  mrg     = crtl->saves_all_registers || df_regs_ever_live_p (regno);
    818  1.1  mrg 
    819  1.1  mrg   if (call_saved && might_clobber)
    820  1.1  mrg     return true;
    821  1.1  mrg 
    822  1.1  mrg   if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
    823  1.1  mrg     return true;
    824  1.1  mrg 
    825  1.1  mrg   if (regno == RETURN_ADDR_REGNUM && crtl->calls_eh_return)
    826  1.1  mrg     return true;
    827  1.1  mrg 
    828  1.1  mrg   return false;
    829  1.1  mrg }
    830  1.1  mrg 
    831  1.1  mrg /* Determine which GPR save/restore routine to call.  */
    832  1.1  mrg 
    833  1.1  mrg static unsigned
    834  1.1  mrg loongarch_save_libcall_count (unsigned mask)
    835  1.1  mrg {
    836  1.1  mrg   for (unsigned n = GP_REG_LAST; n > GP_REG_FIRST; n--)
    837  1.1  mrg     if (BITSET_P (mask, n))
    838  1.1  mrg       return CALLEE_SAVED_REG_NUMBER (n) + 1;
    839  1.1  mrg   abort ();
    840  1.1  mrg }
    841  1.1  mrg 
/* Populate the current function's loongarch_frame_info structure.

   LoongArch stack frames grow downward.  High addresses are at the top.

     +-------------------------------+
     |				     |
     |  incoming stack arguments     |
     |				     |
     +-------------------------------+ <-- incoming stack pointer
     |				     |
     |  callee-allocated save area   |
     |  for arguments that are       |
     |  split between registers and  |
     |  the stack		     |
     |				     |
     +-------------------------------+ <-- arg_pointer_rtx (virtual)
     |				     |
     |  callee-allocated save area   |
     |  for register varargs	     |
     |				     |
     +-------------------------------+ <-- hard_frame_pointer_rtx;
     |				     |     stack_pointer_rtx + gp_sp_offset
     |  GPR save area		     |       + UNITS_PER_WORD
     |				     |
     +-------------------------------+ <-- stack_pointer_rtx + fp_sp_offset
     |				     |       + UNITS_PER_HWVALUE
     |  FPR save area		     |
     |				     |
     +-------------------------------+ <-- frame_pointer_rtx (virtual)
     |				     |
     |  local variables		     |
     |				     |
   P +-------------------------------+
     |				     |
     |  outgoing stack arguments     |
     |				     |
     +-------------------------------+ <-- stack_pointer_rtx

   Dynamic stack allocations such as alloca insert data at point P.
   They decrease stack_pointer_rtx but leave frame_pointer_rtx and
   hard_frame_pointer_rtx unchanged.  */

static void
loongarch_compute_frame_info (void)
{
  struct loongarch_frame_info *frame;
  HOST_WIDE_INT offset;
  unsigned int regno, i, num_x_saved = 0, num_f_saved = 0;

  frame = &cfun->machine->frame;
  memset (frame, 0, sizeof (*frame));

  /* Find out which GPRs we need to save.  */
  for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
    if (loongarch_save_reg_p (regno))
      frame->mask |= 1 << (regno - GP_REG_FIRST), num_x_saved++;

  /* If this function calls eh_return, we must also save and restore the
     EH data registers.  */
  if (crtl->calls_eh_return)
    for (i = 0; (regno = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
      frame->mask |= 1 << (regno - GP_REG_FIRST), num_x_saved++;

  /* Find out which FPRs we need to save.  This loop must iterate over
     the same space as its companion in loongarch_for_each_saved_reg.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
      if (loongarch_save_reg_p (regno))
	frame->fmask |= 1 << (regno - FP_REG_FIRST), num_f_saved++;

  /* Lay out the frame from the bottom (stack pointer) upward, keeping
     each region aligned via LARCH_STACK_ALIGN.  */

  /* At the bottom of the frame are any outgoing stack arguments.  */
  offset = LARCH_STACK_ALIGN (crtl->outgoing_args_size);
  /* Next are local stack variables.  */
  offset += LARCH_STACK_ALIGN (get_frame_size ());
  /* The virtual frame pointer points above the local variables.  */
  frame->frame_pointer_offset = offset;
  /* Next are the callee-saved FPRs.  */
  if (frame->fmask)
    {
      offset += LARCH_STACK_ALIGN (num_f_saved * UNITS_PER_FP_REG);
      frame->fp_sp_offset = offset - UNITS_PER_FP_REG;
    }
  else
    frame->fp_sp_offset = offset;
  /* Next are the callee-saved GPRs.  */
  if (frame->mask)
    {
      unsigned x_save_size = LARCH_STACK_ALIGN (num_x_saved * UNITS_PER_WORD);
      unsigned num_save_restore
	= 1 + loongarch_save_libcall_count (frame->mask);

      /* Only use save/restore routines if they don't alter the stack size.  */
      if (LARCH_STACK_ALIGN (num_save_restore * UNITS_PER_WORD) == x_save_size)
	frame->save_libcall_adjustment = x_save_size;

      offset += x_save_size;
      frame->gp_sp_offset = offset - UNITS_PER_WORD;
    }
  else
    frame->gp_sp_offset = offset;
  /* The hard frame pointer points above the callee-saved GPRs.  */
  frame->hard_frame_pointer_offset = offset;
  /* Above the hard frame pointer is the callee-allocated varargs save
     area.  */
  offset += LARCH_STACK_ALIGN (cfun->machine->varargs_size);
  /* Next is the callee-allocated area for pretend stack arguments.  */
  offset += LARCH_STACK_ALIGN (crtl->args.pretend_args_size);
  /* Arg pointer must be below pretend args, but must be above alignment
     padding.  */
  frame->arg_pointer_offset = offset - crtl->args.pretend_args_size;
  frame->total_size = offset;
  /* Above the total size are the incoming stack pointer and any
     incoming arguments.  */

  /* Only use save/restore routines when the GPRs are atop the frame.  */
  if (frame->hard_frame_pointer_offset != frame->total_size)
    frame->save_libcall_adjustment = 0;
}
    958  1.1  mrg 
    959  1.1  mrg /* Implement INITIAL_ELIMINATION_OFFSET.  FROM is either the frame pointer
    960  1.1  mrg    or argument pointer.  TO is either the stack pointer or hard frame
    961  1.1  mrg    pointer.  */
    962  1.1  mrg 
    963  1.1  mrg HOST_WIDE_INT
    964  1.1  mrg loongarch_initial_elimination_offset (int from, int to)
    965  1.1  mrg {
    966  1.1  mrg   HOST_WIDE_INT src, dest;
    967  1.1  mrg 
    968  1.1  mrg   loongarch_compute_frame_info ();
    969  1.1  mrg 
    970  1.1  mrg   if (to == HARD_FRAME_POINTER_REGNUM)
    971  1.1  mrg     dest = cfun->machine->frame.hard_frame_pointer_offset;
    972  1.1  mrg   else if (to == STACK_POINTER_REGNUM)
    973  1.1  mrg     dest = 0; /* The stack pointer is the base of all offsets, hence 0.  */
    974  1.1  mrg   else
    975  1.1  mrg     gcc_unreachable ();
    976  1.1  mrg 
    977  1.1  mrg   if (from == FRAME_POINTER_REGNUM)
    978  1.1  mrg     src = cfun->machine->frame.frame_pointer_offset;
    979  1.1  mrg   else if (from == ARG_POINTER_REGNUM)
    980  1.1  mrg     src = cfun->machine->frame.arg_pointer_offset;
    981  1.1  mrg   else
    982  1.1  mrg     gcc_unreachable ();
    983  1.1  mrg 
    984  1.1  mrg   return src - dest;
    985  1.1  mrg }
    986  1.1  mrg 
/* A function to save or store a register.  The first argument is the
   register and the second is the stack slot.  */
typedef void (*loongarch_save_restore_fn) (rtx, rtx);

/* Use FN to save or restore register REGNO.  MODE is the register's
   mode and OFFSET is the offset of its save slot from the current
   stack pointer.  */

static void
loongarch_save_restore_reg (machine_mode mode, int regno, HOST_WIDE_INT offset,
			    loongarch_save_restore_fn fn)
{
  rtx mem;

  /* Build the stack-pointer-relative frame slot, then let FN perform
     the actual load or store between it and the hard register.  */
  mem = gen_frame_mem (mode, plus_constant (Pmode, stack_pointer_rtx, offset));
  fn (gen_rtx_REG (mode, regno), mem);
}
   1004  1.1  mrg 
/* Call FN for each register that is saved by the current function.
   SP_OFFSET is the offset of the current stack pointer from the start
   of the frame.  If SKIP_EH_DATA_REGS_P, skip (but still account the
   slot of) the EH data registers $r4-$r7; see the comment below.  */

static void
loongarch_for_each_saved_reg (HOST_WIDE_INT sp_offset,
			      loongarch_save_restore_fn fn,
			      bool skip_eh_data_regs_p)
{
  HOST_WIDE_INT offset;

  /* Save the link register and s-registers.  */
  offset = cfun->machine->frame.gp_sp_offset - sp_offset;
  for (int regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
    if (BITSET_P (cfun->machine->frame.mask, regno - GP_REG_FIRST))
      {
	/* Special care needs to be taken for $r4-$r7 (EH_RETURN_DATA_REGNO)
	   when returning normally from a function that calls
	   __builtin_eh_return.  In this case, these registers are saved but
	   should not be restored, or the return value may be clobbered.  */

	if (!(skip_eh_data_regs_p
	      && GP_ARG_FIRST <= regno && regno < GP_ARG_FIRST + 4))
	  loongarch_save_restore_reg (word_mode, regno, offset, fn);

	/* The slot is consumed whether or not FN was invoked, so the
	   layout stays in sync with loongarch_compute_frame_info.  */
	offset -= UNITS_PER_WORD;
      }

  /* This loop must iterate over the same space as its companion in
     loongarch_compute_frame_info.  */
  offset = cfun->machine->frame.fp_sp_offset - sp_offset;
  for (int regno = FP_REG_FIRST; regno <= FP_REG_LAST; regno++)
    if (BITSET_P (cfun->machine->frame.fmask, regno - FP_REG_FIRST))
      {
	machine_mode mode = TARGET_DOUBLE_FLOAT ? DFmode : SFmode;

	loongarch_save_restore_reg (mode, regno, offset, fn);
	offset -= GET_MODE_SIZE (mode);
      }
}
   1045  1.1  mrg 
   1046  1.1  mrg /* Emit a move from SRC to DEST.  Assume that the move expanders can
   1047  1.1  mrg    handle all moves if !can_create_pseudo_p ().  The distinction is
   1048  1.1  mrg    important because, unlike emit_move_insn, the move expanders know
   1049  1.1  mrg    how to force Pmode objects into the constant pool even when the
   1050  1.1  mrg    constant pool address is not itself legitimate.  */
   1051  1.1  mrg 
   1052  1.1  mrg rtx
   1053  1.1  mrg loongarch_emit_move (rtx dest, rtx src)
   1054  1.1  mrg {
   1055  1.1  mrg   return (can_create_pseudo_p () ? emit_move_insn (dest, src)
   1056  1.1  mrg 				 : emit_move_insn_1 (dest, src));
   1057  1.1  mrg }
   1058  1.1  mrg 
/* Save register REG to MEM.  Make the instruction frame-related.  */

static void
loongarch_save_reg (rtx reg, rtx mem)
{
  loongarch_emit_move (mem, reg);
  /* Must follow the move: loongarch_set_frame_expr annotates whatever
     insn get_last_insn () returns.  */
  loongarch_set_frame_expr (loongarch_frame_set (mem, reg));
}
   1067  1.1  mrg 
   1068  1.1  mrg /* Restore register REG from MEM.  */
   1069  1.1  mrg 
   1070  1.1  mrg static void
   1071  1.1  mrg loongarch_restore_reg (rtx reg, rtx mem)
   1072  1.1  mrg {
   1073  1.1  mrg   rtx insn = loongarch_emit_move (reg, mem);
   1074  1.1  mrg   rtx dwarf = NULL_RTX;
   1075  1.1  mrg   dwarf = alloc_reg_note (REG_CFA_RESTORE, reg, dwarf);
   1076  1.1  mrg   REG_NOTES (insn) = dwarf;
   1077  1.1  mrg 
   1078  1.1  mrg   RTX_FRAME_RELATED_P (insn) = 1;
   1079  1.1  mrg }
   1080  1.1  mrg 
/* For stack frames that can't be allocated with a single ADDI instruction,
   compute the best value to initially allocate.  It must at a minimum
   allocate enough space to spill the callee-saved registers.  */

static HOST_WIDE_INT
loongarch_first_stack_step (struct loongarch_frame_info *frame)
{
  /* A frame whose size fits a signed 12-bit immediate needs one step.  */
  if (IMM12_OPERAND (frame->total_size))
    return frame->total_size;

  /* The first step must at least reach down to the register save area,
     i.e. everything above fp_sp_offset.  */
  HOST_WIDE_INT min_first_step
    = LARCH_STACK_ALIGN (frame->total_size - frame->fp_sp_offset);
  HOST_WIDE_INT max_first_step = IMM_REACH / 2 - PREFERRED_STACK_BOUNDARY / 8;
  HOST_WIDE_INT min_second_step = frame->total_size - max_first_step;
  gcc_assert (min_first_step <= max_first_step);

  /* As an optimization, use the least-significant bits of the total frame
     size, so that the second adjustment step is just LU12I + ADD.  */
  if (!IMM12_OPERAND (min_second_step)
      && frame->total_size % IMM_REACH < IMM_REACH / 2
      && frame->total_size % IMM_REACH >= min_first_step)
    return frame->total_size % IMM_REACH;

  return max_first_step;
}
   1106  1.1  mrg 
   1107  1.1  mrg static void
   1108  1.1  mrg loongarch_emit_stack_tie (void)
   1109  1.1  mrg {
   1110  1.1  mrg   emit_insn (gen_stack_tie (Pmode, stack_pointer_rtx,
   1111  1.1  mrg 			    frame_pointer_needed ? hard_frame_pointer_rtx
   1112  1.1  mrg 			    : stack_pointer_rtx));
   1113  1.1  mrg }
   1114  1.1  mrg 
#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#if PROBE_INTERVAL > 16384
#error Cannot use indexed addressing mode for stack probing
#endif

/* Emit code to probe a range of stack addresses from FIRST to FIRST+SIZE,
   inclusive.  These are offsets from the current stack pointer.  */

static void
loongarch_emit_probe_stack_range (HOST_WIDE_INT first, HOST_WIDE_INT size)
{
  /* See if we have a constant small number of probes to generate.  If so,
     that's the easy case.  */
  if ((TARGET_64BIT && (first + size <= 32768))
      || (!TARGET_64BIT && (first + size <= 2048)))
    {
      HOST_WIDE_INT i;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
	 it exceeds SIZE.  If only one probe is needed, this will not
	 generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < size; i += PROBE_INTERVAL)
	emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
					 -(first + i)));

      emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
				       -(first + size)));
    }

  /* Otherwise, do the same as above, but in a loop.  Note that we must be
     extra careful with variables wrapping around because we might be at
     the very top (or the very bottom) of the address space and we have
     to be able to handle this case properly; in particular, we use an
     equality test for the loop condition.  */
  else
    {
      HOST_WIDE_INT rounded_size;
      rtx r13 = LARCH_PROLOGUE_TEMP (Pmode);
      rtx r12 = LARCH_PROLOGUE_TEMP2 (Pmode);
      rtx r14 = LARCH_PROLOGUE_TEMP3 (Pmode);

      /* Sanity check for the addressing mode we're going to use.  */
      gcc_assert (first <= 16384);


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      rounded_size = ROUND_DOWN (size, PROBE_INTERVAL);

      /* TEST_ADDR = SP + FIRST */
      if (first != 0)
	{
	  emit_move_insn (r14, GEN_INT (first));
	  emit_insn (gen_rtx_SET (r13, gen_rtx_MINUS (Pmode,
						      stack_pointer_rtx,
						      r14)));
	}
      else
	emit_move_insn (r13, stack_pointer_rtx);

      /* Step 2: compute initial and final value of the loop counter.  */

      emit_move_insn (r14, GEN_INT (PROBE_INTERVAL));
      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      if (rounded_size == 0)
	emit_move_insn (r12, r13);
      else
	{
	  emit_move_insn (r12, GEN_INT (rounded_size));
	  emit_insn (gen_rtx_SET (r12, gen_rtx_MINUS (Pmode, r13, r12)));
	  /* Step 3: the loop

	     do
	     {
	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
	     probe at TEST_ADDR
	     }
	     while (TEST_ADDR != LAST_ADDR)

	     probes at FIRST + N * PROBE_INTERVAL for values of N from 1
	     until it is equal to ROUNDED_SIZE.  */

	  emit_insn (gen_probe_stack_range (Pmode, r13, r13, r12, r14));
	}

      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
	 that SIZE is equal to ROUNDED_SIZE.  */

      if (size != rounded_size)
	{
	  if (TARGET_64BIT)
	    emit_stack_probe (plus_constant (Pmode, r12, rounded_size - size));
	  else
	    {
	      /* On 32-bit targets the residual is probed in 2048-byte
		 steps so each displacement stays within immediate
		 range.  */
	      HOST_WIDE_INT i;
	      for (i = 2048; i < (size - rounded_size); i += 2048)
		{
		  emit_stack_probe (plus_constant (Pmode, r12, -i));
		  emit_insn (gen_rtx_SET (r12,
					  plus_constant (Pmode, r12, -2048)));
		}
	      rtx r1 = plus_constant (Pmode, r12,
				      -(size - rounded_size - i + 2048));
	      emit_stack_probe (r1);
	    }
	}
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}
   1227  1.1  mrg 
   1228  1.1  mrg /* Probe a range of stack addresses from REG1 to REG2 inclusive.  These are
   1229  1.1  mrg    absolute addresses.  */
   1230  1.1  mrg const char *
   1231  1.1  mrg loongarch_output_probe_stack_range (rtx reg1, rtx reg2, rtx reg3)
   1232  1.1  mrg {
   1233  1.1  mrg   static int labelno = 0;
   1234  1.1  mrg   char loop_lab[32], tmp[64];
   1235  1.1  mrg   rtx xops[3];
   1236  1.1  mrg 
   1237  1.1  mrg   ASM_GENERATE_INTERNAL_LABEL (loop_lab, "LPSRL", labelno++);
   1238  1.1  mrg 
   1239  1.1  mrg   /* Loop.  */
   1240  1.1  mrg   ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, loop_lab);
   1241  1.1  mrg 
   1242  1.1  mrg   /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
   1243  1.1  mrg   xops[0] = reg1;
   1244  1.1  mrg   xops[1] = GEN_INT (-PROBE_INTERVAL);
   1245  1.1  mrg   xops[2] = reg3;
   1246  1.1  mrg   if (TARGET_64BIT)
   1247  1.1  mrg     output_asm_insn ("sub.d\t%0,%0,%2", xops);
   1248  1.1  mrg   else
   1249  1.1  mrg     output_asm_insn ("sub.w\t%0,%0,%2", xops);
   1250  1.1  mrg 
   1251  1.1  mrg   /* Probe at TEST_ADDR, test if TEST_ADDR == LAST_ADDR and branch.  */
   1252  1.1  mrg   xops[1] = reg2;
   1253  1.1  mrg   strcpy (tmp, "bne\t%0,%1,");
   1254  1.1  mrg   if (TARGET_64BIT)
   1255  1.1  mrg     output_asm_insn ("st.d\t$r0,%0,0", xops);
   1256  1.1  mrg   else
   1257  1.1  mrg     output_asm_insn ("st.w\t$r0,%0,0", xops);
   1258  1.1  mrg   output_asm_insn (strcat (tmp, &loop_lab[1]), xops);
   1259  1.1  mrg 
   1260  1.1  mrg   return "";
   1261  1.1  mrg }
   1262  1.1  mrg 
/* Expand the "prologue" pattern: emit stack probing (when enabled),
   the stack-pointer adjustment, callee-saved register stores, and
   frame-pointer setup, with frame-related annotations for CFI.  */

void
loongarch_expand_prologue (void)
{
  struct loongarch_frame_info *frame = &cfun->machine->frame;
  HOST_WIDE_INT size = frame->total_size;
  HOST_WIDE_INT tmp;
  rtx insn;

  if (flag_stack_usage_info)
    current_function_static_stack_size = size;

  /* Emit stack-clash / stack-check probes before touching the frame.  */
  if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK
      || flag_stack_clash_protection)
    {
      if (crtl->is_leaf && !cfun->calls_alloca)
	{
	  /* A leaf function only needs probing beyond the guard zone.  */
	  if (size > PROBE_INTERVAL && size > get_stack_check_protect ())
	    {
	      tmp = size - get_stack_check_protect ();
	      loongarch_emit_probe_stack_range (get_stack_check_protect (),
						tmp);
	    }
	}
      else if (size > 0)
	loongarch_emit_probe_stack_range (get_stack_check_protect (), size);
    }

  /* Save the registers.  */
  if ((frame->mask | frame->fmask) != 0)
    {
      /* First allocate just enough of the frame to reach the save slots
	 with 12-bit offsets; the remainder is allocated below.  */
      HOST_WIDE_INT step1 = MIN (size, loongarch_first_stack_step (frame));

      insn = gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (-step1));
      RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
      size -= step1;
      loongarch_for_each_saved_reg (size, loongarch_save_reg, false);
    }


  /* Set up the frame pointer, if we're using one.  */
  if (frame_pointer_needed)
    {
      insn = gen_add3_insn (hard_frame_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (frame->hard_frame_pointer_offset - size));
      RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;

      loongarch_emit_stack_tie ();
    }

  /* Allocate the rest of the frame.  */
  if (size > 0)
    {
      if (IMM12_OPERAND (-size))
	{
	  insn = gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (-size));
	  RTX_FRAME_RELATED_P (emit_insn (insn)) = 1;
	}
      else
	{
	  /* The adjustment does not fit an immediate; go through a
	     temporary register and attach an explicit CFI note.  */
	  loongarch_emit_move (LARCH_PROLOGUE_TEMP (Pmode), GEN_INT (-size));
	  emit_insn (gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
				    LARCH_PROLOGUE_TEMP (Pmode)));

	  /* Describe the effect of the previous instructions.  */
	  insn = plus_constant (Pmode, stack_pointer_rtx, -size);
	  insn = gen_rtx_SET (stack_pointer_rtx, insn);
	  loongarch_set_frame_expr (insn);
	}
    }
}
   1337  1.1  mrg 
   1338  1.1  mrg /* Return nonzero if this function is known to have a null epilogue.
   1339  1.1  mrg    This allows the optimizer to omit jumps to jumps if no stack
   1340  1.1  mrg    was created.  */
   1341  1.1  mrg 
   1342  1.1  mrg bool
   1343  1.1  mrg loongarch_can_use_return_insn (void)
   1344  1.1  mrg {
   1345  1.1  mrg   return reload_completed && cfun->machine->frame.total_size == 0;
   1346  1.1  mrg }
   1347  1.1  mrg 
/* Expand function epilogue using the following insn patterns:
   "epilogue"	      (style == NORMAL_RETURN)
   "sibcall_epilogue" (style == SIBCALL_RETURN)
   "eh_return"	      (style == EXCEPTION_RETURN) */

void
loongarch_expand_epilogue (int style)
{
  /* Split the frame into two.  STEP1 is the amount of stack we should
     deallocate before restoring the registers.  STEP2 is the amount we
     should deallocate afterwards.

     Start off by assuming that no registers need to be restored.  */
  struct loongarch_frame_info *frame = &cfun->machine->frame;
  HOST_WIDE_INT step1 = frame->total_size;
  HOST_WIDE_INT step2 = 0;
  rtx ra = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
  rtx insn;

  /* We need to add memory barrier to prevent read from deallocated stack.  */
  bool need_barrier_p
    = (get_frame_size () + cfun->machine->frame.arg_pointer_offset) != 0;

  /* Handle simple returns: no frame at all means we can just emit the
     bare return pattern and be done.  */
  if (style == NORMAL_RETURN && loongarch_can_use_return_insn ())
    {
      emit_jump_insn (gen_return ());
      return;
    }

  /* Move past any dynamic stack allocations.  With alloca, SP is not a
     known offset from the incoming SP, so recover it from the hard
     frame pointer instead.  */
  if (cfun->calls_alloca)
    {
      /* Emit a barrier to prevent loads from a deallocated stack.  */
      loongarch_emit_stack_tie ();
      need_barrier_p = false;

      rtx adjust = GEN_INT (-frame->hard_frame_pointer_offset);
      if (!IMM12_OPERAND (INTVAL (adjust)))
	{
	  /* Offset too big for an addi immediate; materialize it in the
	     prologue temp register first.  */
	  loongarch_emit_move (LARCH_PROLOGUE_TEMP (Pmode), adjust);
	  adjust = LARCH_PROLOGUE_TEMP (Pmode);
	}

      insn = emit_insn (gen_add3_insn (stack_pointer_rtx,
				       hard_frame_pointer_rtx,
				       adjust));

      /* Attach a REG_CFA_ADJUST_CFA note describing
	 SP = FP - hard_frame_pointer_offset so the unwinder can track
	 the CFA across this adjustment.  */
      rtx dwarf = NULL_RTX;
      rtx minus_offset = GEN_INT (-frame->hard_frame_pointer_offset);
      rtx cfa_adjust_value = gen_rtx_PLUS (Pmode,
					   hard_frame_pointer_rtx,
					   minus_offset);

      rtx cfa_adjust_rtx = gen_rtx_SET (stack_pointer_rtx, cfa_adjust_value);
      dwarf = alloc_reg_note (REG_CFA_ADJUST_CFA, cfa_adjust_rtx, dwarf);
      RTX_FRAME_RELATED_P (insn) = 1;

      REG_NOTES (insn) = dwarf;
    }

  /* If we need to restore registers, deallocate as much stack as
     possible in the second step without going out of range.  */
  if ((frame->mask | frame->fmask) != 0)
    {
      step2 = loongarch_first_stack_step (frame);
      step1 -= step2;
    }

  /* Set TARGET to BASE + STEP1.  */
  if (step1 > 0)
    {
      /* Emit a barrier to prevent loads from a deallocated stack.  */
      loongarch_emit_stack_tie ();
      need_barrier_p = false;

      /* Get an rtx for STEP1 that we can add to BASE.  */
      rtx adjust = GEN_INT (step1);
      if (!IMM12_OPERAND (step1))
	{
	  loongarch_emit_move (LARCH_PROLOGUE_TEMP (Pmode), adjust);
	  adjust = LARCH_PROLOGUE_TEMP (Pmode);
	}

      insn = emit_insn (gen_add3_insn (stack_pointer_rtx,
				       stack_pointer_rtx,
				       adjust));

      /* After this add, the CFA is SP + STEP2 (the part still to be
	 deallocated); record that with a REG_CFA_DEF_CFA note.  */
      rtx dwarf = NULL_RTX;
      rtx cfa_adjust_rtx = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (step2));

      dwarf = alloc_reg_note (REG_CFA_DEF_CFA, cfa_adjust_rtx, dwarf);
      RTX_FRAME_RELATED_P (insn) = 1;

      REG_NOTES (insn) = dwarf;
    }

  /* Restore the registers.  The third argument presumably tells the
     walker to skip the EH data registers when a normal return path of
     an eh_return function is expanded — TODO confirm against
     loongarch_for_each_saved_reg.  */
  loongarch_for_each_saved_reg (frame->total_size - step2,
				loongarch_restore_reg,
				crtl->calls_eh_return
				&& style != EXCEPTION_RETURN);

  if (need_barrier_p)
    loongarch_emit_stack_tie ();

  /* Deallocate the final bit of the frame.  */
  if (step2 > 0)
    {
      insn = emit_insn (gen_add3_insn (stack_pointer_rtx,
				       stack_pointer_rtx,
				       GEN_INT (step2)));

      /* The CFA is now exactly SP again.  */
      rtx dwarf = NULL_RTX;
      rtx cfa_adjust_rtx = gen_rtx_PLUS (Pmode, stack_pointer_rtx, const0_rtx);
      dwarf = alloc_reg_note (REG_CFA_DEF_CFA, cfa_adjust_rtx, dwarf);
      RTX_FRAME_RELATED_P (insn) = 1;

      REG_NOTES (insn) = dwarf;
    }

  /* Add in the __builtin_eh_return stack adjustment.  */
  if (crtl->calls_eh_return && style == EXCEPTION_RETURN)
    emit_insn (gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
			      EH_RETURN_STACKADJ_RTX));

  /* Emit return unless doing sibcall.  */
  if (style != SIBCALL_RETURN)
    emit_jump_insn (gen_simple_return_internal (ra));
}
   1479  1.1  mrg 
   1480  1.1  mrg #define LU32I_B (0xfffffULL << 32)
   1481  1.1  mrg #define LU52I_B (0xfffULL << 52)
   1482  1.1  mrg 
/* Fill CODES with a sequence of rtl operations to load VALUE.
   Return the number of operations needed.  */

static unsigned int
loongarch_build_integer (struct loongarch_integer_op *codes,
			 HOST_WIDE_INT value)

{
  unsigned int cost = 0;

  /* Get the lower 32 bits of the value.  */
  HOST_WIDE_INT low_part = (int32_t)value;

  if (IMM12_OPERAND (low_part) || IMM12_OPERAND_UNSIGNED (low_part))
    {
      /* The low 32 bits fit in a single 12-bit immediate (signed or
	 unsigned), so one instruction loads them.  */
      codes[0].code = UNKNOWN;
      codes[0].method = METHOD_NORMAL;
      codes[0].value = low_part;
      cost++;
    }
  else
    {
      /* lu12i.w for bits 12-31, plus an optional IOR for bits 0-11.  */
      codes[0].code = UNKNOWN;
      codes[0].method = METHOD_NORMAL;
      codes[0].value = low_part & ~(IMM_REACH - 1);
      cost++;
      HOST_WIDE_INT iorv = low_part & (IMM_REACH - 1);
      if (iorv != 0)
	{
	  codes[1].code = IOR;
	  codes[1].method = METHOD_NORMAL;
	  codes[1].value = iorv;
	  cost++;
	}
    }

  if (TARGET_64BIT)
    {
      /* lu32i[s] / lu52i[s] record whether bits 32-51 / 52-63 already
	 equal the sign extension of the bits below them (index 0: all
	 zeros, index 1: all ones).  */
      bool lu32i[2] = {(value & LU32I_B) == 0, (value & LU32I_B) == LU32I_B};
      bool lu52i[2] = {(value & LU52I_B) == 0, (value & LU52I_B) == LU52I_B};

      int sign31 = (value & (HOST_WIDE_INT_1U << 31)) >> 31;
      int sign51 = (value & (HOST_WIDE_INT_1U << 51)) >> 51;
      /* Determine whether the upper 32 bits are sign-extended from the lower
	 32 bits. If it is, the instructions to load the high order can be
	 ommitted.  */
      if (lu32i[sign31] && lu52i[sign31])
	return cost;
      /* Determine whether bits 32-51 are sign-extended from the lower 32
	 bits. If so, directly load 52-63 bits.  */
      else if (lu32i[sign31])
	{
	  codes[cost].method = METHOD_LU52I;
	  codes[cost].value = value & LU52I_B;
	  return cost + 1;
	}

      /* Otherwise load bits 32-51 with lu32i.d; its result is
	 sign-extended into bits 52-63, which VALUE accounts for here.  */
      codes[cost].method = METHOD_LU32I;
      codes[cost].value = (value & LU32I_B) | (sign51 ? LU52I_B : 0);
      cost++;

      /* Determine whether the 52-61 bits are sign-extended from the low order,
	 and if not, load the 52-61 bits.  */
      if (!lu52i[(value & (HOST_WIDE_INT_1U << 51)) >> 51])
	{
	  codes[cost].method = METHOD_LU52I;
	  codes[cost].value = value & LU52I_B;
	  cost++;
	}
    }

  /* CODES has room for LARCH_MAX_INTEGER_OPS entries; the sequence
     above can never exceed that.  */
  gcc_assert (cost <= LARCH_MAX_INTEGER_OPS);

  return cost;
}
   1561  1.1  mrg 
   1562  1.1  mrg /* Fill CODES with a sequence of rtl operations to load VALUE.
   1563  1.1  mrg    Return the number of operations needed.
   1564  1.1  mrg    Split interger in loongarch_output_move.  */
   1565  1.1  mrg 
   1566  1.1  mrg static unsigned int
   1567  1.1  mrg loongarch_integer_cost (HOST_WIDE_INT value)
   1568  1.1  mrg {
   1569  1.1  mrg   struct loongarch_integer_op codes[LARCH_MAX_INTEGER_OPS];
   1570  1.1  mrg   return loongarch_build_integer (codes, value);
   1571  1.1  mrg }
   1572  1.1  mrg 
   1573  1.1  mrg /* Implement TARGET_LEGITIMATE_CONSTANT_P.  */
   1574  1.1  mrg 
   1575  1.1  mrg static bool
   1576  1.1  mrg loongarch_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
   1577  1.1  mrg {
   1578  1.1  mrg   return loongarch_const_insns (x) > 0;
   1579  1.1  mrg }
   1580  1.1  mrg 
   1581  1.1  mrg /* Return true if X is a thread-local symbol.  */
   1582  1.1  mrg 
   1583  1.1  mrg static bool
   1584  1.1  mrg loongarch_tls_symbol_p (rtx x)
   1585  1.1  mrg {
   1586  1.1  mrg   return SYMBOL_REF_P (x) && SYMBOL_REF_TLS_MODEL (x) != 0;
   1587  1.1  mrg }
   1588  1.1  mrg 
   1589  1.1  mrg /* Return true if SYMBOL_REF X is associated with a global symbol
   1590  1.1  mrg    (in the STB_GLOBAL sense).  */
   1591  1.1  mrg 
   1592  1.1  mrg bool
   1593  1.1  mrg loongarch_global_symbol_p (const_rtx x)
   1594  1.1  mrg {
   1595  1.1  mrg   if (LABEL_REF_P (x))
   1596  1.1  mrg     return false;
   1597  1.1  mrg 
   1598  1.1  mrg   const_tree decl = SYMBOL_REF_DECL (x);
   1599  1.1  mrg 
   1600  1.1  mrg   if (!decl)
   1601  1.1  mrg     return !SYMBOL_REF_LOCAL_P (x) || SYMBOL_REF_EXTERNAL_P (x);
   1602  1.1  mrg 
   1603  1.1  mrg   /* Weakref symbols are not TREE_PUBLIC, but their targets are global
   1604  1.1  mrg      or weak symbols.  Relocations in the object file will be against
   1605  1.1  mrg      the target symbol, so it's that symbol's binding that matters here.  */
   1606  1.1  mrg   return DECL_P (decl) && (TREE_PUBLIC (decl) || DECL_WEAK (decl));
   1607  1.1  mrg }
   1608  1.1  mrg 
   1609  1.1  mrg bool
   1610  1.1  mrg loongarch_global_symbol_noweak_p (const_rtx x)
   1611  1.1  mrg {
   1612  1.1  mrg   if (LABEL_REF_P (x))
   1613  1.1  mrg     return false;
   1614  1.1  mrg 
   1615  1.1  mrg   const_tree decl = SYMBOL_REF_DECL (x);
   1616  1.1  mrg 
   1617  1.1  mrg   if (!decl)
   1618  1.1  mrg     return !SYMBOL_REF_LOCAL_P (x) || SYMBOL_REF_EXTERNAL_P (x);
   1619  1.1  mrg 
   1620  1.1  mrg   return DECL_P (decl) && TREE_PUBLIC (decl);
   1621  1.1  mrg }
   1622  1.1  mrg 
   1623  1.1  mrg bool
   1624  1.1  mrg loongarch_weak_symbol_p (const_rtx x)
   1625  1.1  mrg {
   1626  1.1  mrg   const_tree decl;
   1627  1.1  mrg   if (LABEL_REF_P (x) || !(decl = SYMBOL_REF_DECL (x)))
   1628  1.1  mrg     return false;
   1629  1.1  mrg   return DECL_P (decl) && DECL_WEAK (decl);
   1630  1.1  mrg }
   1631  1.1  mrg 
   1632  1.1  mrg /* Return true if SYMBOL_REF X binds locally.  */
   1633  1.1  mrg 
   1634  1.1  mrg bool
   1635  1.1  mrg loongarch_symbol_binds_local_p (const_rtx x)
   1636  1.1  mrg {
   1637  1.1  mrg   if (LABEL_REF_P (x))
   1638  1.1  mrg     return false;
   1639  1.1  mrg 
   1640  1.1  mrg   return (SYMBOL_REF_DECL (x) ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
   1641  1.1  mrg 			      : SYMBOL_REF_LOCAL_P (x));
   1642  1.1  mrg }
   1643  1.1  mrg 
   1644  1.1  mrg /* Return true if rtx constants of mode MODE should be put into a small
   1645  1.1  mrg    data section.  */
   1646  1.1  mrg 
   1647  1.1  mrg static bool
   1648  1.1  mrg loongarch_rtx_constant_in_small_data_p (machine_mode mode)
   1649  1.1  mrg {
   1650  1.1  mrg   return (GET_MODE_SIZE (mode) <= g_switch_value);
   1651  1.1  mrg }
   1652  1.1  mrg 
   1653  1.1  mrg /* Return the method that should be used to access SYMBOL_REF or
   1654  1.1  mrg    LABEL_REF X.  */
   1655  1.1  mrg 
   1656  1.1  mrg static enum loongarch_symbol_type
   1657  1.1  mrg loongarch_classify_symbol (const_rtx x)
   1658  1.1  mrg {
   1659  1.1  mrg   if (LABEL_REF_P (x))
   1660  1.1  mrg     return SYMBOL_GOT_DISP;
   1661  1.1  mrg 
   1662  1.1  mrg   gcc_assert (SYMBOL_REF_P (x));
   1663  1.1  mrg 
   1664  1.1  mrg   if (SYMBOL_REF_TLS_MODEL (x))
   1665  1.1  mrg     return SYMBOL_TLS;
   1666  1.1  mrg 
   1667  1.1  mrg   if (SYMBOL_REF_P (x))
   1668  1.1  mrg     return SYMBOL_GOT_DISP;
   1669  1.1  mrg 
   1670  1.1  mrg   return SYMBOL_GOT_DISP;
   1671  1.1  mrg }
   1672  1.1  mrg 
/* Return true if X is a symbolic constant.  If it is,
   store the type of the symbol in *SYMBOL_TYPE.  */

bool
loongarch_symbolic_constant_p (rtx x, enum loongarch_symbol_type *symbol_type)
{
  rtx offset;

  /* Split (const (plus SYM OFFSET)) into SYM and OFFSET.  */
  split_const (x, &x, &offset);
  if (UNSPEC_ADDRESS_P (x))
    {
      /* An UNSPEC address wrapper already records its symbol type.  */
      *symbol_type = UNSPEC_ADDRESS_TYPE (x);
      x = UNSPEC_ADDRESS (x);
    }
  else if (SYMBOL_REF_P (x) || LABEL_REF_P (x))
    {
      *symbol_type = loongarch_classify_symbol (x);
      /* NOTE(review): a TLS symbol is accepted here before the offset
	 check below — presumably any offset is handled by the TLS
	 legitimization sequence; confirm against the TLS expansion
	 code.  */
      if (*symbol_type == SYMBOL_TLS)
	return true;
    }
  else
    return false;

  if (offset == const0_rtx)
    return true;

  /* Check whether a nonzero offset is valid for the underlying
     relocations.  */
  switch (*symbol_type)
    {
    case SYMBOL_GOT_DISP:
    case SYMBOL_TLSGD:
    case SYMBOL_TLSLDM:
    case SYMBOL_TLS:
      /* No currently-supported relocation allows a folded-in offset.  */
      return false;
    }
  gcc_unreachable ();
}
   1711  1.1  mrg 
/* Returns the number of instructions necessary to reference a symbol.
   TYPE is the symbol's classification; MODE is the mode of the access,
   or MAX_MACHINE_MODE when the symbol is used as an integer constant
   rather than as a memory address (see the callers in
   loongarch_const_insns and loongarch_cannot_force_const_mem).
   Return 0 if the symbol cannot be used in the given context.  */

static int
loongarch_symbol_insns (enum loongarch_symbol_type type, machine_mode mode)
{
  switch (type)
    {
    case SYMBOL_GOT_DISP:
      /* The constant will have to be loaded from the GOT before it
	 is used in an address.  */
      if (mode != MAX_MACHINE_MODE)
	return 0;

      return 3;

    case SYMBOL_TLSGD:
    case SYMBOL_TLSLDM:
      return 1;

    case SYMBOL_TLS:
      /* We don't treat a bare TLS symbol as a constant.  */
      return 0;
    }
  gcc_unreachable ();
}
   1737  1.1  mrg 
   1738  1.1  mrg /* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
   1739  1.1  mrg 
   1740  1.1  mrg static bool
   1741  1.1  mrg loongarch_cannot_force_const_mem (machine_mode mode, rtx x)
   1742  1.1  mrg {
   1743  1.1  mrg   enum loongarch_symbol_type type;
   1744  1.1  mrg   rtx base, offset;
   1745  1.1  mrg 
   1746  1.1  mrg   /* As an optimization, reject constants that loongarch_legitimize_move
   1747  1.1  mrg      can expand inline.
   1748  1.1  mrg 
   1749  1.1  mrg      Suppose we have a multi-instruction sequence that loads constant C
   1750  1.1  mrg      into register R.  If R does not get allocated a hard register, and
   1751  1.1  mrg      R is used in an operand that allows both registers and memory
   1752  1.1  mrg      references, reload will consider forcing C into memory and using
   1753  1.1  mrg      one of the instruction's memory alternatives.  Returning false
   1754  1.1  mrg      here will force it to use an input reload instead.  */
   1755  1.1  mrg   if (CONST_INT_P (x) && loongarch_legitimate_constant_p (mode, x))
   1756  1.1  mrg     return true;
   1757  1.1  mrg 
   1758  1.1  mrg   split_const (x, &base, &offset);
   1759  1.1  mrg   if (loongarch_symbolic_constant_p (base, &type))
   1760  1.1  mrg     {
   1761  1.1  mrg       /* The same optimization as for CONST_INT.  */
   1762  1.1  mrg       if (IMM12_INT (offset)
   1763  1.1  mrg 	  && loongarch_symbol_insns (type, MAX_MACHINE_MODE) > 0)
   1764  1.1  mrg 	return true;
   1765  1.1  mrg     }
   1766  1.1  mrg 
   1767  1.1  mrg   /* TLS symbols must be computed by loongarch_legitimize_move.  */
   1768  1.1  mrg   if (tls_referenced_p (x))
   1769  1.1  mrg     return true;
   1770  1.1  mrg 
   1771  1.1  mrg   return false;
   1772  1.1  mrg }
   1773  1.1  mrg 
   1774  1.1  mrg /* Return true if register REGNO is a valid base register for mode MODE.
   1775  1.1  mrg    STRICT_P is true if REG_OK_STRICT is in effect.  */
   1776  1.1  mrg 
   1777  1.1  mrg int
   1778  1.1  mrg loongarch_regno_mode_ok_for_base_p (int regno,
   1779  1.1  mrg 				    machine_mode mode ATTRIBUTE_UNUSED,
   1780  1.1  mrg 				    bool strict_p)
   1781  1.1  mrg {
   1782  1.1  mrg   if (!HARD_REGISTER_NUM_P (regno))
   1783  1.1  mrg     {
   1784  1.1  mrg       if (!strict_p)
   1785  1.1  mrg 	return true;
   1786  1.1  mrg       regno = reg_renumber[regno];
   1787  1.1  mrg     }
   1788  1.1  mrg 
   1789  1.1  mrg   /* These fake registers will be eliminated to either the stack or
   1790  1.1  mrg      hard frame pointer, both of which are usually valid base registers.
   1791  1.1  mrg      Reload deals with the cases where the eliminated form isn't valid.  */
   1792  1.1  mrg   if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
   1793  1.1  mrg     return true;
   1794  1.1  mrg 
   1795  1.1  mrg   return GP_REG_P (regno);
   1796  1.1  mrg }
   1797  1.1  mrg 
   1798  1.1  mrg /* Return true if X is a valid base register for mode MODE.
   1799  1.1  mrg    STRICT_P is true if REG_OK_STRICT is in effect.  */
   1800  1.1  mrg 
   1801  1.1  mrg static bool
   1802  1.1  mrg loongarch_valid_base_register_p (rtx x, machine_mode mode, bool strict_p)
   1803  1.1  mrg {
   1804  1.1  mrg   if (!strict_p && SUBREG_P (x))
   1805  1.1  mrg     x = SUBREG_REG (x);
   1806  1.1  mrg 
   1807  1.1  mrg   return (REG_P (x)
   1808  1.1  mrg 	  && loongarch_regno_mode_ok_for_base_p (REGNO (x), mode, strict_p));
   1809  1.1  mrg }
   1810  1.1  mrg 
/* Return true if, for every base register BASE_REG, (plus BASE_REG X)
   can address a value of mode MODE.  */

static bool
loongarch_valid_offset_p (rtx x, machine_mode mode)
{
  /* Accept a signed 12-bit offset (ordinary loads/stores), or — for
     SImode/DImode accesses only — a signed 16-bit offset that is a
     multiple of 4, i.e. a 14-bit field scaled left by 2 bits.  */
  if (!(const_arith_operand (x, Pmode)
	|| ((mode == E_SImode || mode == E_DImode)
	    && const_imm16_operand (x, Pmode)
	    && (loongarch_signed_immediate_p (INTVAL (x), 14, 2)))))
    return false;

  /* We may need to split multiword moves, so make sure that every word
     is accessible.  */
  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
      && !IMM12_OPERAND (INTVAL (x) + GET_MODE_SIZE (mode) - UNITS_PER_WORD))
    return false;

  return true;
}
   1834  1.1  mrg 
   1835  1.1  mrg static bool
   1836  1.1  mrg loongarch_valid_index_p (struct loongarch_address_info *info, rtx x,
   1837  1.1  mrg 			  machine_mode mode, bool strict_p)
   1838  1.1  mrg {
   1839  1.1  mrg   rtx index;
   1840  1.1  mrg 
   1841  1.1  mrg   if ((REG_P (x) || SUBREG_P (x))
   1842  1.1  mrg       && GET_MODE (x) == Pmode)
   1843  1.1  mrg     {
   1844  1.1  mrg       index = x;
   1845  1.1  mrg     }
   1846  1.1  mrg   else
   1847  1.1  mrg     return false;
   1848  1.1  mrg 
   1849  1.1  mrg   if (!strict_p
   1850  1.1  mrg       && SUBREG_P (index)
   1851  1.1  mrg       && contains_reg_of_mode[GENERAL_REGS][GET_MODE (SUBREG_REG (index))])
   1852  1.1  mrg     index = SUBREG_REG (index);
   1853  1.1  mrg 
   1854  1.1  mrg   if (loongarch_valid_base_register_p (index, mode, strict_p))
   1855  1.1  mrg     {
   1856  1.1  mrg       info->type = ADDRESS_REG_REG;
   1857  1.1  mrg       info->offset = index;
   1858  1.1  mrg       return true;
   1859  1.1  mrg     }
   1860  1.1  mrg 
   1861  1.1  mrg   return false;
   1862  1.1  mrg }
   1863  1.1  mrg 
/* Return true if X is a valid address for machine mode MODE.  If it is,
   fill in INFO appropriately.  STRICT_P is true if REG_OK_STRICT is in
   effect.  */

static bool
loongarch_classify_address (struct loongarch_address_info *info, rtx x,
			    machine_mode mode, bool strict_p)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
      /* A bare register: base + zero offset.  */
      info->type = ADDRESS_REG;
      info->reg = x;
      info->offset = const0_rtx;
      return loongarch_valid_base_register_p (info->reg, mode, strict_p);

    case PLUS:
      /* First try base+index with the operands in either order.  */
      if (loongarch_valid_base_register_p (XEXP (x, 0), mode, strict_p)
	  && loongarch_valid_index_p (info, XEXP (x, 1), mode, strict_p))
	{
	  info->reg = XEXP (x, 0);
	  return true;
	}

      if (loongarch_valid_base_register_p (XEXP (x, 1), mode, strict_p)
	 && loongarch_valid_index_p (info, XEXP (x, 0), mode, strict_p))
	{
	  info->reg = XEXP (x, 1);
	  return true;
	}

      /* Otherwise it must be base + constant offset.  */
      info->type = ADDRESS_REG;
      info->reg = XEXP (x, 0);
      info->offset = XEXP (x, 1);
      return (loongarch_valid_base_register_p (info->reg, mode, strict_p)
	      && loongarch_valid_offset_p (info->offset, mode));
    default:
      return false;
    }
}
   1905  1.1  mrg 
   1906  1.1  mrg /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
   1907  1.1  mrg 
   1908  1.1  mrg static bool
   1909  1.1  mrg loongarch_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
   1910  1.1  mrg {
   1911  1.1  mrg   struct loongarch_address_info addr;
   1912  1.1  mrg 
   1913  1.1  mrg   return loongarch_classify_address (&addr, x, mode, strict_p);
   1914  1.1  mrg }
   1915  1.1  mrg 
   1916  1.1  mrg /* Return true if ADDR matches the pattern for the indexed address
   1917  1.1  mrg    instruction.  */
   1918  1.1  mrg 
   1919  1.1  mrg static bool
   1920  1.1  mrg loongarch_index_address_p (rtx addr, machine_mode mode ATTRIBUTE_UNUSED)
   1921  1.1  mrg {
   1922  1.1  mrg   if (GET_CODE (addr) != PLUS
   1923  1.1  mrg       || !REG_P (XEXP (addr, 0))
   1924  1.1  mrg       || !REG_P (XEXP (addr, 1)))
   1925  1.1  mrg     return false;
   1926  1.1  mrg   return true;
   1927  1.1  mrg }
   1928  1.1  mrg 
   1929  1.1  mrg /* Return the number of instructions needed to load or store a value
   1930  1.1  mrg    of mode MODE at address X.  Return 0 if X isn't valid for MODE.
   1931  1.1  mrg    Assume that multiword moves may need to be split into word moves
   1932  1.1  mrg    if MIGHT_SPLIT_P, otherwise assume that a single load or store is
   1933  1.1  mrg    enough.  */
   1934  1.1  mrg 
   1935  1.1  mrg int
   1936  1.1  mrg loongarch_address_insns (rtx x, machine_mode mode, bool might_split_p)
   1937  1.1  mrg {
   1938  1.1  mrg   struct loongarch_address_info addr;
   1939  1.1  mrg   int factor;
   1940  1.1  mrg 
   1941  1.1  mrg   if (!loongarch_classify_address (&addr, x, mode, false))
   1942  1.1  mrg     return 0;
   1943  1.1  mrg 
   1944  1.1  mrg   /* BLKmode is used for single unaligned loads and stores and should
   1945  1.1  mrg      not count as a multiword mode.  (GET_MODE_SIZE (BLKmode) is pretty
   1946  1.1  mrg      meaningless, so we have to single it out as a special case one way
   1947  1.1  mrg      or the other.)  */
   1948  1.1  mrg   if (mode != BLKmode && might_split_p)
   1949  1.1  mrg     factor = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
   1950  1.1  mrg   else
   1951  1.1  mrg     factor = 1;
   1952  1.1  mrg 
   1953  1.1  mrg   if (loongarch_classify_address (&addr, x, mode, false))
   1954  1.1  mrg     switch (addr.type)
   1955  1.1  mrg       {
   1956  1.1  mrg       case ADDRESS_REG:
   1957  1.1  mrg 	return factor;
   1958  1.1  mrg 
   1959  1.1  mrg       case ADDRESS_REG_REG:
   1960  1.1  mrg 	return factor;
   1961  1.1  mrg 
   1962  1.1  mrg       case ADDRESS_CONST_INT:
   1963  1.1  mrg 	return factor;
   1964  1.1  mrg 
   1965  1.1  mrg       case ADDRESS_SYMBOLIC:
   1966  1.1  mrg 	return factor * loongarch_symbol_insns (addr.symbol_type, mode);
   1967  1.1  mrg       }
   1968  1.1  mrg   return 0;
   1969  1.1  mrg }
   1970  1.1  mrg 
   1971  1.1  mrg /* Return true if X fits within an unsigned field of BITS bits that is
   1972  1.1  mrg    shifted left SHIFT bits before being used.  */
   1973  1.1  mrg 
   1974  1.1  mrg bool
   1975  1.1  mrg loongarch_unsigned_immediate_p (unsigned HOST_WIDE_INT x, int bits,
   1976  1.1  mrg 				int shift = 0)
   1977  1.1  mrg {
   1978  1.1  mrg   return (x & ((1 << shift) - 1)) == 0 && x < ((unsigned) 1 << (shift + bits));
   1979  1.1  mrg }
   1980  1.1  mrg 
   1981  1.1  mrg /* Return true if X fits within a signed field of BITS bits that is
   1982  1.1  mrg    shifted left SHIFT bits before being used.  */
   1983  1.1  mrg 
   1984  1.1  mrg bool
   1985  1.1  mrg loongarch_signed_immediate_p (unsigned HOST_WIDE_INT x, int bits,
   1986  1.1  mrg 			      int shift = 0)
   1987  1.1  mrg {
   1988  1.1  mrg   x += 1 << (bits + shift - 1);
   1989  1.1  mrg   return loongarch_unsigned_immediate_p (x, bits, shift);
   1990  1.1  mrg }
   1991  1.1  mrg 
   1992  1.1  mrg /* Return true if X is a legitimate address with a 12-bit offset.
   1993  1.1  mrg    MODE is the mode of the value being accessed.  */
   1994  1.1  mrg 
   1995  1.1  mrg bool
   1996  1.1  mrg loongarch_12bit_offset_address_p (rtx x, machine_mode mode)
   1997  1.1  mrg {
   1998  1.1  mrg   struct loongarch_address_info addr;
   1999  1.1  mrg 
   2000  1.1  mrg   return (loongarch_classify_address (&addr, x, mode, false)
   2001  1.1  mrg 	  && addr.type == ADDRESS_REG
   2002  1.1  mrg 	  && CONST_INT_P (addr.offset)
   2003  1.1  mrg 	  && LARCH_U12BIT_OFFSET_P (INTVAL (addr.offset)));
   2004  1.1  mrg }
   2005  1.1  mrg 
   2006  1.1  mrg /* Return true if X is a legitimate address with a 14-bit offset shifted 2.
   2007  1.1  mrg    MODE is the mode of the value being accessed.  */
   2008  1.1  mrg 
   2009  1.1  mrg bool
   2010  1.1  mrg loongarch_14bit_shifted_offset_address_p (rtx x, machine_mode mode)
   2011  1.1  mrg {
   2012  1.1  mrg   struct loongarch_address_info addr;
   2013  1.1  mrg 
   2014  1.1  mrg   return (loongarch_classify_address (&addr, x, mode, false)
   2015  1.1  mrg 	  && addr.type == ADDRESS_REG
   2016  1.1  mrg 	  && CONST_INT_P (addr.offset)
   2017  1.1  mrg 	  && LARCH_16BIT_OFFSET_P (INTVAL (addr.offset))
   2018  1.1  mrg 	  && LARCH_SHIFT_2_OFFSET_P (INTVAL (addr.offset)));
   2019  1.1  mrg }
   2020  1.1  mrg 
   2021  1.1  mrg bool
   2022  1.1  mrg loongarch_base_index_address_p (rtx x, machine_mode mode)
   2023  1.1  mrg {
   2024  1.1  mrg   struct loongarch_address_info addr;
   2025  1.1  mrg 
   2026  1.1  mrg   return (loongarch_classify_address (&addr, x, mode, false)
   2027  1.1  mrg 	  && addr.type == ADDRESS_REG_REG
   2028  1.1  mrg 	  && REG_P (addr.offset));
   2029  1.1  mrg }
   2030  1.1  mrg 
/* Return the number of instructions needed to load constant X;
   return 0 if X isn't a valid constant.  */

int
loongarch_const_insns (rtx x)
{
  enum loongarch_symbol_type symbol_type;
  rtx offset;

  switch (GET_CODE (x))
    {
    case CONST_INT:
      /* Cost of synthesizing the integer inline.  */
      return loongarch_integer_cost (INTVAL (x));

    case CONST_VECTOR:
      /* Fall through.  */
    case CONST_DOUBLE:
      /* Only the all-zero constant can be loaded directly; anything
	 else must come from memory, so report it as invalid here.  */
      return x == CONST0_RTX (GET_MODE (x)) ? 1 : 0;

    case CONST:
      /* See if we can refer to X directly.  */
      if (loongarch_symbolic_constant_p (x, &symbol_type))
	return loongarch_symbol_insns (symbol_type, MAX_MACHINE_MODE);

      /* Otherwise try splitting the constant into a base and offset.
	 If the offset is a 12-bit value, we can load the base address
	 into a register and then use ADDI.{W/D} to add in the offset.
	 If the offset is larger, we can load the base and offset
	 into separate registers and add them together with ADD.{W/D}.
	 However, the latter is only possible before reload; during
	 and after reload, we must have the option of forcing the
	 constant into the pool instead.  */
      split_const (x, &x, &offset);
      if (offset != 0)
	{
	  /* N is the cost of the base part alone (recursively).  */
	  int n = loongarch_const_insns (x);
	  if (n != 0)
	    {
	      if (IMM12_INT (offset))
		return n + 1;
	      else if (!targetm.cannot_force_const_mem (GET_MODE (x), x))
		return n + 1 + loongarch_integer_cost (INTVAL (offset));
	    }
	}
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return loongarch_symbol_insns (
	loongarch_classify_symbol (x), MAX_MACHINE_MODE);

    default:
      return 0;
    }
}
   2086  1.1  mrg 
   2087  1.1  mrg /* X is a doubleword constant that can be handled by splitting it into
   2088  1.1  mrg    two words and loading each word separately.  Return the number of
   2089  1.1  mrg    instructions required to do this.  */
   2090  1.1  mrg 
   2091  1.1  mrg int
   2092  1.1  mrg loongarch_split_const_insns (rtx x)
   2093  1.1  mrg {
   2094  1.1  mrg   unsigned int low, high;
   2095  1.1  mrg 
   2096  1.1  mrg   low = loongarch_const_insns (loongarch_subword (x, false));
   2097  1.1  mrg   high = loongarch_const_insns (loongarch_subword (x, true));
   2098  1.1  mrg   gcc_assert (low > 0 && high > 0);
   2099  1.1  mrg   return low + high;
   2100  1.1  mrg }
   2101  1.1  mrg 
   2102  1.1  mrg /* Return the number of instructions needed to implement INSN,
   2103  1.1  mrg    given that it loads from or stores to MEM.  */
   2104  1.1  mrg 
   2105  1.1  mrg int
   2106  1.1  mrg loongarch_load_store_insns (rtx mem, rtx_insn *insn)
   2107  1.1  mrg {
   2108  1.1  mrg   machine_mode mode;
   2109  1.1  mrg   bool might_split_p;
   2110  1.1  mrg   rtx set;
   2111  1.1  mrg 
   2112  1.1  mrg   gcc_assert (MEM_P (mem));
   2113  1.1  mrg   mode = GET_MODE (mem);
   2114  1.1  mrg 
   2115  1.1  mrg   /* Try to prove that INSN does not need to be split.  */
   2116  1.1  mrg   might_split_p = GET_MODE_SIZE (mode) > UNITS_PER_WORD;
   2117  1.1  mrg   if (might_split_p)
   2118  1.1  mrg     {
   2119  1.1  mrg       set = single_set (insn);
   2120  1.1  mrg       if (set
   2121  1.1  mrg 	  && !loongarch_split_move_insn_p (SET_DEST (set), SET_SRC (set)))
   2122  1.1  mrg 	might_split_p = false;
   2123  1.1  mrg     }
   2124  1.1  mrg 
   2125  1.1  mrg   return loongarch_address_insns (XEXP (mem, 0), mode, might_split_p);
   2126  1.1  mrg }
   2127  1.1  mrg 
   2128  1.1  mrg /* Return true if we need to trap on division by zero.  */
   2129  1.1  mrg 
   2130  1.1  mrg static bool
   2131  1.1  mrg loongarch_check_zero_div_p (void)
   2132  1.1  mrg {
   2133  1.1  mrg   /* if -m[no-]check-zero-division is given explicitly.  */
   2134  1.1  mrg   if (target_flags_explicit & MASK_CHECK_ZERO_DIV)
   2135  1.1  mrg     return TARGET_CHECK_ZERO_DIV;
   2136  1.1  mrg 
   2137  1.1  mrg   /* if not, don't trap for optimized code except -Og.  */
   2138  1.1  mrg   return !optimize || optimize_debug;
   2139  1.1  mrg }
   2140  1.1  mrg 
   2141  1.1  mrg /* Return the number of instructions needed for an integer division.  */
   2142  1.1  mrg 
   2143  1.1  mrg int
   2144  1.1  mrg loongarch_idiv_insns (machine_mode mode ATTRIBUTE_UNUSED)
   2145  1.1  mrg {
   2146  1.1  mrg   int count;
   2147  1.1  mrg 
   2148  1.1  mrg   count = 1;
   2149  1.1  mrg   if (loongarch_check_zero_div_p ())
   2150  1.1  mrg     count += 2;
   2151  1.1  mrg 
   2152  1.1  mrg   return count;
   2153  1.1  mrg }
   2154  1.1  mrg 
   2155  1.1  mrg /* Emit an instruction of the form (set TARGET (CODE OP0 OP1)).  */
   2156  1.1  mrg 
   2157  1.1  mrg void
   2158  1.1  mrg loongarch_emit_binary (enum rtx_code code, rtx target, rtx op0, rtx op1)
   2159  1.1  mrg {
   2160  1.1  mrg   emit_insn (gen_rtx_SET (target, gen_rtx_fmt_ee (code, GET_MODE (target),
   2161  1.1  mrg 						  op0, op1)));
   2162  1.1  mrg }
   2163  1.1  mrg 
   2164  1.1  mrg /* Compute (CODE OP0 OP1) and store the result in a new register
   2165  1.1  mrg    of mode MODE.  Return that new register.  */
   2166  1.1  mrg 
   2167  1.1  mrg static rtx
   2168  1.1  mrg loongarch_force_binary (machine_mode mode, enum rtx_code code, rtx op0,
   2169  1.1  mrg 			rtx op1)
   2170  1.1  mrg {
   2171  1.1  mrg   rtx reg;
   2172  1.1  mrg 
   2173  1.1  mrg   reg = gen_reg_rtx (mode);
   2174  1.1  mrg   loongarch_emit_binary (code, reg, op0, op1);
   2175  1.1  mrg   return reg;
   2176  1.1  mrg }
   2177  1.1  mrg 
   2178  1.1  mrg /* Copy VALUE to a register and return that register.  If new pseudos
   2179  1.1  mrg    are allowed, copy it into a new register, otherwise use DEST.  */
   2180  1.1  mrg 
   2181  1.1  mrg static rtx
   2182  1.1  mrg loongarch_force_temporary (rtx dest, rtx value)
   2183  1.1  mrg {
   2184  1.1  mrg   if (can_create_pseudo_p ())
   2185  1.1  mrg     return force_reg (Pmode, value);
   2186  1.1  mrg   else
   2187  1.1  mrg     {
   2188  1.1  mrg       loongarch_emit_move (dest, value);
   2189  1.1  mrg       return dest;
   2190  1.1  mrg     }
   2191  1.1  mrg }
   2192  1.1  mrg 
   2193  1.1  mrg /* Wrap symbol or label BASE in an UNSPEC address of type SYMBOL_TYPE,
   2194  1.1  mrg    then add CONST_INT OFFSET to the result.  */
   2195  1.1  mrg 
   2196  1.1  mrg static rtx
   2197  1.1  mrg loongarch_unspec_address_offset (rtx base, rtx offset,
   2198  1.1  mrg 				 enum loongarch_symbol_type symbol_type)
   2199  1.1  mrg {
   2200  1.1  mrg   base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base),
   2201  1.1  mrg 			 UNSPEC_ADDRESS_FIRST + symbol_type);
   2202  1.1  mrg   if (offset != const0_rtx)
   2203  1.1  mrg     base = gen_rtx_PLUS (Pmode, base, offset);
   2204  1.1  mrg   return gen_rtx_CONST (Pmode, base);
   2205  1.1  mrg }
   2206  1.1  mrg 
   2207  1.1  mrg /* Return an UNSPEC address with underlying address ADDRESS and symbol
   2208  1.1  mrg    type SYMBOL_TYPE.  */
   2209  1.1  mrg 
   2210  1.1  mrg rtx
   2211  1.1  mrg loongarch_unspec_address (rtx address, enum loongarch_symbol_type symbol_type)
   2212  1.1  mrg {
   2213  1.1  mrg   rtx base, offset;
   2214  1.1  mrg 
   2215  1.1  mrg   split_const (address, &base, &offset);
   2216  1.1  mrg   return loongarch_unspec_address_offset (base, offset, symbol_type);
   2217  1.1  mrg }
   2218  1.1  mrg 
   2219  1.1  mrg /* If OP is an UNSPEC address, return the address to which it refers,
   2220  1.1  mrg    otherwise return OP itself.  */
   2221  1.1  mrg 
   2222  1.1  mrg rtx
   2223  1.1  mrg loongarch_strip_unspec_address (rtx op)
   2224  1.1  mrg {
   2225  1.1  mrg   rtx base, offset;
   2226  1.1  mrg 
   2227  1.1  mrg   split_const (op, &base, &offset);
   2228  1.1  mrg   if (UNSPEC_ADDRESS_P (base))
   2229  1.1  mrg     op = plus_constant (Pmode, UNSPEC_ADDRESS (base), INTVAL (offset));
   2230  1.1  mrg   return op;
   2231  1.1  mrg }
   2232  1.1  mrg 
   2233  1.1  mrg /* Return a legitimate address for REG + OFFSET.  TEMP is as for
   2234  1.1  mrg    loongarch_force_temporary; it is only needed when OFFSET is not a
   2235  1.1  mrg    IMM12_OPERAND.  */
   2236  1.1  mrg 
   2237  1.1  mrg static rtx
   2238  1.1  mrg loongarch_add_offset (rtx temp, rtx reg, HOST_WIDE_INT offset)
   2239  1.1  mrg {
   2240  1.1  mrg   if (!IMM12_OPERAND (offset))
   2241  1.1  mrg     {
   2242  1.1  mrg       rtx high;
   2243  1.1  mrg 
   2244  1.1  mrg       /* Leave OFFSET as a 12-bit offset and put the excess in HIGH.
   2245  1.1  mrg 	 The addition inside the macro CONST_HIGH_PART may cause an
   2246  1.1  mrg 	 overflow, so we need to force a sign-extension check.  */
   2247  1.1  mrg       high = gen_int_mode (CONST_HIGH_PART (offset), Pmode);
   2248  1.1  mrg       offset = CONST_LOW_PART (offset);
   2249  1.1  mrg       high = loongarch_force_temporary (temp, high);
   2250  1.1  mrg       reg = loongarch_force_temporary (temp, gen_rtx_PLUS (Pmode, high, reg));
   2251  1.1  mrg     }
   2252  1.1  mrg   return plus_constant (Pmode, reg, offset);
   2253  1.1  mrg }
   2254  1.1  mrg 
/* The __tls_get_addr symbol.  */
   2256  1.1  mrg static GTY (()) rtx loongarch_tls_symbol;
   2257  1.1  mrg 
/* Load an entry from the GOT for a TLS GD (global-dynamic) access.
   DEST receives the GOT entry for symbol SYM.  */

static rtx
loongarch_got_load_tls_gd (rtx dest, rtx sym)
{
  return gen_got_load_tls_gd (Pmode, dest, sym);
}
   2265  1.1  mrg 
/* Load an entry from the GOT for a TLS LD (local-dynamic) access.
   DEST receives the GOT entry for symbol SYM.  */

static rtx
loongarch_got_load_tls_ld (rtx dest, rtx sym)
{
  return gen_got_load_tls_ld (Pmode, dest, sym);
}
   2273  1.1  mrg 
/* Load an entry from the GOT for a TLS IE (initial-exec) access.
   DEST receives the GOT entry for symbol SYM.  */

static rtx
loongarch_got_load_tls_ie (rtx dest, rtx sym)
{
  return gen_got_load_tls_ie (Pmode, dest, sym);
}
   2281  1.1  mrg 
/* Compute the TP-relative offset for a TLS LE (local-exec) access.
   DEST receives the offset of symbol SYM; the caller adds in the
   thread pointer separately.  */

static rtx
loongarch_got_load_tls_le (rtx dest, rtx sym)
{
  return gen_got_load_tls_le (Pmode, dest, sym);
}
   2289  1.1  mrg 
/* Return an instruction sequence that calls __tls_get_addr.  SYM is
   the TLS symbol we are referencing and TYPE is the symbol type to use
   (either global dynamic or local dynamic).  V0 is an RTX for the
   return value location.  */

static rtx_insn *
loongarch_call_tls_get_addr (rtx sym, enum loongarch_symbol_type type, rtx v0)
{
  rtx loc, a0;
  rtx_insn *insn;

  /* The TLS argument is passed in the first argument register.  */
  a0 = gen_rtx_REG (Pmode, GP_ARG_FIRST);

  /* Lazily create the libfunc symbol on first use.  */
  if (!loongarch_tls_symbol)
    loongarch_tls_symbol = init_one_libfunc ("__tls_get_addr");

  /* Wrap SYM in the relocation-specific UNSPEC.  */
  loc = loongarch_unspec_address (sym, type);

  start_sequence ();

  if (type == SYMBOL_TLSLDM)
    emit_insn (loongarch_got_load_tls_ld (a0, loc));
  else if (type == SYMBOL_TLSGD)
    emit_insn (loongarch_got_load_tls_gd (a0, loc));
  else
    gcc_unreachable ();

  /* Mark the call const so the optimizers may CSE it, and record that
     it uses A0 so the argument setup is not deleted.  */
  insn = emit_call_insn (gen_call_value_internal (v0, loongarch_tls_symbol,
						  const0_rtx));
  RTL_CONST_CALL_P (insn) = 1;
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), a0);
  insn = get_insns ();

  end_sequence ();

  return insn;
}
   2327  1.1  mrg 
/* Generate the code to access LOC, a thread-local SYMBOL_REF, and return
   its address.  The return value will be both a valid address and a valid
   SET_SRC (either a REG or a LO_SUM).  */

static rtx
loongarch_legitimize_tls_address (rtx loc)
{
  rtx dest, tp, tmp;
  enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);
  rtx_insn *insn;

  switch (model)
    {
    case TLS_MODEL_LOCAL_DYNAMIC:
      /* Call __tls_get_addr with an LDM relocation; wrap the call in
	 emit_libcall_block so the result can be equated with LOC.  */
      tmp = gen_rtx_REG (Pmode, GP_RETURN);
      dest = gen_reg_rtx (Pmode);
      insn = loongarch_call_tls_get_addr (loc, SYMBOL_TLSLDM, tmp);
      emit_libcall_block (insn, dest, tmp, loc);
      break;

    case TLS_MODEL_GLOBAL_DYNAMIC:
      /* Same shape as local dynamic, but with a GD relocation.  */
      tmp = gen_rtx_REG (Pmode, GP_RETURN);
      dest = gen_reg_rtx (Pmode);
      insn = loongarch_call_tls_get_addr (loc, SYMBOL_TLSGD, tmp);
      emit_libcall_block (insn, dest, tmp, loc);
      break;

    case TLS_MODEL_INITIAL_EXEC:
      /* la.tls.ie; tp-relative add  */
      tp = gen_rtx_REG (Pmode, THREAD_POINTER_REGNUM);
      tmp = gen_reg_rtx (Pmode);
      emit_insn (loongarch_got_load_tls_ie (tmp, loc));
      dest = gen_reg_rtx (Pmode);
      emit_insn (gen_add3_insn (dest, tmp, tp));
      break;

    case TLS_MODEL_LOCAL_EXEC:
      /* la.tls.le; tp-relative add  */
      tp = gen_rtx_REG (Pmode, THREAD_POINTER_REGNUM);
      tmp = gen_reg_rtx (Pmode);
      emit_insn (loongarch_got_load_tls_le (tmp, loc));
      dest = gen_reg_rtx (Pmode);
      emit_insn (gen_add3_insn (dest, tmp, tp));
      break;

    default:
      gcc_unreachable ();
    }
  return dest;
}
   2378  1.1  mrg 
   2379  1.1  mrg rtx
   2380  1.1  mrg loongarch_legitimize_call_address (rtx addr)
   2381  1.1  mrg {
   2382  1.1  mrg   if (!call_insn_operand (addr, VOIDmode))
   2383  1.1  mrg     {
   2384  1.1  mrg       rtx reg = gen_reg_rtx (Pmode);
   2385  1.1  mrg       loongarch_emit_move (reg, addr);
   2386  1.1  mrg       return reg;
   2387  1.1  mrg     }
   2388  1.1  mrg   return addr;
   2389  1.1  mrg }
   2390  1.1  mrg 
   2391  1.1  mrg /* If X is a PLUS of a CONST_INT, return the two terms in *BASE_PTR
   2392  1.1  mrg    and *OFFSET_PTR.  Return X in *BASE_PTR and 0 in *OFFSET_PTR otherwise.  */
   2393  1.1  mrg 
   2394  1.1  mrg static void
   2395  1.1  mrg loongarch_split_plus (rtx x, rtx *base_ptr, HOST_WIDE_INT *offset_ptr)
   2396  1.1  mrg {
   2397  1.1  mrg   if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
   2398  1.1  mrg     {
   2399  1.1  mrg       *base_ptr = XEXP (x, 0);
   2400  1.1  mrg       *offset_ptr = INTVAL (XEXP (x, 1));
   2401  1.1  mrg     }
   2402  1.1  mrg   else
   2403  1.1  mrg     {
   2404  1.1  mrg       *base_ptr = x;
   2405  1.1  mrg       *offset_ptr = 0;
   2406  1.1  mrg     }
   2407  1.1  mrg }
   2408  1.1  mrg 
   2409  1.1  mrg /* If X is not a valid address for mode MODE, force it into a register.  */
   2410  1.1  mrg 
   2411  1.1  mrg static rtx
   2412  1.1  mrg loongarch_force_address (rtx x, machine_mode mode)
   2413  1.1  mrg {
   2414  1.1  mrg   if (!loongarch_legitimate_address_p (mode, x, false))
   2415  1.1  mrg     x = force_reg (Pmode, x);
   2416  1.1  mrg   return x;
   2417  1.1  mrg }
   2418  1.1  mrg 
   2419  1.1  mrg /* This function is used to implement LEGITIMIZE_ADDRESS.  If X can
   2420  1.1  mrg    be legitimized in a way that the generic machinery might not expect,
   2421  1.1  mrg    return a new address, otherwise return NULL.  MODE is the mode of
   2422  1.1  mrg    the memory being accessed.  */
   2423  1.1  mrg 
   2424  1.1  mrg static rtx
   2425  1.1  mrg loongarch_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
   2426  1.1  mrg 			      machine_mode mode)
   2427  1.1  mrg {
   2428  1.1  mrg   rtx base, addr;
   2429  1.1  mrg   HOST_WIDE_INT offset;
   2430  1.1  mrg 
   2431  1.1  mrg   if (loongarch_tls_symbol_p (x))
   2432  1.1  mrg     return loongarch_legitimize_tls_address (x);
   2433  1.1  mrg 
   2434  1.1  mrg   /* Handle BASE + OFFSET using loongarch_add_offset.  */
   2435  1.1  mrg   loongarch_split_plus (x, &base, &offset);
   2436  1.1  mrg   if (offset != 0)
   2437  1.1  mrg     {
   2438  1.1  mrg       if (!loongarch_valid_base_register_p (base, mode, false))
   2439  1.1  mrg 	base = copy_to_mode_reg (Pmode, base);
   2440  1.1  mrg       addr = loongarch_add_offset (NULL, base, offset);
   2441  1.1  mrg       return loongarch_force_address (addr, mode);
   2442  1.1  mrg     }
   2443  1.1  mrg 
   2444  1.1  mrg   return x;
   2445  1.1  mrg }
   2446  1.1  mrg 
/* Load VALUE into DEST.  TEMP is as for loongarch_force_temporary.  */

void
loongarch_move_integer (rtx temp, rtx dest, unsigned HOST_WIDE_INT value)
{
  struct loongarch_integer_op codes[LARCH_MAX_INTEGER_OPS];
  machine_mode mode;
  unsigned int i, num_ops;
  rtx x;

  mode = GET_MODE (dest);
  /* Compute the synthesis sequence for VALUE.  */
  num_ops = loongarch_build_integer (codes, value);

  /* Apply each binary operation to X.  Invariant: X is a legitimate
     source operand for a SET pattern.  */
  x = GEN_INT (codes[0].value);
  for (i = 1; i < num_ops; i++)
    {
      /* Materialize the intermediate result in a register first:
	 TEMP when pseudos are unavailable, a fresh pseudo otherwise.  */
      if (!can_create_pseudo_p ())
	{
	  emit_insn (gen_rtx_SET (temp, x));
	  x = temp;
	}
      else
	x = force_reg (mode, x);

      switch (codes[i].method)
	{
	case METHOD_NORMAL:
	  /* Plain binary rtx combining X with an immediate.  */
	  x = gen_rtx_fmt_ee (codes[i].code, mode, x,
			      GEN_INT (codes[i].value));
	  break;
	case METHOD_LU32I:
	  /* Keep the low 32 bits (via a zero-extended SImode lowpart)
	     and IOR in the new upper bits.  */
	  emit_insn (
	    gen_rtx_SET (x,
			 gen_rtx_IOR (DImode,
				      gen_rtx_ZERO_EXTEND (
					DImode, gen_rtx_SUBREG (SImode, x, 0)),
				      GEN_INT (codes[i].value))));
	  break;
	case METHOD_LU52I:
	  /* Replace the top 12 bits with LU52I.D.  */
	  emit_insn (gen_lu52i_d (x, x, GEN_INT (0xfffffffffffff),
				  GEN_INT (codes[i].value)));
	  break;
	case METHOD_INSV:
	  /* Store the zero register into a 20-bit field of X at bit
	     position 32 (a bit-field insert of zeros).
	     NOTE(review): the (size 20, pos 32) pair must match what
	     loongarch_build_integer emits for METHOD_INSV — confirm.  */
	  emit_insn (
	    gen_rtx_SET (gen_rtx_ZERO_EXTRACT (DImode, x, GEN_INT (20),
					       GEN_INT (32)),
			 gen_rtx_REG (DImode, 0)));
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  /* Copy the final value into DEST.  */
  emit_insn (gen_rtx_SET (dest, x));
}
   2504  1.1  mrg 
/* Subroutine of loongarch_legitimize_move.  Move constant SRC into register
   DEST given that SRC satisfies immediate_operand but doesn't satisfy
   move_operand.  */

static void
loongarch_legitimize_const_move (machine_mode mode, rtx dest, rtx src)
{
  rtx base, offset;

  /* Split moves of big integers into smaller pieces.  */
  if (splittable_const_int_operand (src, mode))
    {
      loongarch_move_integer (dest, dest, INTVAL (src));
      return;
    }

  /* Generate the appropriate access sequences for TLS symbols.  */
  if (loongarch_tls_symbol_p (src))
    {
      loongarch_emit_move (dest, loongarch_legitimize_tls_address (src));
      return;
    }

  /* If we have (const (plus symbol offset)), and that expression cannot
     be forced into memory, load the symbol first and add in the offset.
     Prefer to do this even if the constant _can_ be forced into memory,
     as it usually produces better code.  */
  split_const (src, &base, &offset);
  if (offset != const0_rtx
      && (targetm.cannot_force_const_mem (mode, src)
	  || (can_create_pseudo_p ())))
    {
      base = loongarch_force_temporary (dest, base);
      loongarch_emit_move (dest,
			   loongarch_add_offset (NULL, base, INTVAL (offset)));
      return;
    }

  /* Last resort: load the constant from the constant pool.  */
  src = force_const_mem (mode, src);

  loongarch_emit_move (dest, src);
}
   2547  1.1  mrg 
/* If (set DEST SRC) is not a valid move instruction, emit an equivalent
   sequence that is valid.  Return true if such a sequence was emitted,
   false if the caller can emit the move as-is.  */

bool
loongarch_legitimize_move (machine_mode mode, rtx dest, rtx src)
{
  /* A non-register destination needs a register (or $zero) source.  */
  if (!register_operand (dest, mode) && !reg_or_0_operand (src, mode))
    {
      loongarch_emit_move (dest, force_reg (mode, src));
      return true;
    }

  /* Both src and dest are non-registers;  one special case is supported where
     the source is (const_int 0) and the store can source the zero register.
     NOTE(review): this test looks subsumed by the one above — a source
     that is neither a register nor const 0 should already have failed
     reg_or_0_operand; confirm whether this branch is reachable.  */
  if (!register_operand (dest, mode) && !register_operand (src, mode)
      && !const_0_operand (src, mode))
    {
      loongarch_emit_move (dest, force_reg (mode, src));
      return true;
    }

  /* We need to deal with constants that would be legitimate
     immediate_operands but aren't legitimate move_operands.  */
  if (CONSTANT_P (src) && !move_operand (src, mode))
    {
      loongarch_legitimize_const_move (mode, dest, src);
      /* Record the original constant so later passes still know what
	 DEST holds.  */
      set_unique_reg_note (get_last_insn (), REG_EQUAL, copy_rtx (src));
      return true;
    }

  return false;
}
   2581  1.1  mrg 
   2582  1.1  mrg /* Return true if OP refers to small data symbols directly.  */
   2583  1.1  mrg 
   2584  1.1  mrg static int
   2585  1.1  mrg loongarch_small_data_pattern_1 (rtx x)
   2586  1.1  mrg {
   2587  1.1  mrg   subrtx_var_iterator::array_type array;
   2588  1.1  mrg   FOR_EACH_SUBRTX_VAR (iter, array, x, ALL)
   2589  1.1  mrg     {
   2590  1.1  mrg       rtx x = *iter;
   2591  1.1  mrg 
   2592  1.1  mrg       /* We make no particular guarantee about which symbolic constants are
   2593  1.1  mrg 	 acceptable as asm operands versus which must be forced into a GPR.  */
   2594  1.1  mrg       if (GET_CODE (x) == ASM_OPERANDS)
   2595  1.1  mrg 	iter.skip_subrtxes ();
   2596  1.1  mrg       else if (MEM_P (x))
   2597  1.1  mrg 	{
   2598  1.1  mrg 	  if (loongarch_small_data_pattern_1 (XEXP (x, 0)))
   2599  1.1  mrg 	    return true;
   2600  1.1  mrg 	  iter.skip_subrtxes ();
   2601  1.1  mrg 	}
   2602  1.1  mrg     }
   2603  1.1  mrg   return false;
   2604  1.1  mrg }
   2605  1.1  mrg 
/* Return true if OP refers to small data symbols directly.
   Public wrapper around loongarch_small_data_pattern_1.  */

bool
loongarch_small_data_pattern_p (rtx op)
{
  return loongarch_small_data_pattern_1 (op);
}
   2613  1.1  mrg 
   2614  1.1  mrg /* Rewrite *LOC so that it refers to small data using explicit
   2615  1.1  mrg    relocations.  */
   2616  1.1  mrg 
   2617  1.1  mrg static void
   2618  1.1  mrg loongarch_rewrite_small_data_1 (rtx *loc)
   2619  1.1  mrg {
   2620  1.1  mrg   subrtx_ptr_iterator::array_type array;
   2621  1.1  mrg   FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
   2622  1.1  mrg     {
   2623  1.1  mrg       rtx *loc = *iter;
   2624  1.1  mrg       if (MEM_P (*loc))
   2625  1.1  mrg 	{
   2626  1.1  mrg 	  loongarch_rewrite_small_data_1 (&XEXP (*loc, 0));
   2627  1.1  mrg 	  iter.skip_subrtxes ();
   2628  1.1  mrg 	}
   2629  1.1  mrg     }
   2630  1.1  mrg }
   2631  1.1  mrg 
   2632  1.1  mrg /* Rewrite instruction pattern PATTERN so that it refers to small data
   2633  1.1  mrg    using explicit relocations.  */
   2634  1.1  mrg 
   2635  1.1  mrg rtx
   2636  1.1  mrg loongarch_rewrite_small_data (rtx pattern)
   2637  1.1  mrg {
   2638  1.1  mrg   pattern = copy_insn (pattern);
   2639  1.1  mrg   loongarch_rewrite_small_data_1 (&pattern);
   2640  1.1  mrg   return pattern;
   2641  1.1  mrg }
   2642  1.1  mrg 
   2643  1.1  mrg /* The cost of loading values from the constant pool.  It should be
   2644  1.1  mrg    larger than the cost of any constant we want to synthesize inline.  */
   2645  1.1  mrg #define CONSTANT_POOL_COST COSTS_N_INSNS (8)
   2646  1.1  mrg 
/* Return true if there is an instruction that implements CODE
   and if that instruction accepts X as an immediate operand.  */
   2649  1.1  mrg 
static int
loongarch_immediate_operand_p (int code, HOST_WIDE_INT x)
{
  switch (code)
    {
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* All shift counts are truncated to a valid constant.  */
      return true;

    case ROTATE:
    case ROTATERT:
      /* Likewise rotate counts.  */
      return true;

    case AND:
    case IOR:
    case XOR:
      /* These instructions take 12-bit unsigned immediates.  */
      return IMM12_OPERAND_UNSIGNED (x);

    case PLUS:
    case LT:
    case LTU:
      /* These instructions take 12-bit signed immediates.  */
      return IMM12_OPERAND (x);

    case EQ:
    case NE:
    case GT:
    case GTU:
      /* The "immediate" forms of these instructions are really
	 implemented as comparisons with register 0.  */
      return x == 0;

    case GE:
    case GEU:
      /* Likewise, meaning that the only valid immediate operand is 1.  */
      return x == 1;

    case LE:
      /* We add 1 to the immediate and use SLT.  */
      return IMM12_OPERAND (x + 1);

    case LEU:
      /* Likewise SLTU, but reject the always-true case (where X + 1
	 wraps to 0).  */
      return IMM12_OPERAND (x + 1) && x + 1 != 0;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* The bit position and size are immediate operands.  */
      return 1;

    default:
      /* By default assume that $0 can be used for 0.  */
      return x == 0;
    }
}
   2708  1.1  mrg 
   2709  1.1  mrg /* Return the cost of binary operation X, given that the instruction
   2710  1.1  mrg    sequence for a word-sized or smaller operation has cost SINGLE_COST
   2711  1.1  mrg    and that the sequence of a double-word operation has cost DOUBLE_COST.
   2712  1.1  mrg    If SPEED is true, optimize for speed otherwise optimize for size.  */
   2713  1.1  mrg 
   2714  1.1  mrg static int
   2715  1.1  mrg loongarch_binary_cost (rtx x, int single_cost, int double_cost, bool speed)
   2716  1.1  mrg {
   2717  1.1  mrg   int cost;
   2718  1.1  mrg 
   2719  1.1  mrg   if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD * 2)
   2720  1.1  mrg     cost = double_cost;
   2721  1.1  mrg   else
   2722  1.1  mrg     cost = single_cost;
   2723  1.1  mrg   return (cost
   2724  1.1  mrg 	  + set_src_cost (XEXP (x, 0), GET_MODE (x), speed)
   2725  1.1  mrg 	  + rtx_cost (XEXP (x, 1), GET_MODE (x), GET_CODE (x), 1, speed));
   2726  1.1  mrg }
   2727  1.1  mrg 
   2728  1.1  mrg /* Return the cost of floating-point multiplications of mode MODE.  */
   2729  1.1  mrg 
   2730  1.1  mrg static int
   2731  1.1  mrg loongarch_fp_mult_cost (machine_mode mode)
   2732  1.1  mrg {
   2733  1.1  mrg   return mode == DFmode ? loongarch_cost->fp_mult_df
   2734  1.1  mrg 			: loongarch_cost->fp_mult_sf;
   2735  1.1  mrg }
   2736  1.1  mrg 
   2737  1.1  mrg /* Return the cost of floating-point divisions of mode MODE.  */
   2738  1.1  mrg 
   2739  1.1  mrg static int
   2740  1.1  mrg loongarch_fp_div_cost (machine_mode mode)
   2741  1.1  mrg {
   2742  1.1  mrg   return mode == DFmode ? loongarch_cost->fp_div_df
   2743  1.1  mrg 			: loongarch_cost->fp_div_sf;
   2744  1.1  mrg }
   2745  1.1  mrg 
/* Return the cost of sign-extending OP, not including the
   cost of OP itself.  */

static int
loongarch_sign_extend_cost (rtx op)
{
  if (MEM_P (op))
    /* Extended loads are as cheap as unextended ones.  */
    return 0;

  /* A register source needs one explicit extension instruction.  */
  return COSTS_N_INSNS (1);
}
   2758  1.1  mrg 
/* Return the cost of zero-extending OP, not including the
   cost of OP itself.  */

static int
loongarch_zero_extend_cost (rtx op)
{
  if (MEM_P (op))
    /* Extended loads are as cheap as unextended ones.  */
    return 0;

  /* We can use ANDI.  */
  return COSTS_N_INSNS (1);
}
   2772  1.1  mrg 
   2773  1.1  mrg /* Return the cost of moving between two registers of mode MODE,
   2774  1.1  mrg    assuming that the move will be in pieces of at most UNITS bytes.  */
   2775  1.1  mrg 
   2776  1.1  mrg static int
   2777  1.1  mrg loongarch_set_reg_reg_piece_cost (machine_mode mode, unsigned int units)
   2778  1.1  mrg {
   2779  1.1  mrg   return COSTS_N_INSNS ((GET_MODE_SIZE (mode) + units - 1) / units);
   2780  1.1  mrg }
   2781  1.1  mrg 
   2782  1.1  mrg /* Return the cost of moving between two registers of mode MODE.  */
   2783  1.1  mrg 
   2784  1.1  mrg static int
   2785  1.1  mrg loongarch_set_reg_reg_cost (machine_mode mode)
   2786  1.1  mrg {
   2787  1.1  mrg   switch (GET_MODE_CLASS (mode))
   2788  1.1  mrg     {
   2789  1.1  mrg     case MODE_CC:
   2790  1.1  mrg       return loongarch_set_reg_reg_piece_cost (mode, GET_MODE_SIZE (CCmode));
   2791  1.1  mrg 
   2792  1.1  mrg     case MODE_FLOAT:
   2793  1.1  mrg     case MODE_COMPLEX_FLOAT:
   2794  1.1  mrg     case MODE_VECTOR_FLOAT:
   2795  1.1  mrg       if (TARGET_HARD_FLOAT)
   2796  1.1  mrg 	return loongarch_set_reg_reg_piece_cost (mode, UNITS_PER_HWFPVALUE);
   2797  1.1  mrg       /* Fall through.  */
   2798  1.1  mrg 
   2799  1.1  mrg     default:
   2800  1.1  mrg       return loongarch_set_reg_reg_piece_cost (mode, UNITS_PER_WORD);
   2801  1.1  mrg     }
   2802  1.1  mrg }
   2803  1.1  mrg 
/* Implement TARGET_RTX_COSTS.  Estimate the cost of expression X, which
   appears as operand OPNO of an expression with code OUTER_CODE, and store
   the result in *TOTAL.  If SPEED is true, optimize for speed, otherwise
   for size.  Return true if *TOTAL already accounts for the whole of X
   (so the caller should not recurse into its operands), false otherwise.  */

static bool
loongarch_rtx_costs (rtx x, machine_mode mode, int outer_code,
		     int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  int code = GET_CODE (x);
  bool float_mode_p = FLOAT_MODE_P (mode);
  int cost;
  rtx addr;

  /* A constant operand of a COMPARE can always be folded into the
     comparison itself, so it costs nothing.  */
  if (outer_code == COMPARE)
    {
      gcc_assert (CONSTANT_P (x));
      *total = 0;
      return true;
    }

  switch (code)
    {
    case CONST_INT:
      /* The mask 0xffffffff under AND matches the *clear_upper32
	 pattern, i.e. a single zero-extension, so the constant itself
	 is free.  */
      if (TARGET_64BIT && outer_code == AND && UINTVAL (x) == 0xffffffff)
	{
	  *total = 0;
	  return true;
	}

      /* When not optimizing for size, we care more about the cost
	 of hot code, and hot code is often in a loop.  If a constant
	 operand needs to be forced into a register, we will often be
	 able to hoist the constant load out of the loop, so the load
	 should not contribute to the cost.  */
      if (speed || loongarch_immediate_operand_p (outer_code, INTVAL (x)))
	{
	  *total = 0;
	  return true;
	}
      /* Fall through.  */

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* COST is the number of instructions needed to synthesize the
	 constant inline, or 0 if that is not possible.  */
      cost = loongarch_const_insns (x);
      if (cost > 0)
	{
	  /* A single-instruction constant feeding a SET is treated as
	     free (it can easily be rematerialized), unless it has to
	     travel through an FPR.  */
	  if (cost == 1 && outer_code == SET
	      && !(float_mode_p && TARGET_HARD_FLOAT))
	    cost = 0;
	  else if ((outer_code == SET || GET_MODE (x) == VOIDmode))
	    cost = 1;
	  *total = COSTS_N_INSNS (cost);
	  return true;
	}
      /* The value will need to be fetched from the constant pool.  */
      *total = CONSTANT_POOL_COST;
      return true;

    case MEM:
      /* If the address is legitimate, return the number of
	 instructions it needs.  */
      addr = XEXP (x, 0);
      /* Check for a scaled indexed address.  */
      if (loongarch_index_address_p (addr, mode))
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      cost = loongarch_address_insns (addr, mode, true);
      if (cost > 0)
	{
	  /* One insn for the access itself plus any address setup.  */
	  *total = COSTS_N_INSNS (cost + 1);
	  return true;
	}
      /* Otherwise use the default handling.  */
      return false;

    case FFS:
      *total = COSTS_N_INSNS (6);
      return false;

    case NOT:
      /* Doubleword NOT needs one inversion per word.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) > UNITS_PER_WORD ? 2 : 1);
      return false;

    case AND:
      /* Check for a *clear_upper32 pattern and treat it like a zero
	 extension.  See the pattern's comment for details.  */
      if (TARGET_64BIT && mode == DImode && CONST_INT_P (XEXP (x, 1))
	  && UINTVAL (XEXP (x, 1)) == 0xffffffff)
	{
	  *total = (loongarch_zero_extend_cost (XEXP (x, 0))
		    + set_src_cost (XEXP (x, 0), mode, speed));
	  return true;
	}
      /* (AND (NOT op0) (NOT op1) is a nor operation that can be done in
	 a single instruction.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  cost = GET_MODE_SIZE (mode) > UNITS_PER_WORD ? 2 : 1;
	  *total = (COSTS_N_INSNS (cost)
		    + set_src_cost (XEXP (XEXP (x, 0), 0), mode, speed)
		    + set_src_cost (XEXP (XEXP (x, 1), 0), mode, speed));
	  return true;
	}

      /* Fall through.  */

    case IOR:
    case XOR:
      /* Double-word operations use two single-word operations.  */
      *total = loongarch_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (2),
				      speed);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Constant shift amounts are cheaper to split across a doubleword
	 than variable ones.  */
      if (CONSTANT_P (XEXP (x, 1)))
	*total = loongarch_binary_cost (x, COSTS_N_INSNS (1),
					COSTS_N_INSNS (4), speed);
      else
	*total = loongarch_binary_cost (x, COSTS_N_INSNS (1),
					COSTS_N_INSNS (12), speed);
      return true;

    case ABS:
      if (float_mode_p)
	*total = loongarch_cost->fp_add;
      else
	*total = COSTS_N_INSNS (4);
      return false;

    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
    case EQ:
    case NE:
    case UNORDERED:
    case LTGT:
    case UNGE:
    case UNGT:
    case UNLE:
    case UNLT:
      /* Branch comparisons have VOIDmode, so use the first operand's
	 mode instead.  */
      mode = GET_MODE (XEXP (x, 0));
      if (FLOAT_MODE_P (mode))
	{
	  *total = loongarch_cost->fp_add;
	  return false;
	}
      *total = loongarch_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (4),
				      speed);
      return true;

    case MINUS:
    case PLUS:
      if (float_mode_p)
	{
	  *total = loongarch_cost->fp_add;
	  return false;
	}

      /* If it's an add + mult (which is equivalent to shift left) and
	 it's immediate operand satisfies const_immalsl_operand predicate.
	 Such an add+shift pair maps onto a single ALSL instruction.  */
      if ((mode == SImode || (TARGET_64BIT && mode == DImode))
	  && GET_CODE (XEXP (x, 0)) == MULT)
	{
	  rtx op2 = XEXP (XEXP (x, 0), 1);
	  if (const_immalsl_operand (op2, mode))
	    {
	      *total = (COSTS_N_INSNS (1)
			+ set_src_cost (XEXP (XEXP (x, 0), 0), mode, speed)
			+ set_src_cost (XEXP (x, 1), mode, speed));
	      return true;
	    }
	}

      /* Double-word operations require three single-word operations and
	 an SLTU.  */
      *total = loongarch_binary_cost (x, COSTS_N_INSNS (1), COSTS_N_INSNS (4),
				      speed);
      return true;

    case NEG:
      if (float_mode_p)
	*total = loongarch_cost->fp_add;
      else
	*total = COSTS_N_INSNS (GET_MODE_SIZE (mode) > UNITS_PER_WORD ? 4 : 1);
      return false;

    case FMA:
      /* Fused multiply-add is costed like a plain FP multiply.  */
      *total = loongarch_fp_mult_cost (mode);
      return false;

    case MULT:
      if (float_mode_p)
	*total = loongarch_fp_mult_cost (mode);
      else if (mode == DImode && !TARGET_64BIT)
	/* A 64-bit multiply on a 32-bit core is synthesized from
	   32-bit multiplies plus fix-up code.  */
	*total = (speed
		  ? loongarch_cost->int_mult_si * 3 + 6
		  : COSTS_N_INSNS (7));
      else if (!speed)
	*total = COSTS_N_INSNS (1) + 1;
      else if (mode == DImode)
	*total = loongarch_cost->int_mult_di;
      else
	*total = loongarch_cost->int_mult_si;
      return false;

    case DIV:
      /* Check for a reciprocal.  */
      if (float_mode_p
	  && flag_unsafe_math_optimizations
	  && XEXP (x, 0) == CONST1_RTX (mode))
	{
	  if (outer_code == SQRT || GET_CODE (XEXP (x, 1)) == SQRT)
	    /* An rsqrt<mode>a or rsqrt<mode>b pattern.  Count the
	       division as being free.  */
	    *total = set_src_cost (XEXP (x, 1), mode, speed);
	  else
	    *total = (loongarch_fp_div_cost (mode)
		      + set_src_cost (XEXP (x, 1), mode, speed));
	  return true;
	}
      /* Fall through.  */

    case SQRT:
    case MOD:
      if (float_mode_p)
	{
	  *total = loongarch_fp_div_cost (mode);
	  return false;
	}
      /* Fall through.  */

    case UDIV:
    case UMOD:
      if (!speed)
	{
	  *total = COSTS_N_INSNS (loongarch_idiv_insns (mode));
	}
      else if (mode == DImode)
	*total = loongarch_cost->int_div_di;
      else
	*total = loongarch_cost->int_div_si;
      return false;

    case SIGN_EXTEND:
      *total = loongarch_sign_extend_cost (XEXP (x, 0));
      return false;

    case ZERO_EXTEND:
      *total = loongarch_zero_extend_cost (XEXP (x, 0));
      return false;
    case TRUNCATE:
      /* Costings for highpart multiplies.  Matching patterns of the form:

	 (lshiftrt:DI (mult:DI (sign_extend:DI (...)
			       (sign_extend:DI (...))
		      (const_int 32)
      */
      if ((GET_CODE (XEXP (x, 0)) == ASHIFTRT
	   || GET_CODE (XEXP (x, 0)) == LSHIFTRT)
	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	  && ((INTVAL (XEXP (XEXP (x, 0), 1)) == 32
	       && GET_MODE (XEXP (x, 0)) == DImode)
	      || (TARGET_64BIT
		  && INTVAL (XEXP (XEXP (x, 0), 1)) == 64
		  && GET_MODE (XEXP (x, 0)) == TImode))
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND
	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == SIGN_EXTEND)
	      || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
		  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1))
		      == ZERO_EXTEND))))
	{
	  /* Base cost: the multiply itself.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (1) + 1;
	  else if (mode == DImode)
	    *total = loongarch_cost->int_mult_di;
	  else
	    *total = loongarch_cost->int_mult_si;

	  /* Sign extension is free, zero extension costs for DImode when
	     on a 64bit core / when DMUL is present.  */
	  for (int i = 0; i < 2; ++i)
	    {
	      rtx op = XEXP (XEXP (XEXP (x, 0), 0), i);
	      if (TARGET_64BIT
		  && GET_CODE (op) == ZERO_EXTEND
		  && GET_MODE (op) == DImode)
		*total += rtx_cost (op, DImode, MULT, i, speed);
	      else
		/* Charge only for the extended operand itself, not the
		   extension.  */
		*total += rtx_cost (XEXP (op, 0), VOIDmode, GET_CODE (op), 0,
				    speed);
	    }

	  return true;
	}
      return false;

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
      *total = loongarch_cost->fp_add;
      return false;

    case SET:
      /* A plain register-to-register (or zero-to-register) copy.  */
      if (register_operand (SET_DEST (x), VOIDmode)
	  && reg_or_0_operand (SET_SRC (x), VOIDmode))
	{
	  *total = loongarch_set_reg_reg_cost (GET_MODE (SET_DEST (x)));
	  return true;
	}
      return false;

    default:
      return false;
    }
}
   3135  1.1  mrg 
   3136  1.1  mrg /* Implement TARGET_ADDRESS_COST.  */
   3137  1.1  mrg 
   3138  1.1  mrg static int
   3139  1.1  mrg loongarch_address_cost (rtx addr, machine_mode mode,
   3140  1.1  mrg 			addr_space_t as ATTRIBUTE_UNUSED,
   3141  1.1  mrg 			bool speed ATTRIBUTE_UNUSED)
   3142  1.1  mrg {
   3143  1.1  mrg   return loongarch_address_insns (addr, mode, false);
   3144  1.1  mrg }
   3145  1.1  mrg 
/* Return one word of double-word value OP, taking into account the fixed
   endianness of certain registers.  HIGH_P is true to select the high part,
   false to select the low part.  */

rtx
loongarch_subword (rtx op, bool high_p)
{
  unsigned int byte;
  machine_mode mode;

  /* Byte offset of the requested word within OP.  */
  byte = high_p ? UNITS_PER_WORD : 0;
  mode = GET_MODE (op);
  if (mode == VOIDmode)
    /* Mode-less constants are treated as the widest double-word
       integer mode for the target.  */
    mode = TARGET_64BIT ? TImode : DImode;

  if (FP_REG_RTX_P (op))
    /* FPR pairs have a fixed layout: the high part always lives in
       REGNO + 1, independent of memory endianness.  */
    return gen_rtx_REG (word_mode, REGNO (op) + high_p);

  if (MEM_P (op))
    return loongarch_rewrite_small_data (adjust_address (op, word_mode, byte));

  return simplify_gen_subreg (word_mode, op, mode, byte);
}
   3169  1.1  mrg 
   3170  1.1  mrg /* Return true if a move from SRC to DEST should be split into two.
   3171  1.1  mrg    SPLIT_TYPE describes the split condition.  */
   3172  1.1  mrg 
   3173  1.1  mrg bool
   3174  1.1  mrg loongarch_split_move_p (rtx dest, rtx src)
   3175  1.1  mrg {
   3176  1.1  mrg   /* FPR-to-FPR moves can be done in a single instruction, if they're
   3177  1.1  mrg      allowed at all.  */
   3178  1.1  mrg   unsigned int size = GET_MODE_SIZE (GET_MODE (dest));
   3179  1.1  mrg   if (size == 8 && FP_REG_RTX_P (src) && FP_REG_RTX_P (dest))
   3180  1.1  mrg     return false;
   3181  1.1  mrg 
   3182  1.1  mrg   /* Check for floating-point loads and stores.  */
   3183  1.1  mrg   if (size == 8)
   3184  1.1  mrg     {
   3185  1.1  mrg       if (FP_REG_RTX_P (dest) && MEM_P (src))
   3186  1.1  mrg 	return false;
   3187  1.1  mrg       if (FP_REG_RTX_P (src) && MEM_P (dest))
   3188  1.1  mrg 	return false;
   3189  1.1  mrg     }
   3190  1.1  mrg   /* Otherwise split all multiword moves.  */
   3191  1.1  mrg   return size > UNITS_PER_WORD;
   3192  1.1  mrg }
   3193  1.1  mrg 
/* Split a move from SRC to DEST, given that loongarch_split_move_p holds.
   INSN_ is the original move instruction, or NULL; when supplied it is
   used to try to forward SRC into the following instruction.  */

void
loongarch_split_move (rtx dest, rtx src, rtx insn_)
{
  rtx low_dest;

  gcc_checking_assert (loongarch_split_move_p (dest, src));
  if (FP_REG_RTX_P (dest) || FP_REG_RTX_P (src))
    {
      /* Doubleword moves involving an FPR are handled by dedicated
	 move_doubleword_fpr* patterns, selected by mode and ABI.  */
      if (!TARGET_64BIT && GET_MODE (dest) == DImode)
	emit_insn (gen_move_doubleword_fprdi (dest, src));
      else if (!TARGET_64BIT && GET_MODE (dest) == DFmode)
	emit_insn (gen_move_doubleword_fprdf (dest, src));
      else if (TARGET_64BIT && GET_MODE (dest) == TFmode)
	emit_insn (gen_move_doubleword_fprtf (dest, src));
      else
	gcc_unreachable ();
    }
  else
    {
      /* The operation can be split into two normal moves.  Decide in
	 which order to do them.  */
      low_dest = loongarch_subword (dest, false);
      /* If writing the low destination word first would clobber a
	 register that SRC still needs, do the high word first.  */
      if (REG_P (low_dest) && reg_overlap_mentioned_p (low_dest, src))
	{
	  loongarch_emit_move (loongarch_subword (dest, true),
			       loongarch_subword (src, true));
	  loongarch_emit_move (low_dest, loongarch_subword (src, false));
	}
      else
	{
	  loongarch_emit_move (low_dest, loongarch_subword (src, false));
	  loongarch_emit_move (loongarch_subword (dest, true),
			       loongarch_subword (src, true));
	}
    }

  /* This is a hack.  See if the next insn uses DEST and if so, see if we
     can forward SRC for DEST.  This is most useful if the next insn is a
     simple store.  */
  rtx_insn *insn = (rtx_insn *) insn_;
  struct loongarch_address_info addr = {};
  if (insn)
    {
      rtx_insn *next = next_nonnote_nondebug_insn_bb (insn);
      if (next)
	{
	  rtx set = single_set (next);
	  if (set && SET_SRC (set) == dest)
	    {
	      if (MEM_P (src))
		{
		  rtx tmp = XEXP (src, 0);
		  loongarch_classify_address (&addr, tmp, GET_MODE (tmp),
					      true);
		  /* Only forward a memory SRC when the split move does
		     not clobber its base register.  */
		  if (addr.reg && !reg_overlap_mentioned_p (dest, addr.reg))
		    validate_change (next, &SET_SRC (set), src, false);
		}
	      else
		validate_change (next, &SET_SRC (set), src, false);
	    }
	}
    }
}
   3260  1.1  mrg 
/* Return true if a move from SRC to DEST should be split.  This is a
   thin wrapper around loongarch_split_move_p, kept as a separate entry
   point for use by the move-splitting machinery.  */

bool
loongarch_split_move_insn_p (rtx dest, rtx src)
{
  return loongarch_split_move_p (dest, src);
}
   3268  1.1  mrg 
/* Split a move from SRC to DEST in INSN, given that
   loongarch_split_move_insn_p holds.  INSN is forwarded so that the
   split code can try to propagate SRC into the following instruction.  */

void
loongarch_split_move_insn (rtx dest, rtx src, rtx insn)
{
  loongarch_split_move (dest, src, insn);
}
   3277  1.1  mrg 
   3278  1.1  mrg /* Implement TARGET_CONSTANT_ALIGNMENT.  */
   3279  1.1  mrg 
   3280  1.1  mrg static HOST_WIDE_INT
   3281  1.1  mrg loongarch_constant_alignment (const_tree exp, HOST_WIDE_INT align)
   3282  1.1  mrg {
   3283  1.1  mrg   if (TREE_CODE (exp) == STRING_CST || TREE_CODE (exp) == CONSTRUCTOR)
   3284  1.1  mrg     return MAX (align, BITS_PER_WORD);
   3285  1.1  mrg   return align;
   3286  1.1  mrg }
   3287  1.1  mrg 
   3288  1.1  mrg const char *
   3289  1.1  mrg loongarch_output_move_index (rtx x, machine_mode mode, bool ldr)
   3290  1.1  mrg {
   3291  1.1  mrg   int index = exact_log2 (GET_MODE_SIZE (mode));
   3292  1.1  mrg   if (!IN_RANGE (index, 0, 3))
   3293  1.1  mrg     return NULL;
   3294  1.1  mrg 
   3295  1.1  mrg   struct loongarch_address_info info;
   3296  1.1  mrg   if ((loongarch_classify_address (&info, x, mode, false)
   3297  1.1  mrg        && !(info.type == ADDRESS_REG_REG))
   3298  1.1  mrg       || !loongarch_legitimate_address_p (mode, x, false))
   3299  1.1  mrg     return NULL;
   3300  1.1  mrg 
   3301  1.1  mrg   const char *const insn[][4] =
   3302  1.1  mrg     {
   3303  1.1  mrg       {
   3304  1.1  mrg 	"stx.b\t%z1,%0",
   3305  1.1  mrg 	"stx.h\t%z1,%0",
   3306  1.1  mrg 	"stx.w\t%z1,%0",
   3307  1.1  mrg 	"stx.d\t%z1,%0",
   3308  1.1  mrg       },
   3309  1.1  mrg       {
   3310  1.1  mrg 	"ldx.bu\t%0,%1",
   3311  1.1  mrg 	"ldx.hu\t%0,%1",
   3312  1.1  mrg 	"ldx.w\t%0,%1",
   3313  1.1  mrg 	"ldx.d\t%0,%1",
   3314  1.1  mrg       }
   3315  1.1  mrg     };
   3316  1.1  mrg 
   3317  1.1  mrg   return insn[ldr][index];
   3318  1.1  mrg }
   3319  1.1  mrg 
   3320  1.1  mrg const char *
   3321  1.1  mrg loongarch_output_move_index_float (rtx x, machine_mode mode, bool ldr)
   3322  1.1  mrg {
   3323  1.1  mrg   int index = exact_log2 (GET_MODE_SIZE (mode));
   3324  1.1  mrg   if (!IN_RANGE (index, 2, 3))
   3325  1.1  mrg     return NULL;
   3326  1.1  mrg 
   3327  1.1  mrg   struct loongarch_address_info info;
   3328  1.1  mrg   if ((loongarch_classify_address (&info, x, mode, false)
   3329  1.1  mrg        && !(info.type == ADDRESS_REG_REG))
   3330  1.1  mrg       || !loongarch_legitimate_address_p (mode, x, false))
   3331  1.1  mrg     return NULL;
   3332  1.1  mrg 
   3333  1.1  mrg   const char *const insn[][2] =
   3334  1.1  mrg     {
   3335  1.1  mrg 	{
   3336  1.1  mrg 	  "fstx.s\t%1,%0",
   3337  1.1  mrg 	  "fstx.d\t%1,%0"
   3338  1.1  mrg 	},
   3339  1.1  mrg 	{
   3340  1.1  mrg 	  "fldx.s\t%0,%1",
   3341  1.1  mrg 	  "fldx.d\t%0,%1"
   3342  1.1  mrg 	},
   3343  1.1  mrg     };
   3344  1.1  mrg 
   3345  1.1  mrg   return insn[ldr][index-2];
   3346  1.1  mrg }
   3347  1.1  mrg 
   3348  1.1  mrg /* Return the appropriate instructions to move SRC into DEST.  Assume
   3349  1.1  mrg    that SRC is operand 1 and DEST is operand 0.  */
   3350  1.1  mrg 
   3351  1.1  mrg const char *
   3352  1.1  mrg loongarch_output_move (rtx dest, rtx src)
   3353  1.1  mrg {
   3354  1.1  mrg   enum rtx_code dest_code = GET_CODE (dest);
   3355  1.1  mrg   enum rtx_code src_code = GET_CODE (src);
   3356  1.1  mrg   machine_mode mode = GET_MODE (dest);
   3357  1.1  mrg   bool dbl_p = (GET_MODE_SIZE (mode) == 8);
   3358  1.1  mrg 
   3359  1.1  mrg   if (loongarch_split_move_p (dest, src))
   3360  1.1  mrg     return "#";
   3361  1.1  mrg 
   3362  1.1  mrg   if ((src_code == REG && GP_REG_P (REGNO (src)))
   3363  1.1  mrg       || (src == CONST0_RTX (mode)))
   3364  1.1  mrg     {
   3365  1.1  mrg       if (dest_code == REG)
   3366  1.1  mrg 	{
   3367  1.1  mrg 	  if (GP_REG_P (REGNO (dest)))
   3368  1.1  mrg 	    return "or\t%0,%z1,$r0";
   3369  1.1  mrg 
   3370  1.1  mrg 	  if (FP_REG_P (REGNO (dest)))
   3371  1.1  mrg 	    return dbl_p ? "movgr2fr.d\t%0,%z1" : "movgr2fr.w\t%0,%z1";
   3372  1.1  mrg 	}
   3373  1.1  mrg       if (dest_code == MEM)
   3374  1.1  mrg 	{
   3375  1.1  mrg 	  const char *insn = NULL;
   3376  1.1  mrg 	  insn = loongarch_output_move_index (XEXP (dest, 0), GET_MODE (dest),
   3377  1.1  mrg 					      false);
   3378  1.1  mrg 	  if (insn)
   3379  1.1  mrg 	    return insn;
   3380  1.1  mrg 
   3381  1.1  mrg 	  rtx offset = XEXP (dest, 0);
   3382  1.1  mrg 	  if (GET_CODE (offset) == PLUS)
   3383  1.1  mrg 	    offset = XEXP (offset, 1);
   3384  1.1  mrg 	  switch (GET_MODE_SIZE (mode))
   3385  1.1  mrg 	    {
   3386  1.1  mrg 	    case 1:
   3387  1.1  mrg 	      return "st.b\t%z1,%0";
   3388  1.1  mrg 	    case 2:
   3389  1.1  mrg 	      return "st.h\t%z1,%0";
   3390  1.1  mrg 	    case 4:
   3391  1.1  mrg 	      if (const_arith_operand (offset, Pmode))
   3392  1.1  mrg 		return "st.w\t%z1,%0";
   3393  1.1  mrg 	      else
   3394  1.1  mrg 		return "stptr.w\t%z1,%0";
   3395  1.1  mrg 	    case 8:
   3396  1.1  mrg 	      if (const_arith_operand (offset, Pmode))
   3397  1.1  mrg 		return "st.d\t%z1,%0";
   3398  1.1  mrg 	      else
   3399  1.1  mrg 		return "stptr.d\t%z1,%0";
   3400  1.1  mrg 	    default:
   3401  1.1  mrg 	      gcc_unreachable ();
   3402  1.1  mrg 	    }
   3403  1.1  mrg 	}
   3404  1.1  mrg     }
   3405  1.1  mrg   if (dest_code == REG && GP_REG_P (REGNO (dest)))
   3406  1.1  mrg     {
   3407  1.1  mrg       if (src_code == REG)
   3408  1.1  mrg 	if (FP_REG_P (REGNO (src)))
   3409  1.1  mrg 	  return dbl_p ? "movfr2gr.d\t%0,%1" : "movfr2gr.s\t%0,%1";
   3410  1.1  mrg 
   3411  1.1  mrg       if (src_code == MEM)
   3412  1.1  mrg 	{
   3413  1.1  mrg 	  const char *insn = NULL;
   3414  1.1  mrg 	  insn = loongarch_output_move_index (XEXP (src, 0), GET_MODE (src),
   3415  1.1  mrg 					      true);
   3416  1.1  mrg 	  if (insn)
   3417  1.1  mrg 	    return insn;
   3418  1.1  mrg 
   3419  1.1  mrg 	  rtx offset = XEXP (src, 0);
   3420  1.1  mrg 	  if (GET_CODE (offset) == PLUS)
   3421  1.1  mrg 	    offset = XEXP (offset, 1);
   3422  1.1  mrg 	  switch (GET_MODE_SIZE (mode))
   3423  1.1  mrg 	    {
   3424  1.1  mrg 	    case 1:
   3425  1.1  mrg 	      return "ld.bu\t%0,%1";
   3426  1.1  mrg 	    case 2:
   3427  1.1  mrg 	      return "ld.hu\t%0,%1";
   3428  1.1  mrg 	    case 4:
   3429  1.1  mrg 	      if (const_arith_operand (offset, Pmode))
   3430  1.1  mrg 		return "ld.w\t%0,%1";
   3431  1.1  mrg 	      else
   3432  1.1  mrg 		return "ldptr.w\t%0,%1";
   3433  1.1  mrg 	    case 8:
   3434  1.1  mrg 	      if (const_arith_operand (offset, Pmode))
   3435  1.1  mrg 		return "ld.d\t%0,%1";
   3436  1.1  mrg 	      else
   3437  1.1  mrg 		return "ldptr.d\t%0,%1";
   3438  1.1  mrg 	    default:
   3439  1.1  mrg 	      gcc_unreachable ();
   3440  1.1  mrg 	    }
   3441  1.1  mrg 	}
   3442  1.1  mrg 
   3443  1.1  mrg       if (src_code == CONST_INT)
   3444  1.1  mrg 	{
   3445  1.1  mrg 	  if (LU12I_INT (src))
   3446  1.1  mrg 	    return "lu12i.w\t%0,%1>>12\t\t\t# %X1";
   3447  1.1  mrg 	  else if (IMM12_INT (src))
   3448  1.1  mrg 	    return "addi.w\t%0,$r0,%1\t\t\t# %X1";
   3449  1.1  mrg 	  else if (IMM12_INT_UNSIGNED (src))
   3450  1.1  mrg 	    return "ori\t%0,$r0,%1\t\t\t# %X1";
   3451  1.1  mrg 	  else if (LU52I_INT (src))
   3452  1.1  mrg 	    return "lu52i.d\t%0,$r0,%X1>>52\t\t\t# %1";
   3453  1.1  mrg 	  else
   3454  1.1  mrg 	    gcc_unreachable ();
   3455  1.1  mrg 	}
   3456  1.1  mrg 
   3457  1.1  mrg       if (symbolic_operand (src, VOIDmode))
   3458  1.1  mrg 	{
   3459  1.1  mrg 	  if ((TARGET_CMODEL_TINY && (!loongarch_global_symbol_p (src)
   3460  1.1  mrg 				      || loongarch_symbol_binds_local_p (src)))
   3461  1.1  mrg 	      || (TARGET_CMODEL_TINY_STATIC && !loongarch_weak_symbol_p (src)))
   3462  1.1  mrg 	    {
   3463  1.1  mrg 	      /* The symbol must be aligned to 4 byte.  */
   3464  1.1  mrg 	      unsigned int align;
   3465  1.1  mrg 
   3466  1.1  mrg 	      if (LABEL_REF_P (src))
   3467  1.1  mrg 		align = 32 /* Whatever.  */;
   3468  1.1  mrg 	      else if (CONSTANT_POOL_ADDRESS_P (src))
   3469  1.1  mrg 		align = GET_MODE_ALIGNMENT (get_pool_mode (src));
   3470  1.1  mrg 	      else if (TREE_CONSTANT_POOL_ADDRESS_P (src))
   3471  1.1  mrg 		{
   3472  1.1  mrg 		  tree exp = SYMBOL_REF_DECL (src);
   3473  1.1  mrg 		  align = TYPE_ALIGN (TREE_TYPE (exp));
   3474  1.1  mrg 		  align = loongarch_constant_alignment (exp, align);
   3475  1.1  mrg 		}
   3476  1.1  mrg 	      else if (SYMBOL_REF_DECL (src))
   3477  1.1  mrg 		align = DECL_ALIGN (SYMBOL_REF_DECL (src));
   3478  1.1  mrg 	      else if (SYMBOL_REF_HAS_BLOCK_INFO_P (src)
   3479  1.1  mrg 		       && SYMBOL_REF_BLOCK (src) != NULL)
   3480  1.1  mrg 		align = SYMBOL_REF_BLOCK (src)->alignment;
   3481  1.1  mrg 	      else
   3482  1.1  mrg 		align = BITS_PER_UNIT;
   3483  1.1  mrg 
   3484  1.1  mrg 	      if (align % (4 * 8) == 0)
   3485  1.1  mrg 		return "pcaddi\t%0,%%pcrel(%1)>>2";
   3486  1.1  mrg 	    }
   3487  1.1  mrg 	  if (TARGET_CMODEL_TINY
   3488  1.1  mrg 	      || TARGET_CMODEL_TINY_STATIC
   3489  1.1  mrg 	      || TARGET_CMODEL_NORMAL
   3490  1.1  mrg 	      || TARGET_CMODEL_LARGE)
   3491  1.1  mrg 	    {
   3492  1.1  mrg 	      if (!loongarch_global_symbol_p (src)
   3493  1.1  mrg 		  || loongarch_symbol_binds_local_p (src))
   3494  1.1  mrg 		return "la.local\t%0,%1";
   3495  1.1  mrg 	      else
   3496  1.1  mrg 		return "la.global\t%0,%1";
   3497  1.1  mrg 	    }
   3498  1.1  mrg 	  if (TARGET_CMODEL_EXTREME)
   3499  1.1  mrg 	    {
   3500  1.1  mrg 	      sorry ("Normal symbol loading not implemented in extreme mode.");
   3501  1.1  mrg 	      gcc_unreachable ();
   3502  1.1  mrg 	    }
   3503  1.1  mrg 
   3504  1.1  mrg 	}
   3505  1.1  mrg     }
   3506  1.1  mrg   if (src_code == REG && FP_REG_P (REGNO (src)))
   3507  1.1  mrg     {
   3508  1.1  mrg       if (dest_code == REG && FP_REG_P (REGNO (dest)))
   3509  1.1  mrg 	return dbl_p ? "fmov.d\t%0,%1" : "fmov.s\t%0,%1";
   3510  1.1  mrg 
   3511  1.1  mrg       if (dest_code == MEM)
   3512  1.1  mrg 	{
   3513  1.1  mrg 	  const char *insn = NULL;
   3514  1.1  mrg 	  insn = loongarch_output_move_index_float (XEXP (dest, 0),
   3515  1.1  mrg 						    GET_MODE (dest),
   3516  1.1  mrg 						    false);
   3517  1.1  mrg 	  if (insn)
   3518  1.1  mrg 	    return insn;
   3519  1.1  mrg 
   3520  1.1  mrg 	  return dbl_p ? "fst.d\t%1,%0" : "fst.s\t%1,%0";
   3521  1.1  mrg 	}
   3522  1.1  mrg     }
   3523  1.1  mrg   if (dest_code == REG && FP_REG_P (REGNO (dest)))
   3524  1.1  mrg     {
   3525  1.1  mrg       if (src_code == MEM)
   3526  1.1  mrg 	{
   3527  1.1  mrg 	  const char *insn = NULL;
   3528  1.1  mrg 	  insn = loongarch_output_move_index_float (XEXP (src, 0),
   3529  1.1  mrg 						    GET_MODE (src),
   3530  1.1  mrg 						    true);
   3531  1.1  mrg 	  if (insn)
   3532  1.1  mrg 	    return insn;
   3533  1.1  mrg 
   3534  1.1  mrg 	  return dbl_p ? "fld.d\t%0,%1" : "fld.s\t%0,%1";
   3535  1.1  mrg 	}
   3536  1.1  mrg     }
   3537  1.1  mrg   gcc_unreachable ();
   3538  1.1  mrg }
   3539  1.1  mrg 
   3540  1.1  mrg /* Return true if CMP1 is a suitable second operand for integer ordering
   3541  1.1  mrg    test CODE.  */
   3542  1.1  mrg 
   3543  1.1  mrg static bool
   3544  1.1  mrg loongarch_int_order_operand_ok_p (enum rtx_code code, rtx cmp1)
   3545  1.1  mrg {
   3546  1.1  mrg   switch (code)
   3547  1.1  mrg     {
   3548  1.1  mrg     case GT:
   3549  1.1  mrg     case GTU:
   3550  1.1  mrg       return reg_or_0_operand (cmp1, VOIDmode);
   3551  1.1  mrg 
   3552  1.1  mrg     case GE:
   3553  1.1  mrg     case GEU:
   3554  1.1  mrg       return cmp1 == const1_rtx;
   3555  1.1  mrg 
   3556  1.1  mrg     case LT:
   3557  1.1  mrg     case LTU:
   3558  1.1  mrg       return arith_operand (cmp1, VOIDmode);
   3559  1.1  mrg 
   3560  1.1  mrg     case LE:
   3561  1.1  mrg       return sle_operand (cmp1, VOIDmode);
   3562  1.1  mrg 
   3563  1.1  mrg     case LEU:
   3564  1.1  mrg       return sleu_operand (cmp1, VOIDmode);
   3565  1.1  mrg 
   3566  1.1  mrg     default:
   3567  1.1  mrg       gcc_unreachable ();
   3568  1.1  mrg     }
   3569  1.1  mrg }
   3570  1.1  mrg 
   3571  1.1  mrg /* Return true if *CMP1 (of mode MODE) is a valid second operand for
   3572  1.1  mrg    integer ordering test *CODE, or if an equivalent combination can
   3573  1.1  mrg    be formed by adjusting *CODE and *CMP1.  When returning true, update
   3574  1.1  mrg    *CODE and *CMP1 with the chosen code and operand, otherwise leave
   3575  1.1  mrg    them alone.  */
   3576  1.1  mrg 
static bool
loongarch_canonicalize_int_order_test (enum rtx_code *code, rtx *cmp1,
				       machine_mode mode)
{
  HOST_WIDE_INT plus_one;

  /* Accept the comparison as-is when CMP1 is already a valid operand
     for CODE.  */
  if (loongarch_int_order_operand_ok_p (*code, *cmp1))
    return true;

  /* Otherwise try rewriting "X <= C" as "X < C + 1" (and likewise for
     the unsigned case), which is valid whenever C + 1 does not wrap
     around in MODE.  */
  if (CONST_INT_P (*cmp1))
    switch (*code)
      {
      case LE:
	plus_one = trunc_int_for_mode (UINTVAL (*cmp1) + 1, mode);
	/* The test fails exactly when C was the largest signed value
	   representable in MODE, i.e. when C + 1 wrapped negative.  */
	if (INTVAL (*cmp1) < plus_one)
	  {
	    *code = LT;
	    *cmp1 = force_reg (mode, GEN_INT (plus_one));
	    return true;
	  }
	break;

      case LEU:
	plus_one = trunc_int_for_mode (UINTVAL (*cmp1) + 1, mode);
	/* plus_one == 0 means C was the largest unsigned value in MODE,
	   so "X <= C" is always true and cannot become LTU.  */
	if (plus_one != 0)
	  {
	    *code = LTU;
	    *cmp1 = force_reg (mode, GEN_INT (plus_one));
	    return true;
	  }
	break;

      default:
	break;
      }
  return false;
}
   3614  1.1  mrg 
   3615  1.1  mrg /* Compare CMP0 and CMP1 using ordering test CODE and store the result
   3616  1.1  mrg    in TARGET.  CMP0 and TARGET are register_operands.  If INVERT_PTR
   3617  1.1  mrg    is nonnull, it's OK to set TARGET to the inverse of the result and
   3618  1.1  mrg    flip *INVERT_PTR instead.  */
   3619  1.1  mrg 
static void
loongarch_emit_int_order_test (enum rtx_code code, bool *invert_ptr,
			       rtx target, rtx cmp0, rtx cmp1)
{
  machine_mode mode;

  /* First see if there is a LoongArch instruction that can do this operation.
     If not, try doing the same for the inverse operation.  If that also
     fails, force CMP1 into a register and try again.  */
  mode = GET_MODE (cmp0);
  if (loongarch_canonicalize_int_order_test (&code, &cmp1, mode))
    loongarch_emit_binary (code, target, cmp0, cmp1);
  else
    {
      enum rtx_code inv_code = reverse_condition (code);
      if (!loongarch_canonicalize_int_order_test (&inv_code, &cmp1, mode))
	{
	  /* Neither CODE nor its inverse accepts CMP1 in its current
	     form; force CMP1 into a register and retry from the top.  */
	  cmp1 = force_reg (mode, cmp1);
	  loongarch_emit_int_order_test (code, invert_ptr, target, cmp0, cmp1);
	}
      else if (invert_ptr == 0)
	{
	  /* The caller cannot accept an inverted result, so compute the
	     inverse test into a temporary and flip it with XOR 1.  */
	  rtx inv_target;

	  inv_target = loongarch_force_binary (GET_MODE (target),
					       inv_code, cmp0, cmp1);
	  loongarch_emit_binary (XOR, target, inv_target, const1_rtx);
	}
      else
	{
	  /* Emit the inverse test and record via *INVERT_PTR that the
	     caller must interpret TARGET inverted.  */
	  *invert_ptr = !*invert_ptr;
	  loongarch_emit_binary (inv_code, target, cmp0, cmp1);
	}
    }
}
   3655  1.1  mrg 
   3656  1.1  mrg /* Return a register that is zero if CMP0 and CMP1 are equal.
   3657  1.1  mrg    The register will have the same mode as CMP0.  */
   3658  1.1  mrg 
   3659  1.1  mrg static rtx
   3660  1.1  mrg loongarch_zero_if_equal (rtx cmp0, rtx cmp1)
   3661  1.1  mrg {
   3662  1.1  mrg   if (cmp1 == const0_rtx)
   3663  1.1  mrg     return cmp0;
   3664  1.1  mrg 
   3665  1.1  mrg   if (uns_arith_operand (cmp1, VOIDmode))
   3666  1.1  mrg     return expand_binop (GET_MODE (cmp0), xor_optab, cmp0, cmp1, 0, 0,
   3667  1.1  mrg 			 OPTAB_DIRECT);
   3668  1.1  mrg 
   3669  1.1  mrg   return expand_binop (GET_MODE (cmp0), sub_optab, cmp0, cmp1, 0, 0,
   3670  1.1  mrg 		       OPTAB_DIRECT);
   3671  1.1  mrg }
   3672  1.1  mrg 
   3673  1.1  mrg /* Allocate a floating-point condition-code register of mode MODE.  */
   3674  1.1  mrg 
   3675  1.1  mrg static rtx
   3676  1.1  mrg loongarch_allocate_fcc (machine_mode mode)
   3677  1.1  mrg {
   3678  1.1  mrg   unsigned int regno, count;
   3679  1.1  mrg 
   3680  1.1  mrg   gcc_assert (TARGET_HARD_FLOAT);
   3681  1.1  mrg 
   3682  1.1  mrg   if (mode == FCCmode)
   3683  1.1  mrg     count = 1;
   3684  1.1  mrg   else
   3685  1.1  mrg     gcc_unreachable ();
   3686  1.1  mrg 
   3687  1.1  mrg   cfun->machine->next_fcc += -cfun->machine->next_fcc & (count - 1);
   3688  1.1  mrg   if (cfun->machine->next_fcc > FCC_REG_LAST - FCC_REG_FIRST)
   3689  1.1  mrg     cfun->machine->next_fcc = 0;
   3690  1.1  mrg 
   3691  1.1  mrg   regno = FCC_REG_FIRST + cfun->machine->next_fcc;
   3692  1.1  mrg   cfun->machine->next_fcc += count;
   3693  1.1  mrg   return gen_rtx_REG (mode, regno);
   3694  1.1  mrg }
   3695  1.1  mrg 
   3696  1.1  mrg /* Sign- or zero-extend OP0 and OP1 for integer comparisons.  */
   3697  1.1  mrg 
   3698  1.1  mrg static void
   3699  1.1  mrg loongarch_extend_comparands (rtx_code code, rtx *op0, rtx *op1)
   3700  1.1  mrg {
   3701  1.1  mrg   /* Comparisons consider all XLEN bits, so extend sub-XLEN values.  */
   3702  1.1  mrg   if (GET_MODE_SIZE (word_mode) > GET_MODE_SIZE (GET_MODE (*op0)))
   3703  1.1  mrg     {
   3704  1.1  mrg       /* TODO: checkout It is more profitable to zero-extend QImode values.  */
   3705  1.1  mrg       if (unsigned_condition (code) == code && GET_MODE (*op0) == QImode)
   3706  1.1  mrg 	{
   3707  1.1  mrg 	  *op0 = gen_rtx_ZERO_EXTEND (word_mode, *op0);
   3708  1.1  mrg 	  if (CONST_INT_P (*op1))
   3709  1.1  mrg 	    *op1 = GEN_INT ((uint8_t) INTVAL (*op1));
   3710  1.1  mrg 	  else
   3711  1.1  mrg 	    *op1 = gen_rtx_ZERO_EXTEND (word_mode, *op1);
   3712  1.1  mrg 	}
   3713  1.1  mrg       else
   3714  1.1  mrg 	{
   3715  1.1  mrg 	  *op0 = gen_rtx_SIGN_EXTEND (word_mode, *op0);
   3716  1.1  mrg 	  if (*op1 != const0_rtx)
   3717  1.1  mrg 	    *op1 = gen_rtx_SIGN_EXTEND (word_mode, *op1);
   3718  1.1  mrg 	}
   3719  1.1  mrg     }
   3720  1.1  mrg }
   3721  1.1  mrg 
   3722  1.1  mrg /* Convert a comparison into something that can be used in a branch.  On
   3723  1.1  mrg    entry, *OP0 and *OP1 are the values being compared and *CODE is the code
   3724  1.1  mrg    used to compare them.  Update them to describe the final comparison.  */
   3725  1.1  mrg 
static void
loongarch_emit_int_compare (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Pairs of codes related by adjusting the constant by +1 (first
     element) or -1 (second element).  */
  static const enum rtx_code
  mag_comparisons[][2] = {{LEU, LTU}, {GTU, GEU}, {LE, LT}, {GT, GE}};

  /* If the RHS constant would need a multi-instruction sequence to
     load, try to massage the comparison into a cheaper form first.  */
  if (splittable_const_int_operand (*op1, VOIDmode))
    {
      HOST_WIDE_INT rhs = INTVAL (*op1);

      if (*code == EQ || *code == NE)
	{
	  /* Convert e.g. OP0 == 2048 into OP0 - 2048 == 0.  */
	  if (IMM12_OPERAND (-rhs))
	    {
	      *op0 = loongarch_force_binary (GET_MODE (*op0), PLUS, *op0,
					     GEN_INT (-rhs));
	      *op1 = const0_rtx;
	    }
	}
      else
	{
	  /* Convert e.g. (OP0 <= 0xFFF) into (OP0 < 0x1000).  */
	  for (size_t i = 0; i < ARRAY_SIZE (mag_comparisons); i++)
	    {
	      HOST_WIDE_INT new_rhs;
	      bool increment = *code == mag_comparisons[i][0];
	      bool decrement = *code == mag_comparisons[i][1];
	      if (!increment && !decrement)
		continue;

	      /* Use the adjusted constant only when it is cheaper to
		 load and the adjustment did not flip its sign.  */
	      new_rhs = rhs + (increment ? 1 : -1);
	      if (loongarch_integer_cost (new_rhs)
		    < loongarch_integer_cost (rhs)
		  && (rhs < 0) == (new_rhs < 0))
		{
		  *op1 = GEN_INT (new_rhs);
		  *code = mag_comparisons[i][increment];
		}
	      break;
	    }
	}
    }

  /* Comparisons consider the whole word, so widen sub-word operands.  */
  loongarch_extend_comparands (*code, op0, op1);

  *op0 = force_reg (word_mode, *op0);
  if (*op1 != const0_rtx)
    *op1 = force_reg (word_mode, *op1);
}
   3776  1.1  mrg 
   3777  1.1  mrg /* Like loongarch_emit_int_compare, but for floating-point comparisons.  */
   3778  1.1  mrg 
   3779  1.1  mrg static void
   3780  1.1  mrg loongarch_emit_float_compare (enum rtx_code *code, rtx *op0, rtx *op1)
   3781  1.1  mrg {
   3782  1.1  mrg   rtx cmp_op0 = *op0;
   3783  1.1  mrg   rtx cmp_op1 = *op1;
   3784  1.1  mrg 
   3785  1.1  mrg   /* Floating-point tests use a separate FCMP.cond.fmt
   3786  1.1  mrg      comparison to set a register.  The branch or conditional move will
   3787  1.1  mrg      then compare that register against zero.
   3788  1.1  mrg 
   3789  1.1  mrg      Set CMP_CODE to the code of the comparison instruction and
   3790  1.1  mrg      *CODE to the code that the branch or move should use.  */
   3791  1.1  mrg   enum rtx_code cmp_code = *code;
   3792  1.1  mrg   /* Three FP conditions cannot be implemented by reversing the
   3793  1.1  mrg      operands for FCMP.cond.fmt, instead a reversed condition code is
   3794  1.1  mrg      required and a test for false.  */
   3795  1.1  mrg   *code = NE;
   3796  1.1  mrg   *op0 = loongarch_allocate_fcc (FCCmode);
   3797  1.1  mrg 
   3798  1.1  mrg   *op1 = const0_rtx;
   3799  1.1  mrg   loongarch_emit_binary (cmp_code, *op0, cmp_op0, cmp_op1);
   3800  1.1  mrg }
   3801  1.1  mrg 
   3802  1.1  mrg /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
   3803  1.1  mrg    and OPERAND[3].  Store the result in OPERANDS[0].
   3804  1.1  mrg 
   3805  1.1  mrg    On 64-bit targets, the mode of the comparison and target will always be
   3806  1.1  mrg    SImode, thus possibly narrower than that of the comparison's operands.  */
   3807  1.1  mrg 
   3808  1.1  mrg void
   3809  1.1  mrg loongarch_expand_scc (rtx operands[])
   3810  1.1  mrg {
   3811  1.1  mrg   rtx target = operands[0];
   3812  1.1  mrg   enum rtx_code code = GET_CODE (operands[1]);
   3813  1.1  mrg   rtx op0 = operands[2];
   3814  1.1  mrg   rtx op1 = operands[3];
   3815  1.1  mrg 
   3816  1.1  mrg   loongarch_extend_comparands (code, &op0, &op1);
   3817  1.1  mrg   op0 = force_reg (word_mode, op0);
   3818  1.1  mrg 
   3819  1.1  mrg   gcc_assert (GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT);
   3820  1.1  mrg 
   3821  1.1  mrg   if (code == EQ || code == NE)
   3822  1.1  mrg     {
   3823  1.1  mrg       rtx zie = loongarch_zero_if_equal (op0, op1);
   3824  1.1  mrg       loongarch_emit_binary (code, target, zie, const0_rtx);
   3825  1.1  mrg     }
   3826  1.1  mrg   else
   3827  1.1  mrg     loongarch_emit_int_order_test (code, 0, target, op0, op1);
   3828  1.1  mrg }
   3829  1.1  mrg 
   3830  1.1  mrg /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
   3831  1.1  mrg    CODE and jump to OPERANDS[3] if the condition holds.  */
   3832  1.1  mrg 
   3833  1.1  mrg void
   3834  1.1  mrg loongarch_expand_conditional_branch (rtx *operands)
   3835  1.1  mrg {
   3836  1.1  mrg   enum rtx_code code = GET_CODE (operands[0]);
   3837  1.1  mrg   rtx op0 = operands[1];
   3838  1.1  mrg   rtx op1 = operands[2];
   3839  1.1  mrg   rtx condition;
   3840  1.1  mrg 
   3841  1.1  mrg   if (FLOAT_MODE_P (GET_MODE (op1)))
   3842  1.1  mrg     loongarch_emit_float_compare (&code, &op0, &op1);
   3843  1.1  mrg   else
   3844  1.1  mrg     loongarch_emit_int_compare (&code, &op0, &op1);
   3845  1.1  mrg 
   3846  1.1  mrg   condition = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
   3847  1.1  mrg   emit_jump_insn (gen_condjump (condition, operands[3]));
   3848  1.1  mrg }
   3849  1.1  mrg 
   3850  1.1  mrg /* Perform the comparison in OPERANDS[1].  Move OPERANDS[2] into OPERANDS[0]
   3851  1.1  mrg    if the condition holds, otherwise move OPERANDS[3] into OPERANDS[0].  */
   3852  1.1  mrg 
void
loongarch_expand_conditional_move (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = XEXP (operands[1], 0);
  rtx op1 = XEXP (operands[1], 1);

  if (FLOAT_MODE_P (GET_MODE (op1)))
    loongarch_emit_float_compare (&code, &op0, &op1);
  else
    {
      loongarch_extend_comparands (code, &op0, &op1);

      op0 = force_reg (word_mode, op0);

      if (code == EQ || code == NE)
	{
	  /* Equality reduces to comparing a difference with zero.  */
	  op0 = loongarch_zero_if_equal (op0, op1);
	  op1 = const0_rtx;
	}
      else
	{
	  /* The comparison needs a separate scc instruction.  Store the
	     result of the scc in *OP0 and compare it against zero.  */
	  bool invert = false;
	  rtx target = gen_reg_rtx (GET_MODE (op0));
	  loongarch_emit_int_order_test (code, &invert, target, op0, op1);
	  code = invert ? EQ : NE;
	  op0 = target;
	  op1 = const0_rtx;
	}
    }

  rtx cond = gen_rtx_fmt_ee (code, GET_MODE (op0), op0, op1);
  /* There is no direct support for general conditional GP move involving
     two registers using SEL.  */
  if (INTEGRAL_MODE_P (GET_MODE (operands[2]))
      && register_operand (operands[2], VOIDmode)
      && register_operand (operands[3], VOIDmode))
    {
      machine_mode mode = GET_MODE (operands[0]);
      rtx temp = gen_reg_rtx (mode);
      rtx temp2 = gen_reg_rtx (mode);

      /* TEMP = COND ? OPERANDS[2] : 0.  */
      emit_insn (gen_rtx_SET (temp,
			      gen_rtx_IF_THEN_ELSE (mode, cond,
						    operands[2], const0_rtx)));

      /* Flip the test for the second operand.  */
      cond = gen_rtx_fmt_ee ((code == EQ) ? NE : EQ, GET_MODE (op0), op0, op1);

      /* TEMP2 = !COND ? OPERANDS[3] : 0.  */
      emit_insn (gen_rtx_SET (temp2,
			      gen_rtx_IF_THEN_ELSE (mode, cond,
						    operands[3], const0_rtx)));

      /* Merge the two results, at least one is guaranteed to be zero.  */
      emit_insn (gen_rtx_SET (operands[0], gen_rtx_IOR (mode, temp, temp2)));
    }
  else
    emit_insn (gen_rtx_SET (operands[0],
			    gen_rtx_IF_THEN_ELSE (GET_MODE (operands[0]), cond,
						  operands[2], operands[3])));
}
   3916  1.1  mrg 
   3917  1.1  mrg /* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
   3918  1.1  mrg 
   3919  1.1  mrg static void
   3920  1.1  mrg loongarch_va_start (tree valist, rtx nextarg)
   3921  1.1  mrg {
   3922  1.1  mrg   nextarg = plus_constant (Pmode, nextarg, -cfun->machine->varargs_size);
   3923  1.1  mrg   std_expand_builtin_va_start (valist, nextarg);
   3924  1.1  mrg }
   3925  1.1  mrg 
   3926  1.1  mrg /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
   3927  1.1  mrg 
   3928  1.1  mrg static bool
   3929  1.1  mrg loongarch_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
   3930  1.1  mrg 				   tree exp ATTRIBUTE_UNUSED)
   3931  1.1  mrg {
   3932  1.1  mrg   /* Always OK.  */
   3933  1.1  mrg   return true;
   3934  1.1  mrg }
   3935  1.1  mrg 
   3936  1.1  mrg /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   3937  1.1  mrg    Assume that the areas do not overlap.  */
   3938  1.1  mrg 
static void
loongarch_block_move_straight (rtx dest, rtx src, HOST_WIDE_INT length)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Pick the widest chunk the alignment of both operands allows,
     capped at the word size.  */
  bits = MIN (BITS_PER_WORD, MIN (MEM_ALIGN (src), MEM_ALIGN (dest)));

  mode = int_mode_for_size (bits, 0).require ();
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible into temporaries...  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      loongarch_emit_move (regs[i], adjust_address (src, mode, offset));
    }

  /* ...then store them all back out to DEST.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    loongarch_emit_move (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)),
		      (enum memop_ret) 0);
    }
}
   3977  1.1  mrg 
   3978  1.1  mrg /* Helper function for doing a loop-based block operation on memory
   3979  1.1  mrg    reference MEM.  Each iteration of the loop will operate on LENGTH
   3980  1.1  mrg    bytes of MEM.
   3981  1.1  mrg 
   3982  1.1  mrg    Create a new base register for use within the loop and point it to
   3983  1.1  mrg    the start of MEM.  Create a new memory reference that uses this
   3984  1.1  mrg    register.  Store them in *LOOP_REG and *LOOP_MEM respectively.  */
   3985  1.1  mrg 
   3986  1.1  mrg static void
   3987  1.1  mrg loongarch_adjust_block_mem (rtx mem, HOST_WIDE_INT length, rtx *loop_reg,
   3988  1.1  mrg 			    rtx *loop_mem)
   3989  1.1  mrg {
   3990  1.1  mrg   *loop_reg = copy_addr_to_reg (XEXP (mem, 0));
   3991  1.1  mrg 
   3992  1.1  mrg   /* Although the new mem does not refer to a known location,
   3993  1.1  mrg      it does keep up to LENGTH bytes of alignment.  */
   3994  1.1  mrg   *loop_mem = change_address (mem, BLKmode, *loop_reg);
   3995  1.1  mrg   set_mem_align (*loop_mem, MIN (MEM_ALIGN (mem), length * BITS_PER_UNIT));
   3996  1.1  mrg }
   3997  1.1  mrg 
   3998  1.1  mrg /* Move LENGTH bytes from SRC to DEST using a loop that moves BYTES_PER_ITER
   3999  1.1  mrg    bytes at a time.  LENGTH must be at least BYTES_PER_ITER.  Assume that
   4000  1.1  mrg    the memory regions do not overlap.  */
   4001  1.1  mrg 
static void
loongarch_block_move_loop (rtx dest, rtx src, HOST_WIDE_INT length,
			   HOST_WIDE_INT bytes_per_iter)
{
  rtx_code_label *label;
  rtx src_reg, dest_reg, final_src, test;
  HOST_WIDE_INT leftover;

  /* The loop copies a whole number of iterations; any remainder is
     handled with straight-line code afterwards.  */
  leftover = length % bytes_per_iter;
  length -= leftover;

  /* Create registers and memory references for use within the loop.  */
  loongarch_adjust_block_mem (src, bytes_per_iter, &src_reg, &src);
  loongarch_adjust_block_mem (dest, bytes_per_iter, &dest_reg, &dest);

  /* Calculate the value that SRC_REG should have after the last iteration
     of the loop.  */
  final_src = expand_simple_binop (Pmode, PLUS, src_reg, GEN_INT (length), 0,
				   0, OPTAB_WIDEN);

  /* Emit the start of the loop.  */
  label = gen_label_rtx ();
  emit_label (label);

  /* Emit the loop body.  */
  loongarch_block_move_straight (dest, src, bytes_per_iter);

  /* Move on to the next block.  */
  loongarch_emit_move (src_reg,
		       plus_constant (Pmode, src_reg, bytes_per_iter));
  loongarch_emit_move (dest_reg,
		       plus_constant (Pmode, dest_reg, bytes_per_iter));

  /* Emit the loop condition: branch back while SRC_REG has not yet
     reached FINAL_SRC.  */
  test = gen_rtx_NE (VOIDmode, src_reg, final_src);
  if (Pmode == DImode)
    emit_jump_insn (gen_cbranchdi4 (test, src_reg, final_src, label));
  else
    emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));

  /* Mop up any left-over bytes.  */
  if (leftover)
    loongarch_block_move_straight (dest, src, leftover);
  else
    /* Temporary fix for PR79150.  */
    emit_insn (gen_nop ());
}
   4049  1.1  mrg 
   4050  1.1  mrg /* Expand a cpymemsi instruction, which copies LENGTH bytes from
   4051  1.1  mrg    memory reference SRC to memory reference DEST.  */
   4052  1.1  mrg 
   4053  1.1  mrg bool
   4054  1.1  mrg loongarch_expand_block_move (rtx dest, rtx src, rtx length)
   4055  1.1  mrg {
   4056  1.1  mrg   int max_move_bytes = LARCH_MAX_MOVE_BYTES_STRAIGHT;
   4057  1.1  mrg 
   4058  1.1  mrg   if (CONST_INT_P (length)
   4059  1.1  mrg       && INTVAL (length) <= loongarch_max_inline_memcpy_size)
   4060  1.1  mrg     {
   4061  1.1  mrg       if (INTVAL (length) <= max_move_bytes)
   4062  1.1  mrg 	{
   4063  1.1  mrg 	  loongarch_block_move_straight (dest, src, INTVAL (length));
   4064  1.1  mrg 	  return true;
   4065  1.1  mrg 	}
   4066  1.1  mrg       else if (optimize)
   4067  1.1  mrg 	{
   4068  1.1  mrg 	  loongarch_block_move_loop (dest, src, INTVAL (length),
   4069  1.1  mrg 				     LARCH_MAX_MOVE_BYTES_PER_LOOP_ITER);
   4070  1.1  mrg 	  return true;
   4071  1.1  mrg 	}
   4072  1.1  mrg     }
   4073  1.1  mrg   return false;
   4074  1.1  mrg }
   4075  1.1  mrg 
   4076  1.1  mrg /* Return true if loongarch_expand_block_move is the preferred
   4077  1.1  mrg    implementation of the 'cpymemsi' template.  */
   4078  1.1  mrg 
   4079  1.1  mrg bool
   4080  1.1  mrg loongarch_do_optimize_block_move_p (void)
   4081  1.1  mrg {
   4082  1.1  mrg   /* if -m[no-]memcpy is given explicitly.  */
   4083  1.1  mrg   if (target_flags_explicit & MASK_MEMCPY)
   4084  1.1  mrg     return !TARGET_MEMCPY;
   4085  1.1  mrg 
   4086  1.1  mrg   /* if not, don't optimize under -Os.  */
   4087  1.1  mrg   return !optimize_size;
   4088  1.1  mrg }
   4089  1.1  mrg 
   4090  1.1  mrg /* Expand a QI or HI mode atomic memory operation.
   4091  1.1  mrg 
   4092  1.1  mrg    GENERATOR contains a pointer to the gen_* function that generates
   4093  1.1  mrg    the SI mode underlying atomic operation using masks that we
   4094  1.1  mrg    calculate.
   4095  1.1  mrg 
   4096  1.1  mrg    RESULT is the return register for the operation.  Its value is NULL
   4097  1.1  mrg    if unused.
   4098  1.1  mrg 
   4099  1.1  mrg    MEM is the location of the atomic access.
   4100  1.1  mrg 
   4101  1.1  mrg    OLDVAL is the first operand for the operation.
   4102  1.1  mrg 
   4103  1.1  mrg    NEWVAL is the optional second operand for the operation.  Its value
   4104  1.1  mrg    is NULL if unused.  */
   4105  1.1  mrg 
void
loongarch_expand_atomic_qihi (union loongarch_gen_fn_ptrs generator,
			      rtx result, rtx mem, rtx oldval, rtx newval,
			      rtx model)
{
  rtx orig_addr, memsi_addr, memsi, shift, shiftsi, unshifted_mask;
  rtx unshifted_mask_reg, mask, inverted_mask, si_op;
  rtx res = NULL;
  machine_mode mode;

  mode = GET_MODE (mem);

  /* Compute the address of the containing SImode value by clearing the
     low two address bits.  */
  orig_addr = force_reg (Pmode, XEXP (mem, 0));
  memsi_addr = loongarch_force_binary (Pmode, AND, orig_addr,
				       force_reg (Pmode, GEN_INT (-4)));

  /* Create a memory reference for it.  */
  memsi = gen_rtx_MEM (SImode, memsi_addr);
  set_mem_alias_set (memsi, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (memsi) = MEM_VOLATILE_P (mem);

  /* Work out the byte offset of the QImode or HImode value,
     counting from the least significant byte.  */
  shift = loongarch_force_binary (Pmode, AND, orig_addr, GEN_INT (3));
  /* Multiply by eight to convert the shift value from bytes to bits.  */
  loongarch_emit_binary (ASHIFT, shift, shift, GEN_INT (3));

  /* Make the final shift an SImode value, so that it can be used in
     SImode operations.  */
  shiftsi = force_reg (SImode, gen_lowpart (SImode, shift));

  /* Set MASK to an inclusive mask of the QImode or HImode value.  */
  unshifted_mask = GEN_INT (GET_MODE_MASK (mode));
  unshifted_mask_reg = force_reg (SImode, unshifted_mask);
  mask = loongarch_force_binary (SImode, ASHIFT, unshifted_mask_reg, shiftsi);

  /* Compute the equivalent exclusive mask.  */
  inverted_mask = gen_reg_rtx (SImode);
  emit_insn (gen_rtx_SET (inverted_mask, gen_rtx_NOT (SImode, mask)));

  /* Shift the old value into place.  */
  if (oldval != const0_rtx)
    {
      oldval = convert_modes (SImode, mode, oldval, true);
      oldval = force_reg (SImode, oldval);
      oldval = loongarch_force_binary (SImode, ASHIFT, oldval, shiftsi);
    }

  /* Do the same for the new value.  */
  if (newval && newval != const0_rtx)
    {
      newval = convert_modes (SImode, mode, newval, true);
      newval = force_reg (SImode, newval);
      newval = loongarch_force_binary (SImode, ASHIFT, newval, shiftsi);
    }

  /* Do the SImode atomic access.  */
  if (result)
    res = gen_reg_rtx (SImode);

  /* Pick the generator matching the operands present: fn_7 takes both
     OLDVAL and NEWVAL plus a result, fn_6 takes OLDVAL and a result,
     fn_5 takes OLDVAL only.  */
  if (newval)
    si_op = generator.fn_7 (res, memsi, mask, inverted_mask, oldval, newval,
			    model);
  else if (result)
    si_op = generator.fn_6 (res, memsi, mask, inverted_mask, oldval, model);
  else
    si_op = generator.fn_5 (memsi, mask, inverted_mask, oldval, model);

  emit_insn (si_op);

  if (result)
    {
      /* Shift and convert the result: isolate the field with MASK,
	 shift it back down, then narrow to RESULT's mode.  */
      loongarch_emit_binary (AND, res, res, mask);
      loongarch_emit_binary (LSHIFTRT, res, res, shiftsi);
      loongarch_emit_move (result, gen_lowpart (GET_MODE (result), res));
    }
}
   4185  1.1  mrg 
   4186  1.1  mrg /* Return true if (zero_extract OP WIDTH BITPOS) can be used as the
   4187  1.1  mrg    source of an "ext" instruction or the destination of an "ins"
   4188  1.1  mrg    instruction.  OP must be a register operand and the following
   4189  1.1  mrg    conditions must hold:
   4190  1.1  mrg 
   4191  1.1  mrg    0 <= BITPOS < GET_MODE_BITSIZE (GET_MODE (op))
   4192  1.1  mrg    0 < WIDTH <= GET_MODE_BITSIZE (GET_MODE (op))
   4193  1.1  mrg    0 < BITPOS + WIDTH <= GET_MODE_BITSIZE (GET_MODE (op))
   4194  1.1  mrg 
   4195  1.1  mrg    Also reject lengths equal to a word as they are better handled
   4196  1.1  mrg    by the move patterns.  */
   4197  1.1  mrg 
   4198  1.1  mrg bool
   4199  1.1  mrg loongarch_use_ins_ext_p (rtx op, HOST_WIDE_INT width, HOST_WIDE_INT bitpos)
   4200  1.1  mrg {
   4201  1.1  mrg   if (!register_operand (op, VOIDmode)
   4202  1.1  mrg       || GET_MODE_BITSIZE (GET_MODE (op)) > BITS_PER_WORD)
   4203  1.1  mrg     return false;
   4204  1.1  mrg 
   4205  1.1  mrg   if (!IN_RANGE (width, 1, GET_MODE_BITSIZE (GET_MODE (op)) - 1))
   4206  1.1  mrg     return false;
   4207  1.1  mrg 
   4208  1.1  mrg   if (bitpos < 0 || bitpos + width > GET_MODE_BITSIZE (GET_MODE (op)))
   4209  1.1  mrg     return false;
   4210  1.1  mrg 
   4211  1.1  mrg   return true;
   4212  1.1  mrg }
   4213  1.1  mrg 
   4214  1.1  mrg /* Print the text for PRINT_OPERAND punctation character CH to FILE.
   4215  1.1  mrg    The punctuation characters are:
   4216  1.1  mrg 
   4217  1.1  mrg    '.'	Print the name of the register with a hard-wired zero (zero or $r0).
   4218  1.1  mrg    '$'	Print the name of the stack pointer register (sp or $r3).
   4219  1.1  mrg 
   4220  1.1  mrg    See also loongarch_init_print_operand_punct.  */
   4221  1.1  mrg 
   4222  1.1  mrg static void
   4223  1.1  mrg loongarch_print_operand_punctuation (FILE *file, int ch)
   4224  1.1  mrg {
   4225  1.1  mrg   switch (ch)
   4226  1.1  mrg     {
   4227  1.1  mrg     case '.':
   4228  1.1  mrg       fputs (reg_names[GP_REG_FIRST + 0], file);
   4229  1.1  mrg       break;
   4230  1.1  mrg 
   4231  1.1  mrg     case '$':
   4232  1.1  mrg       fputs (reg_names[STACK_POINTER_REGNUM], file);
   4233  1.1  mrg       break;
   4234  1.1  mrg 
   4235  1.1  mrg     default:
   4236  1.1  mrg       gcc_unreachable ();
   4237  1.1  mrg       break;
   4238  1.1  mrg     }
   4239  1.1  mrg }
   4240  1.1  mrg 
   4241  1.1  mrg /* Initialize loongarch_print_operand_punct.  */
   4242  1.1  mrg 
   4243  1.1  mrg static void
   4244  1.1  mrg loongarch_init_print_operand_punct (void)
   4245  1.1  mrg {
   4246  1.1  mrg   const char *p;
   4247  1.1  mrg 
   4248  1.1  mrg   for (p = ".$"; *p; p++)
   4249  1.1  mrg     loongarch_print_operand_punct[(unsigned char) *p] = true;
   4250  1.1  mrg }
   4251  1.1  mrg 
   4252  1.1  mrg /* PRINT_OPERAND prefix LETTER refers to the integer branch instruction
   4253  1.1  mrg    associated with condition CODE.  Print the condition part of the
   4254  1.1  mrg    opcode to FILE.  */
   4255  1.1  mrg 
   4256  1.1  mrg static void
   4257  1.1  mrg loongarch_print_int_branch_condition (FILE *file, enum rtx_code code,
   4258  1.1  mrg 				      int letter)
   4259  1.1  mrg {
   4260  1.1  mrg   switch (code)
   4261  1.1  mrg     {
   4262  1.1  mrg     case EQ:
   4263  1.1  mrg     case NE:
   4264  1.1  mrg     case GT:
   4265  1.1  mrg     case GE:
   4266  1.1  mrg     case LT:
   4267  1.1  mrg     case LE:
   4268  1.1  mrg     case GTU:
   4269  1.1  mrg     case GEU:
   4270  1.1  mrg     case LTU:
   4271  1.1  mrg     case LEU:
   4272  1.1  mrg       /* Conveniently, the LoongArch names for these conditions are the same
   4273  1.1  mrg 	 as their RTL equivalents.  */
   4274  1.1  mrg       fputs (GET_RTX_NAME (code), file);
   4275  1.1  mrg       break;
   4276  1.1  mrg 
   4277  1.1  mrg     default:
   4278  1.1  mrg       output_operand_lossage ("'%%%c' is not a valid operand prefix", letter);
   4279  1.1  mrg       break;
   4280  1.1  mrg     }
   4281  1.1  mrg }
   4282  1.1  mrg 
   4283  1.1  mrg /* Likewise floating-point branches.  */
   4284  1.1  mrg 
   4285  1.1  mrg static void
   4286  1.1  mrg loongarch_print_float_branch_condition (FILE *file, enum rtx_code code,
   4287  1.1  mrg 					int letter)
   4288  1.1  mrg {
   4289  1.1  mrg   switch (code)
   4290  1.1  mrg     {
   4291  1.1  mrg     case EQ:
   4292  1.1  mrg       fputs ("ceqz", file);
   4293  1.1  mrg       break;
   4294  1.1  mrg 
   4295  1.1  mrg     case NE:
   4296  1.1  mrg       fputs ("cnez", file);
   4297  1.1  mrg       break;
   4298  1.1  mrg 
   4299  1.1  mrg     default:
   4300  1.1  mrg       output_operand_lossage ("'%%%c' is not a valid operand prefix", letter);
   4301  1.1  mrg       break;
   4302  1.1  mrg     }
   4303  1.1  mrg }
   4304  1.1  mrg 
   4305  1.1  mrg /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  */
   4306  1.1  mrg 
   4307  1.1  mrg static bool
   4308  1.1  mrg loongarch_print_operand_punct_valid_p (unsigned char code)
   4309  1.1  mrg {
   4310  1.1  mrg   return loongarch_print_operand_punct[code];
   4311  1.1  mrg }
   4312  1.1  mrg 
   4313  1.1  mrg /* Return true if a FENCE should be emitted to before a memory access to
   4314  1.1  mrg    implement the release portion of memory model MODEL.  */
   4315  1.1  mrg 
   4316  1.1  mrg static bool
   4317  1.1  mrg loongarch_memmodel_needs_rel_acq_fence (enum memmodel model)
   4318  1.1  mrg {
   4319  1.1  mrg   switch (model)
   4320  1.1  mrg     {
   4321  1.1  mrg       case MEMMODEL_ACQ_REL:
   4322  1.1  mrg       case MEMMODEL_SEQ_CST:
   4323  1.1  mrg       case MEMMODEL_SYNC_SEQ_CST:
   4324  1.1  mrg       case MEMMODEL_RELEASE:
   4325  1.1  mrg       case MEMMODEL_SYNC_RELEASE:
   4326  1.1  mrg       case MEMMODEL_ACQUIRE:
   4327  1.1  mrg       case MEMMODEL_CONSUME:
   4328  1.1  mrg       case MEMMODEL_SYNC_ACQUIRE:
   4329  1.1  mrg 	return true;
   4330  1.1  mrg 
   4331  1.1  mrg       case MEMMODEL_RELAXED:
   4332  1.1  mrg 	return false;
   4333  1.1  mrg 
   4334  1.1  mrg       default:
   4335  1.1  mrg 	gcc_unreachable ();
   4336  1.1  mrg     }
   4337  1.1  mrg }
   4338  1.1  mrg 
   4339  1.1  mrg /* Return true if a FENCE should be emitted after a failed CAS to
   4340  1.1  mrg    implement the acquire semantic of failure_memorder.  */
   4341  1.1  mrg 
   4342  1.1  mrg static bool
   4343  1.1  mrg loongarch_cas_failure_memorder_needs_acquire (enum memmodel model)
   4344  1.1  mrg {
   4345  1.1  mrg   switch (memmodel_base (model))
   4346  1.1  mrg     {
   4347  1.1  mrg     case MEMMODEL_ACQUIRE:
   4348  1.1  mrg     case MEMMODEL_ACQ_REL:
   4349  1.1  mrg     case MEMMODEL_SEQ_CST:
   4350  1.1  mrg       return true;
   4351  1.1  mrg 
   4352  1.1  mrg     case MEMMODEL_RELAXED:
   4353  1.1  mrg     case MEMMODEL_RELEASE:
   4354  1.1  mrg       return false;
   4355  1.1  mrg 
   4356  1.1  mrg     /* MEMMODEL_CONSUME is deliberately not handled because it's always
   4357  1.1  mrg        replaced by MEMMODEL_ACQUIRE as at now.  If you see an ICE caused by
   4358  1.1  mrg        MEMMODEL_CONSUME, read the change (re)introducing it carefully and
   4359  1.1  mrg        decide what to do.  See PR 59448 and get_memmodel in builtins.cc.  */
   4360  1.1  mrg     default:
   4361  1.1  mrg       gcc_unreachable ();
   4362  1.1  mrg     }
   4363  1.1  mrg }
   4364  1.1  mrg 
/* Implement TARGET_PRINT_OPERAND.  The LoongArch-specific operand codes are:

   'X'	Print CONST_INT OP in hexadecimal format.
   'x'	Print the low 16 bits of CONST_INT OP in hexadecimal format.
   'd'	Print CONST_INT OP in decimal.
   'm'	Print one less than CONST_INT OP in decimal.
   'y'	Print exact log2 of CONST_INT OP in decimal.
   'C'	Print the integer branch condition for comparison OP.
   'N'	Print the inverse of the integer branch condition for comparison OP.
   'F'	Print the FPU branch condition for comparison OP.
   'W'	Print the inverse of the FPU branch condition for comparison OP.
   'T'	Print 'f' for (eq:CC ...), 't' for (ne:CC ...),
	      'z' for (eq:?I ...), 'n' for (ne:?I ...).
   't'	Like 'T', but with the EQ/NE cases reversed
   'Y'	Print loongarch_fp_conditions[INTVAL (OP)]
   'Z'	Print OP and a comma for 8CC, otherwise print nothing.
   'z'	Print $0 if OP is zero, otherwise print OP normally.
   'b'	Print the address of a memory operand, without offset.
   'V'	Print exact log2 of CONST_INT OP element 0 of a replicated
	  CONST_VECTOR in decimal.
   'A'	Print a _DB suffix if the memory model requires a release.
   'G'	Print a DBAR insn for CAS failure (with an acquire semantic if
	needed, otherwise a simple load-load barrier).
   'i'	Print i if the operand is not a register.

   NOTE(review): a 'D' prefix (print a MEM's address plus 4) is also
   handled in the MEM case below but is not listed here — confirm
   whether any pattern still uses it.  */

static void
loongarch_print_operand (FILE *file, rtx op, int letter)
{
  enum rtx_code code;

  /* Punctuation characters ('.', '$') take no operand.  */
  if (loongarch_print_operand_punct_valid_p (letter))
    {
      loongarch_print_operand_punctuation (file, letter);
      return;
    }

  gcc_assert (op);
  code = GET_CODE (op);

  switch (letter)
    {
    case 'X':
      if (CONST_INT_P (op))
	fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (op));
      else
	output_operand_lossage ("invalid use of '%%%c'", letter);
      break;

    case 'x':
      if (CONST_INT_P (op))
	fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (op) & 0xffff);
      else
	output_operand_lossage ("invalid use of '%%%c'", letter);
      break;

    case 'd':
      if (CONST_INT_P (op))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      else
	output_operand_lossage ("invalid use of '%%%c'", letter);
      break;

    case 'm':
      if (CONST_INT_P (op))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op) - 1);
      else
	output_operand_lossage ("invalid use of '%%%c'", letter);
      break;

    case 'y':
      if (CONST_INT_P (op))
	{
	  /* Only exact powers of two are printable; anything else is
	     a bad operand.  */
	  int val = exact_log2 (INTVAL (op));
	  if (val != -1)
	    fprintf (file, "%d", val);
	  else
	    output_operand_lossage ("invalid use of '%%%c'", letter);
	}
      else
	output_operand_lossage ("invalid use of '%%%c'", letter);
      break;

    case 'V':
      if (CONST_VECTOR_P (op))
	{
	  /* Use element 0 of the (replicated) vector, masked to the
	     element mode's width, and require it to be a power of 2.  */
	  machine_mode mode = GET_MODE_INNER (GET_MODE (op));
	  unsigned HOST_WIDE_INT val = UINTVAL (CONST_VECTOR_ELT (op, 0));
	  int vlog2 = exact_log2 (val & GET_MODE_MASK (mode));
	  if (vlog2 != -1)
	    fprintf (file, "%d", vlog2);
	  else
	    output_operand_lossage ("invalid use of '%%%c'", letter);
	}
      else
	output_operand_lossage ("invalid use of '%%%c'", letter);
      break;

    case 'C':
      loongarch_print_int_branch_condition (file, code, letter);
      break;

    case 'N':
      loongarch_print_int_branch_condition (file, reverse_condition (code),
					    letter);
      break;

    case 'F':
      loongarch_print_float_branch_condition (file, code, letter);
      break;

    case 'W':
      loongarch_print_float_branch_condition (file, reverse_condition (code),
					      letter);
      break;

    case 'T':
    case 't':
      {
	/* Pick from "zfnt": FCC comparisons print 'f'/'t', GPR
	   comparisons print 'z'/'n', selected by TRUTH.  */
	int truth = (code == NE) == (letter == 'T');
	fputc ("zfnt"[truth * 2 + FCC_REG_P (REGNO (XEXP (op, 0)))], file);
      }
      break;

    case 'Y':
      if (code == CONST_INT
	  && UINTVAL (op) < ARRAY_SIZE (loongarch_fp_conditions))
	fputs (loongarch_fp_conditions[UINTVAL (op)], file);
      else
	output_operand_lossage ("'%%%c' is not a valid operand prefix",
				letter);
      break;

    case 'Z':
      loongarch_print_operand (file, op, 0);
      fputc (',', file);
      break;

    case 'A':
      if (loongarch_memmodel_needs_rel_acq_fence ((enum memmodel) INTVAL (op)))
	fputs ("_db", file);
      break;

    case 'G':
      /* Acquire barrier after a failed CAS, or a plain load-load
	 barrier otherwise.  */
      if (loongarch_cas_failure_memorder_needs_acquire (
	    memmodel_from_int (INTVAL (op))))
	fputs ("dbar\t0b10100", file);
      else
	fputs ("dbar\t0x700", file);
      break;

    case 'i':
      if (code != REG)
	fputs ("i", file);
      break;

    default:
      /* No (or unrecognized) prefix: print OP itself.  The 'z' and
	 'b'/'D' prefixes are also resolved here since they have no
	 case of their own.  */
      switch (code)
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);
	    if (letter && letter != 'z')
	      output_operand_lossage ("invalid use of '%%%c'", letter);
	    fprintf (file, "%s", reg_names[regno]);
	  }
	  break;

	case MEM:
	  if (letter == 'D')
	    output_address (GET_MODE (op),
			    plus_constant (Pmode, XEXP (op, 0), 4));
	  else if (letter == 'b')
	    {
	      gcc_assert (REG_P (XEXP (op, 0)));
	      loongarch_print_operand (file, XEXP (op, 0), 0);
	    }
	  else if (letter && letter != 'z')
	    output_operand_lossage ("invalid use of '%%%c'", letter);
	  else
	    output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	default:
	  /* '%z' with a zero constant prints the hard-wired zero
	     register; otherwise print the (unspec-stripped) constant.  */
	  if (letter == 'z' && op == CONST0_RTX (GET_MODE (op)))
	    fputs (reg_names[GP_REG_FIRST], file);
	  else if (letter && letter != 'z')
	    output_operand_lossage ("invalid use of '%%%c'", letter);
	  else
	    output_addr_const (file, loongarch_strip_unspec_address (op));
	  break;
	}
    }
}
   4558  1.1  mrg 
   4559  1.1  mrg /* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
   4560  1.1  mrg 
   4561  1.1  mrg static void
   4562  1.1  mrg loongarch_print_operand_address (FILE *file, machine_mode /* mode  */, rtx x)
   4563  1.1  mrg {
   4564  1.1  mrg   struct loongarch_address_info addr;
   4565  1.1  mrg 
   4566  1.1  mrg   if (loongarch_classify_address (&addr, x, word_mode, true))
   4567  1.1  mrg     switch (addr.type)
   4568  1.1  mrg       {
   4569  1.1  mrg       case ADDRESS_REG:
   4570  1.1  mrg 	fprintf (file, "%s,", reg_names[REGNO (addr.reg)]);
   4571  1.1  mrg 	loongarch_print_operand (file, addr.offset, 0);
   4572  1.1  mrg 	return;
   4573  1.1  mrg 
   4574  1.1  mrg       case ADDRESS_REG_REG:
   4575  1.1  mrg 	fprintf (file, "%s,%s", reg_names[REGNO (addr.reg)],
   4576  1.1  mrg 				reg_names[REGNO (addr.offset)]);
   4577  1.1  mrg 	return;
   4578  1.1  mrg 
   4579  1.1  mrg       case ADDRESS_CONST_INT:
   4580  1.1  mrg 	fprintf (file, "%s,", reg_names[GP_REG_FIRST]);
   4581  1.1  mrg 	output_addr_const (file, x);
   4582  1.1  mrg 	return;
   4583  1.1  mrg 
   4584  1.1  mrg       case ADDRESS_SYMBOLIC:
   4585  1.1  mrg 	output_addr_const (file, loongarch_strip_unspec_address (x));
   4586  1.1  mrg 	return;
   4587  1.1  mrg       }
   4588  1.1  mrg   if (CONST_INT_P (x))
   4589  1.1  mrg     output_addr_const (file, x);
   4590  1.1  mrg   else
   4591  1.1  mrg     gcc_unreachable ();
   4592  1.1  mrg }
   4593  1.1  mrg 
   4594  1.1  mrg /* Implement TARGET_ASM_SELECT_RTX_SECTION.  */
   4595  1.1  mrg 
   4596  1.1  mrg static section *
   4597  1.1  mrg loongarch_select_rtx_section (machine_mode mode, rtx x,
   4598  1.1  mrg 			      unsigned HOST_WIDE_INT align)
   4599  1.1  mrg {
   4600  1.1  mrg   /* ??? Consider using mergeable small data sections.  */
   4601  1.1  mrg   if (loongarch_rtx_constant_in_small_data_p (mode))
   4602  1.1  mrg     return get_named_section (NULL, ".sdata", 0);
   4603  1.1  mrg 
   4604  1.1  mrg   return default_elf_select_rtx_section (mode, x, align);
   4605  1.1  mrg }
   4606  1.1  mrg 
   4607  1.1  mrg /* Implement TARGET_ASM_FUNCTION_RODATA_SECTION.
   4608  1.1  mrg 
   4609  1.1  mrg    The complication here is that jump tables will use absolute addresses,
   4610  1.1  mrg    and should therefore not be included in the read-only part of a DSO.
   4611  1.1  mrg    Handle such cases by selecting a normal data section instead of a
   4612  1.1  mrg    read-only one.  The logic apes that in default_function_rodata_section.  */
   4613  1.1  mrg 
   4614  1.1  mrg static section *
   4615  1.1  mrg loongarch_function_rodata_section (tree decl, bool)
   4616  1.1  mrg {
   4617  1.1  mrg   return default_function_rodata_section (decl, false);
   4618  1.1  mrg }
   4619  1.1  mrg 
   4620  1.1  mrg /* Implement TARGET_IN_SMALL_DATA_P.  */
   4621  1.1  mrg 
   4622  1.1  mrg static bool
   4623  1.1  mrg loongarch_in_small_data_p (const_tree decl)
   4624  1.1  mrg {
   4625  1.1  mrg   int size;
   4626  1.1  mrg 
   4627  1.1  mrg   if (TREE_CODE (decl) == STRING_CST || TREE_CODE (decl) == FUNCTION_DECL)
   4628  1.1  mrg     return false;
   4629  1.1  mrg 
   4630  1.1  mrg   if (VAR_P (decl) && DECL_SECTION_NAME (decl) != 0)
   4631  1.1  mrg     {
   4632  1.1  mrg       const char *name;
   4633  1.1  mrg 
   4634  1.1  mrg       /* Reject anything that isn't in a known small-data section.  */
   4635  1.1  mrg       name = DECL_SECTION_NAME (decl);
   4636  1.1  mrg       if (strcmp (name, ".sdata") != 0 && strcmp (name, ".sbss") != 0)
   4637  1.1  mrg 	return false;
   4638  1.1  mrg 
   4639  1.1  mrg       /* If a symbol is defined externally, the assembler will use the
   4640  1.1  mrg 	 usual -G rules when deciding how to implement macros.  */
   4641  1.1  mrg       if (!DECL_EXTERNAL (decl))
   4642  1.1  mrg 	return true;
   4643  1.1  mrg     }
   4644  1.1  mrg 
   4645  1.1  mrg   /* We have traditionally not treated zero-sized objects as small data,
   4646  1.1  mrg      so this is now effectively part of the ABI.  */
   4647  1.1  mrg   size = int_size_in_bytes (TREE_TYPE (decl));
   4648  1.1  mrg   return size > 0 && size <= g_switch_value;
   4649  1.1  mrg }
   4650  1.1  mrg 
   4651  1.1  mrg /* The LoongArch debug format wants all automatic variables and arguments
   4652  1.1  mrg    to be in terms of the virtual frame pointer (stack pointer before
   4653  1.1  mrg    any adjustment in the function), while the LoongArch linker wants
   4654  1.1  mrg    the frame pointer to be the stack pointer after the initial
   4655  1.1  mrg    adjustment.  So, we do the adjustment here.  The arg pointer (which
   4656  1.1  mrg    is eliminated) points to the virtual frame pointer, while the frame
   4657  1.1  mrg    pointer (which may be eliminated) points to the stack pointer after
   4658  1.1  mrg    the initial adjustments.  */
   4659  1.1  mrg 
   4660  1.1  mrg HOST_WIDE_INT
   4661  1.1  mrg loongarch_debugger_offset (rtx addr, HOST_WIDE_INT offset)
   4662  1.1  mrg {
   4663  1.1  mrg   rtx offset2 = const0_rtx;
   4664  1.1  mrg   rtx reg = eliminate_constant_term (addr, &offset2);
   4665  1.1  mrg 
   4666  1.1  mrg   if (offset == 0)
   4667  1.1  mrg     offset = INTVAL (offset2);
   4668  1.1  mrg 
   4669  1.1  mrg   if (reg == stack_pointer_rtx
   4670  1.1  mrg       || reg == frame_pointer_rtx
   4671  1.1  mrg       || reg == hard_frame_pointer_rtx)
   4672  1.1  mrg     {
   4673  1.1  mrg       offset -= cfun->machine->frame.total_size;
   4674  1.1  mrg       if (reg == hard_frame_pointer_rtx)
   4675  1.1  mrg 	offset += cfun->machine->frame.hard_frame_pointer_offset;
   4676  1.1  mrg     }
   4677  1.1  mrg 
   4678  1.1  mrg   return offset;
   4679  1.1  mrg }
   4680  1.1  mrg 
   4681  1.1  mrg /* Implement ASM_OUTPUT_EXTERNAL.  */
   4682  1.1  mrg 
   4683  1.1  mrg void
   4684  1.1  mrg loongarch_output_external (FILE *file, tree decl, const char *name)
   4685  1.1  mrg {
   4686  1.1  mrg   default_elf_asm_output_external (file, decl, name);
   4687  1.1  mrg 
   4688  1.1  mrg   /* We output the name if and only if TREE_SYMBOL_REFERENCED is
   4689  1.1  mrg      set in order to avoid putting out names that are never really
   4690  1.1  mrg      used.  */
   4691  1.1  mrg   if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
   4692  1.1  mrg     {
   4693  1.1  mrg       if (loongarch_in_small_data_p (decl))
   4694  1.1  mrg 	{
   4695  1.1  mrg 	  /* When using assembler macros, emit .extern directives for
   4696  1.1  mrg 	     all small-data externs so that the assembler knows how
   4697  1.1  mrg 	     big they are.
   4698  1.1  mrg 
   4699  1.1  mrg 	     In most cases it would be safe (though pointless) to emit
   4700  1.1  mrg 	     .externs for other symbols too.  One exception is when an
   4701  1.1  mrg 	     object is within the -G limit but declared by the user to
   4702  1.1  mrg 	     be in a section other than .sbss or .sdata.  */
   4703  1.1  mrg 	  fputs ("\t.extern\t", file);
   4704  1.1  mrg 	  assemble_name (file, name);
   4705  1.1  mrg 	  fprintf (file, ", " HOST_WIDE_INT_PRINT_DEC "\n",
   4706  1.1  mrg 		   int_size_in_bytes (TREE_TYPE (decl)));
   4707  1.1  mrg 	}
   4708  1.1  mrg     }
   4709  1.1  mrg }
   4710  1.1  mrg 
   4711  1.1  mrg /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL.  */
   4712  1.1  mrg 
   4713  1.1  mrg static void ATTRIBUTE_UNUSED
   4714  1.1  mrg loongarch_output_dwarf_dtprel (FILE *file, int size, rtx x)
   4715  1.1  mrg {
   4716  1.1  mrg   switch (size)
   4717  1.1  mrg     {
   4718  1.1  mrg     case 4:
   4719  1.1  mrg       fputs ("\t.dtprelword\t", file);
   4720  1.1  mrg       break;
   4721  1.1  mrg 
   4722  1.1  mrg     case 8:
   4723  1.1  mrg       fputs ("\t.dtpreldword\t", file);
   4724  1.1  mrg       break;
   4725  1.1  mrg 
   4726  1.1  mrg     default:
   4727  1.1  mrg       gcc_unreachable ();
   4728  1.1  mrg     }
   4729  1.1  mrg   output_addr_const (file, x);
   4730  1.1  mrg   fputs ("+0x8000", file);
   4731  1.1  mrg }
   4732  1.1  mrg 
/* Implement ASM_OUTPUT_ASCII: write LEN bytes of STRING to STREAM as
   one or more .ascii directives, escaping quotes, backslashes and
   non-printing characters, and breaking the line when it gets long.  */

void
loongarch_output_ascii (FILE *stream, const char *string, size_t len)
{
  /* Approximate output column just past the opening quote.  */
  int col = 17;

  fprintf (stream, "\t.ascii\t\"");
  for (size_t i = 0; i < len; i++)
    {
      int c = (unsigned char) string[i];

      if (ISPRINT (c))
	{
	  /* Backslash-escape the characters that are special inside
	     a string literal.  */
	  if (c == '\\' || c == '\"')
	    {
	      putc ('\\', stream);
	      col++;
	    }
	  putc (c, stream);
	  col++;
	}
      else
	{
	  /* Emit non-printing characters as three-digit octal
	     escapes.  */
	  fprintf (stream, "\\%03o", c);
	  col += 4;
	}

      /* Start a fresh directive before the line grows too long,
	 unless this was the final byte.  */
      if (col > 72 && i + 1 < len)
	{
	  col = 17;
	  fprintf (stream, "\"\n\t.ascii\t\"");
	}
    }
  fprintf (stream, "\"\n");
}
   4772  1.1  mrg 
   4773  1.1  mrg /* Implement TARGET_FRAME_POINTER_REQUIRED.  */
   4774  1.1  mrg 
   4775  1.1  mrg static bool
   4776  1.1  mrg loongarch_frame_pointer_required (void)
   4777  1.1  mrg {
   4778  1.1  mrg   /* If the function contains dynamic stack allocations, we need to
   4779  1.1  mrg      use the frame pointer to access the static parts of the frame.  */
   4780  1.1  mrg   if (cfun->calls_alloca)
   4781  1.1  mrg     return true;
   4782  1.1  mrg 
   4783  1.1  mrg   return false;
   4784  1.1  mrg }
   4785  1.1  mrg 
   4786  1.1  mrg /* Implement TARGET_CAN_ELIMINATE.  Make sure that we're not trying
   4787  1.1  mrg    to eliminate to the wrong hard frame pointer.  */
   4788  1.1  mrg 
   4789  1.1  mrg static bool
   4790  1.1  mrg loongarch_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
   4791  1.1  mrg {
   4792  1.1  mrg   return (to == HARD_FRAME_POINTER_REGNUM || to == STACK_POINTER_REGNUM);
   4793  1.1  mrg }
   4794  1.1  mrg 
   4795  1.1  mrg /* Implement RETURN_ADDR_RTX.  We do not support moving back to a
   4796  1.1  mrg    previous frame.  */
   4797  1.1  mrg 
   4798  1.1  mrg rtx
   4799  1.1  mrg loongarch_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
   4800  1.1  mrg {
   4801  1.1  mrg   if (count != 0)
   4802  1.1  mrg     return const0_rtx;
   4803  1.1  mrg 
   4804  1.1  mrg   return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
   4805  1.1  mrg }
   4806  1.1  mrg 
   4807  1.1  mrg /* Emit code to change the current function's return address to
   4808  1.1  mrg    ADDRESS.  SCRATCH is available as a scratch register, if needed.
   4809  1.1  mrg    ADDRESS and SCRATCH are both word-mode GPRs.  */
   4810  1.1  mrg 
   4811  1.1  mrg void
   4812  1.1  mrg loongarch_set_return_address (rtx address, rtx scratch)
   4813  1.1  mrg {
   4814  1.1  mrg   rtx slot_address;
   4815  1.1  mrg 
   4816  1.1  mrg   gcc_assert (BITSET_P (cfun->machine->frame.mask, RETURN_ADDR_REGNUM));
   4817  1.1  mrg 
   4818  1.1  mrg   if (frame_pointer_needed)
   4819  1.1  mrg     slot_address = loongarch_add_offset (scratch, hard_frame_pointer_rtx,
   4820  1.1  mrg 					 -UNITS_PER_WORD);
   4821  1.1  mrg   else
   4822  1.1  mrg     slot_address = loongarch_add_offset (scratch, stack_pointer_rtx,
   4823  1.1  mrg 					 cfun->machine->frame.gp_sp_offset);
   4824  1.1  mrg 
   4825  1.1  mrg   loongarch_emit_move (gen_frame_mem (GET_MODE (address), slot_address),
   4826  1.1  mrg 		       address);
   4827  1.1  mrg }
   4828  1.1  mrg 
/* Return true if register REGNO can store a value of mode MODE.
   The result of this function is cached in loongarch_hard_regno_mode_ok.  */

static bool
loongarch_hard_regno_mode_ok_uncached (unsigned int regno, machine_mode mode)
{
  unsigned int size;
  enum mode_class mclass;

  /* FCCmode values live only in the condition-code registers.  */
  if (mode == FCCmode)
    return FCC_REG_P (regno);

  size = GET_MODE_SIZE (mode);
  mclass = GET_MODE_CLASS (mode);

  /* Any GPR can hold a value of at most word size; values larger than
     a word must start in an even-numbered register.  */
  if (GP_REG_P (regno))
    return ((regno - GP_REG_FIRST) & 1) == 0 || size <= UNITS_PER_WORD;

  if (FP_REG_P (regno))
    {
      if (mclass == MODE_FLOAT
	  || mclass == MODE_COMPLEX_FLOAT
	  || mclass == MODE_VECTOR_FLOAT)
	return size <= UNITS_PER_FPVALUE;

      /* Allow integer modes that fit into a single register.  We need
	 to put integers into FPRs when using instructions like CVT
	 and TRUNC.  There's no point allowing sizes smaller than a word,
	 because the FPU has no appropriate load/store instructions.  */
      if (mclass == MODE_INT)
	return size >= MIN_UNITS_PER_WORD && size <= UNITS_PER_FPREG;
    }

  /* All other register/mode combinations are invalid.  */
  return false;
}
   4864  1.1  mrg 
   4865  1.1  mrg /* Implement TARGET_HARD_REGNO_MODE_OK.  */
   4866  1.1  mrg 
   4867  1.1  mrg static bool
   4868  1.1  mrg loongarch_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
   4869  1.1  mrg {
   4870  1.1  mrg   return loongarch_hard_regno_mode_ok_p[mode][regno];
   4871  1.1  mrg }
   4872  1.1  mrg 
   4873  1.1  mrg /* Implement TARGET_HARD_REGNO_NREGS.  */
   4874  1.1  mrg 
   4875  1.1  mrg static unsigned int
   4876  1.1  mrg loongarch_hard_regno_nregs (unsigned int regno, machine_mode mode)
   4877  1.1  mrg {
   4878  1.1  mrg   if (FCC_REG_P (regno))
   4879  1.1  mrg     /* The size of FP status registers is always 4, because they only hold
   4880  1.1  mrg        FCCmode values, and FCCmode is always considered to be 4 bytes wide.  */
   4881  1.1  mrg     return (GET_MODE_SIZE (mode) + 3) / 4;
   4882  1.1  mrg 
   4883  1.1  mrg   if (FP_REG_P (regno))
   4884  1.1  mrg     return (GET_MODE_SIZE (mode) + UNITS_PER_FPREG - 1) / UNITS_PER_FPREG;
   4885  1.1  mrg 
   4886  1.1  mrg   /* All other registers are word-sized.  */
   4887  1.1  mrg   return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
   4888  1.1  mrg }
   4889  1.1  mrg 
   4890  1.1  mrg /* Implement CLASS_MAX_NREGS, taking the maximum of the cases
   4891  1.1  mrg    in loongarch_hard_regno_nregs.  */
   4892  1.1  mrg 
   4893  1.1  mrg int
   4894  1.1  mrg loongarch_class_max_nregs (enum reg_class rclass, machine_mode mode)
   4895  1.1  mrg {
   4896  1.1  mrg   int size;
   4897  1.1  mrg   HARD_REG_SET left;
   4898  1.1  mrg 
   4899  1.1  mrg   size = 0x8000;
   4900  1.1  mrg   left = reg_class_contents[rclass];
   4901  1.1  mrg   if (hard_reg_set_intersect_p (left, reg_class_contents[(int) FCC_REGS]))
   4902  1.1  mrg     {
   4903  1.1  mrg       if (loongarch_hard_regno_mode_ok (FCC_REG_FIRST, mode))
   4904  1.1  mrg 	size = MIN (size, 4);
   4905  1.1  mrg 
   4906  1.1  mrg       left &= ~reg_class_contents[FCC_REGS];
   4907  1.1  mrg     }
   4908  1.1  mrg   if (hard_reg_set_intersect_p (left, reg_class_contents[(int) FP_REGS]))
   4909  1.1  mrg     {
   4910  1.1  mrg       if (loongarch_hard_regno_mode_ok (FP_REG_FIRST, mode))
   4911  1.1  mrg 	size = MIN (size, UNITS_PER_FPREG);
   4912  1.1  mrg 
   4913  1.1  mrg       left &= ~reg_class_contents[FP_REGS];
   4914  1.1  mrg     }
   4915  1.1  mrg   if (!hard_reg_set_empty_p (left))
   4916  1.1  mrg     size = MIN (size, UNITS_PER_WORD);
   4917  1.1  mrg   return (GET_MODE_SIZE (mode) + size - 1) / size;
   4918  1.1  mrg }
   4919  1.1  mrg 
   4920  1.1  mrg /* Implement TARGET_CAN_CHANGE_MODE_CLASS.  */
   4921  1.1  mrg 
   4922  1.1  mrg static bool
   4923  1.1  mrg loongarch_can_change_mode_class (machine_mode, machine_mode,
   4924  1.1  mrg 				 reg_class_t rclass)
   4925  1.1  mrg {
   4926  1.1  mrg   return !reg_classes_intersect_p (FP_REGS, rclass);
   4927  1.1  mrg }
   4928  1.1  mrg 
   4929  1.1  mrg /* Return true if moves in mode MODE can use the FPU's fmov.fmt instruction,
   4930  1.1  mrg */
   4931  1.1  mrg 
   4932  1.1  mrg static bool
   4933  1.1  mrg loongarch_mode_ok_for_mov_fmt_p (machine_mode mode)
   4934  1.1  mrg {
   4935  1.1  mrg   switch (mode)
   4936  1.1  mrg     {
   4937  1.1  mrg     case E_FCCmode:
   4938  1.1  mrg     case E_SFmode:
   4939  1.1  mrg       return TARGET_HARD_FLOAT;
   4940  1.1  mrg 
   4941  1.1  mrg     case E_DFmode:
   4942  1.1  mrg       return TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT;
   4943  1.1  mrg 
   4944  1.1  mrg     default:
   4945  1.1  mrg       return 0;
   4946  1.1  mrg     }
   4947  1.1  mrg }
   4948  1.1  mrg 
   4949  1.1  mrg /* Implement TARGET_MODES_TIEABLE_P.  */
   4950  1.1  mrg 
   4951  1.1  mrg static bool
   4952  1.1  mrg loongarch_modes_tieable_p (machine_mode mode1, machine_mode mode2)
   4953  1.1  mrg {
   4954  1.1  mrg   /* FPRs allow no mode punning, so it's not worth tying modes if we'd
   4955  1.1  mrg      prefer to put one of them in FPRs.  */
   4956  1.1  mrg   return (mode1 == mode2
   4957  1.1  mrg 	  || (!loongarch_mode_ok_for_mov_fmt_p (mode1)
   4958  1.1  mrg 	      && !loongarch_mode_ok_for_mov_fmt_p (mode2)));
   4959  1.1  mrg }
   4960  1.1  mrg 
   4961  1.1  mrg /* Implement TARGET_PREFERRED_RELOAD_CLASS.  */
   4962  1.1  mrg 
   4963  1.1  mrg static reg_class_t
   4964  1.1  mrg loongarch_preferred_reload_class (rtx x, reg_class_t rclass)
   4965  1.1  mrg {
   4966  1.1  mrg   if (reg_class_subset_p (FP_REGS, rclass)
   4967  1.1  mrg       && loongarch_mode_ok_for_mov_fmt_p (GET_MODE (x)))
   4968  1.1  mrg     return FP_REGS;
   4969  1.1  mrg 
   4970  1.1  mrg   if (reg_class_subset_p (GR_REGS, rclass))
   4971  1.1  mrg     rclass = GR_REGS;
   4972  1.1  mrg 
   4973  1.1  mrg   return rclass;
   4974  1.1  mrg }
   4975  1.1  mrg 
   4976  1.1  mrg /* RCLASS is a class involved in a REGISTER_MOVE_COST calculation.
   4977  1.1  mrg    Return a "canonical" class to represent it in later calculations.  */
   4978  1.1  mrg 
   4979  1.1  mrg static reg_class_t
   4980  1.1  mrg loongarch_canonicalize_move_class (reg_class_t rclass)
   4981  1.1  mrg {
   4982  1.1  mrg   if (reg_class_subset_p (rclass, GENERAL_REGS))
   4983  1.1  mrg     rclass = GENERAL_REGS;
   4984  1.1  mrg 
   4985  1.1  mrg   return rclass;
   4986  1.1  mrg }
   4987  1.1  mrg 
   4988  1.1  mrg /* Return the cost of moving a value from a register of class FROM to a GPR.
   4989  1.1  mrg    Return 0 for classes that are unions of other classes handled by this
   4990  1.1  mrg    function.  */
   4991  1.1  mrg 
   4992  1.1  mrg static int
   4993  1.1  mrg loongarch_move_to_gpr_cost (reg_class_t from)
   4994  1.1  mrg {
   4995  1.1  mrg   switch (from)
   4996  1.1  mrg     {
   4997  1.1  mrg     case GENERAL_REGS:
   4998  1.1  mrg       /* MOVE macro.  */
   4999  1.1  mrg       return 2;
   5000  1.1  mrg 
   5001  1.1  mrg     case FP_REGS:
   5002  1.1  mrg       /* MOVFR2GR, etc.  */
   5003  1.1  mrg       return 4;
   5004  1.1  mrg 
   5005  1.1  mrg     default:
   5006  1.1  mrg       return 0;
   5007  1.1  mrg     }
   5008  1.1  mrg }
   5009  1.1  mrg 
   5010  1.1  mrg /* Return the cost of moving a value from a GPR to a register of class TO.
   5011  1.1  mrg    Return 0 for classes that are unions of other classes handled by this
   5012  1.1  mrg    function.  */
   5013  1.1  mrg 
   5014  1.1  mrg static int
   5015  1.1  mrg loongarch_move_from_gpr_cost (reg_class_t to)
   5016  1.1  mrg {
   5017  1.1  mrg   switch (to)
   5018  1.1  mrg     {
   5019  1.1  mrg     case GENERAL_REGS:
   5020  1.1  mrg       /*MOVE macro.  */
   5021  1.1  mrg       return 2;
   5022  1.1  mrg 
   5023  1.1  mrg     case FP_REGS:
   5024  1.1  mrg       /* MOVGR2FR, etc.  */
   5025  1.1  mrg       return 4;
   5026  1.1  mrg 
   5027  1.1  mrg     default:
   5028  1.1  mrg       return 0;
   5029  1.1  mrg     }
   5030  1.1  mrg }
   5031  1.1  mrg 
   5032  1.1  mrg /* Implement TARGET_REGISTER_MOVE_COST.  Return 0 for classes that are the
   5033  1.1  mrg    maximum of the move costs for subclasses; regclass will work out
   5034  1.1  mrg    the maximum for us.  */
   5035  1.1  mrg 
   5036  1.1  mrg static int
   5037  1.1  mrg loongarch_register_move_cost (machine_mode mode, reg_class_t from,
   5038  1.1  mrg 			      reg_class_t to)
   5039  1.1  mrg {
   5040  1.1  mrg   reg_class_t dregs;
   5041  1.1  mrg   int cost1, cost2;
   5042  1.1  mrg 
   5043  1.1  mrg   from = loongarch_canonicalize_move_class (from);
   5044  1.1  mrg   to = loongarch_canonicalize_move_class (to);
   5045  1.1  mrg 
   5046  1.1  mrg   /* Handle moves that can be done without using general-purpose registers.  */
   5047  1.1  mrg   if (from == FP_REGS)
   5048  1.1  mrg     {
   5049  1.1  mrg       if (to == FP_REGS && loongarch_mode_ok_for_mov_fmt_p (mode))
   5050  1.1  mrg 	/* FMOV.FMT.  */
   5051  1.1  mrg 	return 4;
   5052  1.1  mrg     }
   5053  1.1  mrg 
   5054  1.1  mrg   /* Handle cases in which only one class deviates from the ideal.  */
   5055  1.1  mrg   dregs = GENERAL_REGS;
   5056  1.1  mrg   if (from == dregs)
   5057  1.1  mrg     return loongarch_move_from_gpr_cost (to);
   5058  1.1  mrg   if (to == dregs)
   5059  1.1  mrg     return loongarch_move_to_gpr_cost (from);
   5060  1.1  mrg 
   5061  1.1  mrg   /* Handles cases that require a GPR temporary.  */
   5062  1.1  mrg   cost1 = loongarch_move_to_gpr_cost (from);
   5063  1.1  mrg   if (cost1 != 0)
   5064  1.1  mrg     {
   5065  1.1  mrg       cost2 = loongarch_move_from_gpr_cost (to);
   5066  1.1  mrg       if (cost2 != 0)
   5067  1.1  mrg 	return cost1 + cost2;
   5068  1.1  mrg     }
   5069  1.1  mrg 
   5070  1.1  mrg   return 0;
   5071  1.1  mrg }
   5072  1.1  mrg 
   5073  1.1  mrg /* Implement TARGET_MEMORY_MOVE_COST.  */
   5074  1.1  mrg 
   5075  1.1  mrg static int
   5076  1.1  mrg loongarch_memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
   5077  1.1  mrg {
   5078  1.1  mrg   return (loongarch_cost->memory_latency
   5079  1.1  mrg 	  + memory_move_secondary_cost (mode, rclass, in));
   5080  1.1  mrg }
   5081  1.1  mrg 
   5082  1.1  mrg /* Return the register class required for a secondary register when
   5083  1.1  mrg    copying between one of the registers in RCLASS and value X, which
   5084  1.1  mrg    has mode MODE.  X is the source of the move if IN_P, otherwise it
   5085  1.1  mrg    is the destination.  Return NO_REGS if no secondary register is
   5086  1.1  mrg    needed.  */
   5087  1.1  mrg 
   5088  1.1  mrg static reg_class_t
   5089  1.1  mrg loongarch_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x,
   5090  1.1  mrg 			    reg_class_t rclass, machine_mode mode,
   5091  1.1  mrg 			    secondary_reload_info *sri ATTRIBUTE_UNUSED)
   5092  1.1  mrg {
   5093  1.1  mrg   int regno;
   5094  1.1  mrg 
   5095  1.1  mrg   regno = true_regnum (x);
   5096  1.1  mrg 
   5097  1.1  mrg   if (reg_class_subset_p (rclass, FP_REGS))
   5098  1.1  mrg     {
   5099  1.1  mrg       if (regno < 0
   5100  1.1  mrg 	  || (MEM_P (x)
   5101  1.1  mrg 	      && (GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8)))
   5102  1.1  mrg 	/* In this case we can use fld.s, fst.s, fld.d or fst.d.  */
   5103  1.1  mrg 	return NO_REGS;
   5104  1.1  mrg 
   5105  1.1  mrg       if (GP_REG_P (regno) || x == CONST0_RTX (mode))
   5106  1.1  mrg 	/* In this case we can use movgr2fr.s, movfr2gr.s, movgr2fr.d or
   5107  1.1  mrg 	 * movfr2gr.d.  */
   5108  1.1  mrg 	return NO_REGS;
   5109  1.1  mrg 
   5110  1.1  mrg       if (CONSTANT_P (x) && !targetm.cannot_force_const_mem (mode, x))
   5111  1.1  mrg 	/* We can force the constant to memory and use fld.s
   5112  1.1  mrg 	   and fld.d.  As above, we will use pairs of lwc1s if
   5113  1.1  mrg 	   ldc1 is not supported.  */
   5114  1.1  mrg 	return NO_REGS;
   5115  1.1  mrg 
   5116  1.1  mrg       if (FP_REG_P (regno) && loongarch_mode_ok_for_mov_fmt_p (mode))
   5117  1.1  mrg 	/* In this case we can use fmov.{s/d}.  */
   5118  1.1  mrg 	return NO_REGS;
   5119  1.1  mrg 
   5120  1.1  mrg       /* Otherwise, we need to reload through an integer register.  */
   5121  1.1  mrg       return GR_REGS;
   5122  1.1  mrg     }
   5123  1.1  mrg   if (FP_REG_P (regno))
   5124  1.1  mrg     return reg_class_subset_p (rclass, GR_REGS) ? NO_REGS : GR_REGS;
   5125  1.1  mrg 
   5126  1.1  mrg   return NO_REGS;
   5127  1.1  mrg }
   5128  1.1  mrg 
   5129  1.1  mrg /* Implement TARGET_VALID_POINTER_MODE.  */
   5130  1.1  mrg 
   5131  1.1  mrg static bool
   5132  1.1  mrg loongarch_valid_pointer_mode (scalar_int_mode mode)
   5133  1.1  mrg {
   5134  1.1  mrg   return mode == SImode || (TARGET_64BIT && mode == DImode);
   5135  1.1  mrg }
   5136  1.1  mrg 
   5137  1.1  mrg /* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
   5138  1.1  mrg 
   5139  1.1  mrg static bool
   5140  1.1  mrg loongarch_scalar_mode_supported_p (scalar_mode mode)
   5141  1.1  mrg {
   5142  1.1  mrg   if (ALL_FIXED_POINT_MODE_P (mode)
   5143  1.1  mrg       && GET_MODE_PRECISION (mode) <= 2 * BITS_PER_WORD)
   5144  1.1  mrg     return true;
   5145  1.1  mrg 
   5146  1.1  mrg   return default_scalar_mode_supported_p (mode);
   5147  1.1  mrg }
   5148  1.1  mrg 
   5149  1.1  mrg /* Return the assembly code for INSN, which has the operands given by
   5150  1.1  mrg    OPERANDS, and which branches to OPERANDS[0] if some condition is true.
   5151  1.1  mrg    BRANCH_IF_TRUE is the asm template that should be used if OPERANDS[0]
   5152  1.1  mrg    is in range of a direct branch.  BRANCH_IF_FALSE is an inverted
   5153  1.1  mrg    version of BRANCH_IF_TRUE.  */
   5154  1.1  mrg 
   5155  1.1  mrg const char *
   5156  1.1  mrg loongarch_output_conditional_branch (rtx_insn *insn, rtx *operands,
   5157  1.1  mrg 				     const char *branch_if_true,
   5158  1.1  mrg 				     const char *branch_if_false)
   5159  1.1  mrg {
   5160  1.1  mrg   unsigned int length;
   5161  1.1  mrg   rtx taken;
   5162  1.1  mrg 
   5163  1.1  mrg   gcc_assert (LABEL_P (operands[0]));
   5164  1.1  mrg 
   5165  1.1  mrg   length = get_attr_length (insn);
   5166  1.1  mrg   if (length <= 4)
   5167  1.1  mrg     {
   5168  1.1  mrg       return branch_if_true;
   5169  1.1  mrg     }
   5170  1.1  mrg 
   5171  1.1  mrg   /* Generate a reversed branch around a direct jump.  */
   5172  1.1  mrg   rtx_code_label *not_taken = gen_label_rtx ();
   5173  1.1  mrg   taken = operands[0];
   5174  1.1  mrg 
   5175  1.1  mrg   /* Generate the reversed branch to NOT_TAKEN.  */
   5176  1.1  mrg   operands[0] = not_taken;
   5177  1.1  mrg   output_asm_insn (branch_if_false, operands);
   5178  1.1  mrg 
   5179  1.1  mrg   output_asm_insn ("b\t%0", &taken);
   5180  1.1  mrg 
   5181  1.1  mrg   /* Output NOT_TAKEN.  */
   5182  1.1  mrg   targetm.asm_out.internal_label (asm_out_file, "L",
   5183  1.1  mrg 				  CODE_LABEL_NUMBER (not_taken));
   5184  1.1  mrg   return "";
   5185  1.1  mrg }
   5186  1.1  mrg 
   5187  1.1  mrg /* Return the assembly code for INSN, which branches to OPERANDS[0]
   5188  1.1  mrg    if some equality condition is true.  The condition is given by
   5189  1.1  mrg    OPERANDS[1] if !INVERTED_P, otherwise it is the inverse of
   5190  1.1  mrg    OPERANDS[1].  OPERANDS[2] is the comparison's first operand;
   5191  1.1  mrg    OPERANDS[3] is the second operand and may be zero or a register.  */
   5192  1.1  mrg 
   5193  1.1  mrg const char *
   5194  1.1  mrg loongarch_output_equal_conditional_branch (rtx_insn *insn, rtx *operands,
   5195  1.1  mrg 					   bool inverted_p)
   5196  1.1  mrg {
   5197  1.1  mrg   const char *branch[2];
   5198  1.1  mrg   if (operands[3] == const0_rtx)
   5199  1.1  mrg     {
   5200  1.1  mrg       branch[!inverted_p] = LARCH_BRANCH ("b%C1z", "%2,%0");
   5201  1.1  mrg       branch[inverted_p] = LARCH_BRANCH ("b%N1z", "%2,%0");
   5202  1.1  mrg     }
   5203  1.1  mrg   else
   5204  1.1  mrg     {
   5205  1.1  mrg       branch[!inverted_p] = LARCH_BRANCH ("b%C1", "%2,%z3,%0");
   5206  1.1  mrg       branch[inverted_p] = LARCH_BRANCH ("b%N1", "%2,%z3,%0");
   5207  1.1  mrg     }
   5208  1.1  mrg 
   5209  1.1  mrg   return loongarch_output_conditional_branch (insn, operands, branch[1],
   5210  1.1  mrg 					      branch[0]);
   5211  1.1  mrg }
   5212  1.1  mrg 
   5213  1.1  mrg /* Return the assembly code for INSN, which branches to OPERANDS[0]
   5214  1.1  mrg    if some ordering condition is true.  The condition is given by
   5215  1.1  mrg    OPERANDS[1] if !INVERTED_P, otherwise it is the inverse of
   5216  1.1  mrg    OPERANDS[1].  OPERANDS[2] is the comparison's first operand;
   5217  1.1  mrg    OPERANDS[3] is the second operand and may be zero or a register.  */
   5218  1.1  mrg 
   5219  1.1  mrg const char *
   5220  1.1  mrg loongarch_output_order_conditional_branch (rtx_insn *insn, rtx *operands,
   5221  1.1  mrg 					   bool inverted_p)
   5222  1.1  mrg {
   5223  1.1  mrg   const char *branch[2];
   5224  1.1  mrg 
   5225  1.1  mrg   /* Make BRANCH[1] branch to OPERANDS[0] when the condition is true.
   5226  1.1  mrg      Make BRANCH[0] branch on the inverse condition.  */
   5227  1.1  mrg   if (operands[3] != const0_rtx)
   5228  1.1  mrg     {
   5229  1.1  mrg       /* Handle degenerate cases that should not, but do, occur.  */
   5230  1.1  mrg       if (REGNO (operands[2]) == REGNO (operands[3]))
   5231  1.1  mrg 	{
   5232  1.1  mrg 	  switch (GET_CODE (operands[1]))
   5233  1.1  mrg 	    {
   5234  1.1  mrg 	    case LT:
   5235  1.1  mrg 	    case LTU:
   5236  1.1  mrg 	    case GT:
   5237  1.1  mrg 	    case GTU:
   5238  1.1  mrg 	      inverted_p = !inverted_p;
   5239  1.1  mrg 	      /* Fall through.  */
   5240  1.1  mrg 	    case LE:
   5241  1.1  mrg 	    case LEU:
   5242  1.1  mrg 	    case GE:
   5243  1.1  mrg 	    case GEU:
   5244  1.1  mrg 	      branch[!inverted_p] = LARCH_BRANCH ("b", "%0");
   5245  1.1  mrg 	      branch[inverted_p] = "\t# branch never";
   5246  1.1  mrg 	      break;
   5247  1.1  mrg 	    default:
   5248  1.1  mrg 	      gcc_unreachable ();
   5249  1.1  mrg 	    }
   5250  1.1  mrg 	}
   5251  1.1  mrg       else
   5252  1.1  mrg 	{
   5253  1.1  mrg 	  switch (GET_CODE (operands[1]))
   5254  1.1  mrg 	    {
   5255  1.1  mrg 	    case LE:
   5256  1.1  mrg 	    case LEU:
   5257  1.1  mrg 	    case GT:
   5258  1.1  mrg 	    case GTU:
   5259  1.1  mrg 	    case LT:
   5260  1.1  mrg 	    case LTU:
   5261  1.1  mrg 	    case GE:
   5262  1.1  mrg 	    case GEU:
   5263  1.1  mrg 	      branch[!inverted_p] = LARCH_BRANCH ("b%C1", "%2,%3,%0");
   5264  1.1  mrg 	      branch[inverted_p] = LARCH_BRANCH ("b%N1", "%2,%3,%0");
   5265  1.1  mrg 	      break;
   5266  1.1  mrg 	    default:
   5267  1.1  mrg 	      gcc_unreachable ();
   5268  1.1  mrg 	    }
   5269  1.1  mrg 	}
   5270  1.1  mrg     }
   5271  1.1  mrg   else
   5272  1.1  mrg     {
   5273  1.1  mrg       switch (GET_CODE (operands[1]))
   5274  1.1  mrg 	{
   5275  1.1  mrg 	  /* These cases are equivalent to comparisons against zero.  */
   5276  1.1  mrg 	case LEU:
   5277  1.1  mrg 	case GTU:
   5278  1.1  mrg 	case LTU:
   5279  1.1  mrg 	case GEU:
   5280  1.1  mrg 	case LE:
   5281  1.1  mrg 	case GT:
   5282  1.1  mrg 	case LT:
   5283  1.1  mrg 	case GE:
   5284  1.1  mrg 	  branch[!inverted_p] = LARCH_BRANCH ("b%C1", "%2,$r0,%0");
   5285  1.1  mrg 	  branch[inverted_p] = LARCH_BRANCH ("b%N1", "%2,$r0,%0");
   5286  1.1  mrg 	  break;
   5287  1.1  mrg 	default:
   5288  1.1  mrg 	  gcc_unreachable ();
   5289  1.1  mrg 	}
   5290  1.1  mrg     }
   5291  1.1  mrg   return loongarch_output_conditional_branch (insn, operands, branch[1],
   5292  1.1  mrg 					      branch[0]);
   5293  1.1  mrg }
   5294  1.1  mrg 
   5295  1.1  mrg /* Return the assembly code for DIV.{W/D} instruction DIVISION, which has
   5296  1.1  mrg    the operands given by OPERANDS.  Add in a divide-by-zero check if needed.
   5297  1.1  mrg    */
   5298  1.1  mrg 
   5299  1.1  mrg const char *
   5300  1.1  mrg loongarch_output_division (const char *division, rtx *operands)
   5301  1.1  mrg {
   5302  1.1  mrg   const char *s;
   5303  1.1  mrg 
   5304  1.1  mrg   s = division;
   5305  1.1  mrg   if (loongarch_check_zero_div_p ())
   5306  1.1  mrg     {
   5307  1.1  mrg       output_asm_insn (s, operands);
   5308  1.1  mrg       s = "bne\t%2,%.,1f\n\tbreak\t7\n1:";
   5309  1.1  mrg     }
   5310  1.1  mrg   return s;
   5311  1.1  mrg }
   5312  1.1  mrg 
   5313  1.1  mrg /* Implement TARGET_SCHED_ADJUST_COST.  We assume that anti and output
   5314  1.1  mrg    dependencies have no cost.  */
   5315  1.1  mrg 
   5316  1.1  mrg static int
   5317  1.1  mrg loongarch_adjust_cost (rtx_insn *, int dep_type, rtx_insn *, int cost,
   5318  1.1  mrg 		       unsigned int)
   5319  1.1  mrg {
   5320  1.1  mrg   if (dep_type != 0 && (dep_type != REG_DEP_OUTPUT))
   5321  1.1  mrg     return 0;
   5322  1.1  mrg   return cost;
   5323  1.1  mrg }
   5324  1.1  mrg 
   5325  1.1  mrg /* Return the number of instructions that can be issued per cycle.  */
   5326  1.1  mrg 
   5327  1.1  mrg static int
   5328  1.1  mrg loongarch_issue_rate (void)
   5329  1.1  mrg {
   5330  1.1  mrg   if ((unsigned long) LARCH_ACTUAL_TUNE < N_TUNE_TYPES)
   5331  1.1  mrg     return loongarch_cpu_issue_rate[LARCH_ACTUAL_TUNE];
   5332  1.1  mrg   else
   5333  1.1  mrg     return 1;
   5334  1.1  mrg }
   5335  1.1  mrg 
   5336  1.1  mrg /* Implement TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD.  This should
   5337  1.1  mrg    be as wide as the scheduling freedom in the DFA.  */
   5338  1.1  mrg 
   5339  1.1  mrg static int
   5340  1.1  mrg loongarch_multipass_dfa_lookahead (void)
   5341  1.1  mrg {
   5342  1.1  mrg   if ((unsigned long) LARCH_ACTUAL_TUNE < N_ARCH_TYPES)
   5343  1.1  mrg     return loongarch_cpu_multipass_dfa_lookahead[LARCH_ACTUAL_TUNE];
   5344  1.1  mrg   else
   5345  1.1  mrg     return 0;
   5346  1.1  mrg }
   5347  1.1  mrg 
   5348  1.1  mrg /* Implement TARGET_SCHED_REORDER.  */
   5349  1.1  mrg 
   5350  1.1  mrg static int
   5351  1.1  mrg loongarch_sched_reorder (FILE *file ATTRIBUTE_UNUSED,
   5352  1.1  mrg 			 int verbose ATTRIBUTE_UNUSED,
   5353  1.1  mrg 			 rtx_insn **ready ATTRIBUTE_UNUSED,
   5354  1.1  mrg 			 int *nreadyp ATTRIBUTE_UNUSED,
   5355  1.1  mrg 			 int cycle ATTRIBUTE_UNUSED)
   5356  1.1  mrg {
   5357  1.1  mrg   return loongarch_issue_rate ();
   5358  1.1  mrg }
   5359  1.1  mrg 
   5360  1.1  mrg /* Implement TARGET_SCHED_REORDER2.  */
   5361  1.1  mrg 
   5362  1.1  mrg static int
   5363  1.1  mrg loongarch_sched_reorder2 (FILE *file ATTRIBUTE_UNUSED,
   5364  1.1  mrg 			  int verbose ATTRIBUTE_UNUSED,
   5365  1.1  mrg 			  rtx_insn **ready ATTRIBUTE_UNUSED,
   5366  1.1  mrg 			  int *nreadyp ATTRIBUTE_UNUSED,
   5367  1.1  mrg 			  int cycle ATTRIBUTE_UNUSED)
   5368  1.1  mrg {
   5369  1.1  mrg   return cached_can_issue_more;
   5370  1.1  mrg }
   5371  1.1  mrg 
   5372  1.1  mrg /* Implement TARGET_SCHED_INIT.  */
   5373  1.1  mrg 
   5374  1.1  mrg static void
   5375  1.1  mrg loongarch_sched_init (FILE *file ATTRIBUTE_UNUSED,
   5376  1.1  mrg 		      int verbose ATTRIBUTE_UNUSED,
   5377  1.1  mrg 		      int max_ready ATTRIBUTE_UNUSED)
   5378  1.1  mrg {}
   5379  1.1  mrg 
   5380  1.1  mrg /* Implement TARGET_SCHED_VARIABLE_ISSUE.  */
   5381  1.1  mrg 
   5382  1.1  mrg static int
   5383  1.1  mrg loongarch_variable_issue (FILE *file ATTRIBUTE_UNUSED,
   5384  1.1  mrg 			  int verbose ATTRIBUTE_UNUSED, rtx_insn *insn,
   5385  1.1  mrg 			  int more)
   5386  1.1  mrg {
   5387  1.1  mrg   /* Ignore USEs and CLOBBERs; don't count them against the issue rate.  */
   5388  1.1  mrg   if (USEFUL_INSN_P (insn))
   5389  1.1  mrg     {
   5390  1.1  mrg       if (get_attr_type (insn) != TYPE_GHOST)
   5391  1.1  mrg 	more--;
   5392  1.1  mrg     }
   5393  1.1  mrg 
   5394  1.1  mrg   /* Instructions of type 'multi' should all be split before
   5395  1.1  mrg      the second scheduling pass.  */
   5396  1.1  mrg   gcc_assert (!reload_completed
   5397  1.1  mrg 	      || recog_memoized (insn) < 0
   5398  1.1  mrg 	      || get_attr_type (insn) != TYPE_MULTI);
   5399  1.1  mrg 
   5400  1.1  mrg   cached_can_issue_more = more;
   5401  1.1  mrg   return more;
   5402  1.1  mrg }
   5403  1.1  mrg 
   5404  1.1  mrg /* Given that we have an rtx of the form (prefetch ... WRITE LOCALITY),
   5405  1.1  mrg    return the first operand of the associated PREF or PREFX insn.  */
   5406  1.1  mrg 
   5407  1.1  mrg rtx
   5408  1.1  mrg loongarch_prefetch_cookie (rtx write, rtx locality)
   5409  1.1  mrg {
   5410  1.1  mrg   /* store_streamed / load_streamed.  */
   5411  1.1  mrg   if (INTVAL (locality) <= 0)
   5412  1.1  mrg     return GEN_INT (INTVAL (write) + 4);
   5413  1.1  mrg 
   5414  1.1  mrg   /* store / load.  */
   5415  1.1  mrg   if (INTVAL (locality) <= 2)
   5416  1.1  mrg     return write;
   5417  1.1  mrg 
   5418  1.1  mrg   /* store_retained / load_retained.  */
   5419  1.1  mrg   return GEN_INT (INTVAL (write) + 6);
   5420  1.1  mrg }
   5421  1.1  mrg 
   5422  1.1  mrg /* Implement TARGET_ASM_OUTPUT_MI_THUNK.  Generate rtl rather than asm text
   5423  1.1  mrg    in order to avoid duplicating too much logic from elsewhere.  */
   5424  1.1  mrg 
   5425  1.1  mrg static void
   5426  1.1  mrg loongarch_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
   5427  1.1  mrg 			   HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
   5428  1.1  mrg 			   tree function)
   5429  1.1  mrg {
   5430  1.1  mrg   const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
   5431  1.1  mrg   rtx this_rtx, temp1, temp2, fnaddr;
   5432  1.1  mrg   rtx_insn *insn;
   5433  1.1  mrg   bool use_sibcall_p;
   5434  1.1  mrg 
   5435  1.1  mrg   /* Pretend to be a post-reload pass while generating rtl.  */
   5436  1.1  mrg   reload_completed = 1;
   5437  1.1  mrg 
   5438  1.1  mrg   /* Mark the end of the (empty) prologue.  */
   5439  1.1  mrg   emit_note (NOTE_INSN_PROLOGUE_END);
   5440  1.1  mrg 
   5441  1.1  mrg   /* Determine if we can use a sibcall to call FUNCTION directly.  */
   5442  1.1  mrg   fnaddr = XEXP (DECL_RTL (function), 0);
   5443  1.1  mrg   use_sibcall_p = const_call_insn_operand (fnaddr, Pmode);
   5444  1.1  mrg 
   5445  1.1  mrg   /* We need two temporary registers in some cases.  */
   5446  1.1  mrg   temp1 = gen_rtx_REG (Pmode, 12);
   5447  1.1  mrg   temp2 = gen_rtx_REG (Pmode, 13);
   5448  1.1  mrg 
   5449  1.1  mrg   /* Find out which register contains the "this" pointer.  */
   5450  1.1  mrg   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
   5451  1.1  mrg     this_rtx = gen_rtx_REG (Pmode, GP_ARG_FIRST + 1);
   5452  1.1  mrg   else
   5453  1.1  mrg     this_rtx = gen_rtx_REG (Pmode, GP_ARG_FIRST);
   5454  1.1  mrg 
   5455  1.1  mrg   /* Add DELTA to THIS_RTX.  */
   5456  1.1  mrg   if (delta != 0)
   5457  1.1  mrg     {
   5458  1.1  mrg       rtx offset = GEN_INT (delta);
   5459  1.1  mrg       if (!IMM12_OPERAND (delta))
   5460  1.1  mrg 	{
   5461  1.1  mrg 	  loongarch_emit_move (temp1, offset);
   5462  1.1  mrg 	  offset = temp1;
   5463  1.1  mrg 	}
   5464  1.1  mrg       emit_insn (gen_add3_insn (this_rtx, this_rtx, offset));
   5465  1.1  mrg     }
   5466  1.1  mrg 
   5467  1.1  mrg   /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
   5468  1.1  mrg   if (vcall_offset != 0)
   5469  1.1  mrg     {
   5470  1.1  mrg       rtx addr;
   5471  1.1  mrg 
   5472  1.1  mrg       /* Set TEMP1 to *THIS_RTX.  */
   5473  1.1  mrg       loongarch_emit_move (temp1, gen_rtx_MEM (Pmode, this_rtx));
   5474  1.1  mrg 
   5475  1.1  mrg       /* Set ADDR to a legitimate address for *THIS_RTX + VCALL_OFFSET.  */
   5476  1.1  mrg       addr = loongarch_add_offset (temp2, temp1, vcall_offset);
   5477  1.1  mrg 
   5478  1.1  mrg       /* Load the offset and add it to THIS_RTX.  */
   5479  1.1  mrg       loongarch_emit_move (temp1, gen_rtx_MEM (Pmode, addr));
   5480  1.1  mrg       emit_insn (gen_add3_insn (this_rtx, this_rtx, temp1));
   5481  1.1  mrg     }
   5482  1.1  mrg 
   5483  1.1  mrg   /* Jump to the target function.  Use a sibcall if direct jumps are
   5484  1.1  mrg      allowed, otherwise load the address into a register first.  */
   5485  1.1  mrg   if (use_sibcall_p)
   5486  1.1  mrg     {
   5487  1.1  mrg       insn = emit_call_insn (gen_sibcall_internal (fnaddr, const0_rtx));
   5488  1.1  mrg       SIBLING_CALL_P (insn) = 1;
   5489  1.1  mrg     }
   5490  1.1  mrg   else
   5491  1.1  mrg     {
   5492  1.1  mrg       loongarch_emit_move (temp1, fnaddr);
   5493  1.1  mrg       emit_jump_insn (gen_indirect_jump (temp1));
   5494  1.1  mrg     }
   5495  1.1  mrg 
   5496  1.1  mrg   /* Run just enough of rest_of_compilation.  This sequence was
   5497  1.1  mrg      "borrowed" from alpha.c.  */
   5498  1.1  mrg   insn = get_insns ();
   5499  1.1  mrg   split_all_insns_noflow ();
   5500  1.1  mrg   shorten_branches (insn);
   5501  1.1  mrg   assemble_start_function (thunk_fndecl, fnname);
   5502  1.1  mrg   final_start_function (insn, file, 1);
   5503  1.1  mrg   final (insn, file, 1);
   5504  1.1  mrg   final_end_function ();
   5505  1.1  mrg   assemble_end_function (thunk_fndecl, fnname);
   5506  1.1  mrg 
   5507  1.1  mrg   /* Stop pretending to be a post-reload pass.  */
   5508  1.1  mrg   reload_completed = 0;
   5509  1.1  mrg }
   5510  1.1  mrg 
   5511  1.1  mrg /* Allocate a chunk of memory for per-function machine-dependent data.  */
   5512  1.1  mrg 
   5513  1.1  mrg static struct machine_function *
   5514  1.1  mrg loongarch_init_machine_status (void)
   5515  1.1  mrg {
   5516  1.1  mrg   return ggc_cleared_alloc<machine_function> ();
   5517  1.1  mrg }
   5518  1.1  mrg 
   5519  1.1  mrg static void
   5520  1.1  mrg loongarch_option_override_internal (struct gcc_options *opts,
   5521  1.1  mrg 				    struct gcc_options *opts_set)
   5522  1.1  mrg {
   5523  1.1  mrg   int i, regno, mode;
   5524  1.1  mrg 
   5525  1.1  mrg   if (flag_pic)
   5526  1.1  mrg     g_switch_value = 0;
   5527  1.1  mrg 
   5528  1.1  mrg   /* Handle target-specific options: compute defaults/conflicts etc.  */
   5529  1.1  mrg   loongarch_config_target (&la_target, la_opt_switches,
   5530  1.1  mrg 			   la_opt_cpu_arch, la_opt_cpu_tune, la_opt_fpu,
   5531  1.1  mrg 			   la_opt_abi_base, la_opt_abi_ext, la_opt_cmodel, 0);
   5532  1.1  mrg 
   5533  1.1  mrg   loongarch_update_gcc_opt_status (&la_target, opts, opts_set);
   5534  1.1  mrg 
   5535  1.1  mrg   if (TARGET_ABI_LP64)
   5536  1.1  mrg     flag_pcc_struct_return = 0;
   5537  1.1  mrg 
   5538  1.1  mrg   /* Decide which rtx_costs structure to use.  */
   5539  1.1  mrg   if (optimize_size)
   5540  1.1  mrg     loongarch_cost = &loongarch_rtx_cost_optimize_size;
   5541  1.1  mrg   else
   5542  1.1  mrg     loongarch_cost = &loongarch_cpu_rtx_cost_data[LARCH_ACTUAL_TUNE];
   5543  1.1  mrg 
   5544  1.1  mrg   /* If the user hasn't specified a branch cost, use the processor's
   5545  1.1  mrg      default.  */
   5546  1.1  mrg   if (loongarch_branch_cost == 0)
   5547  1.1  mrg     loongarch_branch_cost = loongarch_cost->branch_cost;
   5548  1.1  mrg 
   5549  1.1  mrg 
   5550  1.1  mrg   switch (la_target.cmodel)
   5551  1.1  mrg     {
   5552  1.1  mrg       case CMODEL_TINY_STATIC:
   5553  1.1  mrg       case CMODEL_EXTREME:
   5554  1.1  mrg 	if (opts->x_flag_plt)
   5555  1.1  mrg 	  error ("code model %qs and %qs not support %s mode",
   5556  1.1  mrg 		 "tiny-static", "extreme", "plt");
   5557  1.1  mrg 	break;
   5558  1.1  mrg 
   5559  1.1  mrg       case CMODEL_NORMAL:
   5560  1.1  mrg       case CMODEL_TINY:
   5561  1.1  mrg       case CMODEL_LARGE:
   5562  1.1  mrg 	break;
   5563  1.1  mrg 
   5564  1.1  mrg       default:
   5565  1.1  mrg 	gcc_unreachable ();
   5566  1.1  mrg     }
   5567  1.1  mrg 
   5568  1.1  mrg   loongarch_init_print_operand_punct ();
   5569  1.1  mrg 
   5570  1.1  mrg   /* Set up array to map GCC register number to debug register number.
   5571  1.1  mrg      Ignore the special purpose register numbers.  */
   5572  1.1  mrg 
   5573  1.1  mrg   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
   5574  1.1  mrg     {
   5575  1.1  mrg       if (GP_REG_P (i) || FP_REG_P (i))
   5576  1.1  mrg 	loongarch_dwarf_regno[i] = i;
   5577  1.1  mrg       else
   5578  1.1  mrg 	loongarch_dwarf_regno[i] = INVALID_REGNUM;
   5579  1.1  mrg     }
   5580  1.1  mrg 
   5581  1.1  mrg   /* Set up loongarch_hard_regno_mode_ok.  */
   5582  1.1  mrg   for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
   5583  1.1  mrg     for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
   5584  1.1  mrg       loongarch_hard_regno_mode_ok_p[mode][regno]
   5585  1.1  mrg 	= loongarch_hard_regno_mode_ok_uncached (regno, (machine_mode) mode);
   5586  1.1  mrg 
   5587  1.1  mrg   /* Function to allocate machine-dependent function status.  */
   5588  1.1  mrg   init_machine_status = &loongarch_init_machine_status;
   5589  1.1  mrg }
   5590  1.1  mrg 
   5591  1.1  mrg 
/* Implement TARGET_OPTION_OVERRIDE.

   Thin entry point invoked by the option machinery after all command-line
   options have been parsed; delegates the real work to
   loongarch_option_override_internal on the global option state.  */

static void
loongarch_option_override (void)
{
  loongarch_option_override_internal (&global_options, &global_options_set);
}
   5599  1.1  mrg 
/* Implement TARGET_OPTION_SAVE.

   NOTE(review): the cl_target_option argument is ignored here; instead
   OPTS/OPTS_SET are refreshed from the global la_target state via
   loongarch_update_gcc_opt_status, and loongarch_option_restore reads the
   saved fields back out of the cl_target_option.  Confirm this asymmetric
   save/restore pairing is intentional.  */
static void
loongarch_option_save (struct cl_target_option *,
		       struct gcc_options *opts,
		       struct gcc_options *opts_set)
{
  loongarch_update_gcc_opt_status (&la_target, opts, opts_set);
}
   5608  1.1  mrg 
   5609  1.1  mrg /* Implement TARGET_OPTION_RESTORE.  */
   5610  1.1  mrg static void
   5611  1.1  mrg loongarch_option_restore (struct gcc_options *,
   5612  1.1  mrg 			  struct gcc_options *,
   5613  1.1  mrg 			  struct cl_target_option *ptr)
   5614  1.1  mrg {
   5615  1.1  mrg   la_target.cpu_arch = ptr->x_la_opt_cpu_arch;
   5616  1.1  mrg   la_target.cpu_tune = ptr->x_la_opt_cpu_tune;
   5617  1.1  mrg 
   5618  1.1  mrg   la_target.isa.fpu = ptr->x_la_opt_fpu;
   5619  1.1  mrg 
   5620  1.1  mrg   la_target.cmodel = ptr->x_la_opt_cmodel;
   5621  1.1  mrg }
   5622  1.1  mrg 
   5623  1.1  mrg /* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
   5624  1.1  mrg 
   5625  1.1  mrg static void
   5626  1.1  mrg loongarch_conditional_register_usage (void)
   5627  1.1  mrg {
   5628  1.1  mrg   if (!TARGET_HARD_FLOAT)
   5629  1.1  mrg     accessible_reg_set &= ~(reg_class_contents[FP_REGS]
   5630  1.1  mrg 			    | reg_class_contents[FCC_REGS]);
   5631  1.1  mrg }
   5632  1.1  mrg 
/* Implement EH_USES.

   Return true if REGNO must be considered live because an exception
   handler might be entered.  On this target no register needs such
   special treatment, so always answer false.  */

bool
loongarch_eh_uses (unsigned int regno ATTRIBUTE_UNUSED)
{
  return false;
}
   5640  1.1  mrg 
   5641  1.1  mrg /* Implement EPILOGUE_USES.  */
   5642  1.1  mrg 
   5643  1.1  mrg bool
   5644  1.1  mrg loongarch_epilogue_uses (unsigned int regno)
   5645  1.1  mrg {
   5646  1.1  mrg   /* Say that the epilogue uses the return address register.  Note that
   5647  1.1  mrg      in the case of sibcalls, the values "used by the epilogue" are
   5648  1.1  mrg      considered live at the start of the called function.  */
   5649  1.1  mrg   if (regno == RETURN_ADDR_REGNUM)
   5650  1.1  mrg     return true;
   5651  1.1  mrg 
   5652  1.1  mrg   return false;
   5653  1.1  mrg }
   5654  1.1  mrg 
/* Return true if the two loads (LOAD_P) or stores described by OPERANDS
   may be bonded into a paired memory access of mode MODE.  OPERANDS holds
   reg/mem pairs: for loads regs are operands[0]/[2] and mems are
   operands[1]/[3]; for stores the roles are swapped.  */

bool
loongarch_load_store_bonding_p (rtx *operands, machine_mode mode, bool load_p)
{
  rtx reg1, reg2, mem1, mem2, base1, base2;
  enum reg_class rc1, rc2;
  HOST_WIDE_INT offset1, offset2;

  /* Pick out the register and memory operands of each access according
     to whether this is a load pair or a store pair.  */
  if (load_p)
    {
      reg1 = operands[0];
      reg2 = operands[2];
      mem1 = operands[1];
      mem2 = operands[3];
    }
  else
    {
      reg1 = operands[1];
      reg2 = operands[3];
      mem1 = operands[0];
      mem2 = operands[2];
    }

  /* Both addresses must be expressible in a single instruction
     (loongarch_address_insns returning 0 means "not a valid address").  */
  if (loongarch_address_insns (XEXP (mem1, 0), mode, false) == 0
      || loongarch_address_insns (XEXP (mem2, 0), mode, false) == 0)
    return false;

  /* Decompose each address into base + constant offset.  */
  loongarch_split_plus (XEXP (mem1, 0), &base1, &offset1);
  loongarch_split_plus (XEXP (mem2, 0), &base2, &offset2);

  /* Base regs do not match.  */
  if (!REG_P (base1) || !rtx_equal_p (base1, base2))
    return false;

  /* Either of the loads is clobbering base register.  It is legitimate to bond
     loads if second load clobbers base register.  However, hardware does not
     support such bonding.  */
  if (load_p
      && (REGNO (reg1) == REGNO (base1) || (REGNO (reg2) == REGNO (base1))))
    return false;

  /* Loading in same registers.  */
  if (load_p && REGNO (reg1) == REGNO (reg2))
    return false;

  /* The loads/stores are not of same type.  */
  rc1 = REGNO_REG_CLASS (REGNO (reg1));
  rc2 = REGNO_REG_CLASS (REGNO (reg2));
  if (rc1 != rc2 && !reg_class_subset_p (rc1, rc2)
      && !reg_class_subset_p (rc2, rc1))
    return false;

  /* The two accesses must be exactly adjacent (in either order).  */
  if (abs (offset1 - offset2) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
   5711  1.1  mrg 
/* Implement TARGET_TRAMPOLINE_INIT.

   Lay out the trampoline at M_TRAMP as a short code sequence followed by
   two pointer-sized data slots: the static chain value CHAIN_VALUE and
   the address of FNDECL.  The code loads both pointers PC-relatively and
   jumps to the target function.  */

static void
loongarch_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx addr, end_addr, mem;
  rtx trampoline[8];
  unsigned int i, j;
  HOST_WIDE_INT end_addr_offset, static_chain_offset, target_function_offset;

  /* Work out the offsets of the pointers from the start of the
     trampoline code.  The data slots sit immediately after the code.  */
  end_addr_offset = TRAMPOLINE_CODE_SIZE;
  static_chain_offset = end_addr_offset;
  target_function_offset = static_chain_offset + GET_MODE_SIZE (ptr_mode);

  /* Get pointers to the beginning and end of the code block.  */
  addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr
    = loongarch_force_binary (Pmode, PLUS, addr, GEN_INT (end_addr_offset));

#define OP(X) gen_int_mode (X, SImode)

  /* Build up the code in TRAMPOLINE.  The sequence emitted is:
	pcaddi	$static_chain, 0
	ld.[dw]	$tmp, $static_chain, target_function_offset
	ld.[dw]	$static_chain, $static_chain, static_chain_offset
	jirl	$r0, $tmp, 0
     where $tmp is $t7 (r19).  NOTE(review): the opcode constants below
     (0x18000000 pcaddi, 0x28c00000 ld.d / 0x28800000 ld.w, 0x4c000000
     jirl) and the 12-bit immediate fields at bit 10 are assumed to match
     the LoongArch encoding — verify against the ISA manual.  */
  i = 0;
  trampoline[i++] = OP (0x18000000 | (STATIC_CHAIN_REGNUM - GP_REG_FIRST));
  trampoline[i++] = OP ((ptr_mode == DImode ? 0x28c00000 : 0x28800000)
			| 19 /* $t7  */
			| ((STATIC_CHAIN_REGNUM - GP_REG_FIRST) << 5)
			| ((target_function_offset & 0xfff) << 10));
  trampoline[i++] = OP ((ptr_mode == DImode ? 0x28c00000 : 0x28800000)
			| (STATIC_CHAIN_REGNUM - GP_REG_FIRST)
			| ((STATIC_CHAIN_REGNUM - GP_REG_FIRST) << 5)
			| ((static_chain_offset & 0xfff) << 10));
  trampoline[i++] = OP (0x4c000000 | (19 << 5));
#undef OP

  /* Copy the assembled instruction words into the trampoline.  */
  for (j = 0; j < i; j++)
   {
     mem = adjust_address (m_tramp, SImode, j * GET_MODE_SIZE (SImode));
     loongarch_emit_move (mem, trampoline[j]);
   }

  /* Set up the static chain pointer field.  */
  mem = adjust_address (m_tramp, ptr_mode, static_chain_offset);
  loongarch_emit_move (mem, chain_value);

  /* Set up the target function field.  */
  mem = adjust_address (m_tramp, ptr_mode, target_function_offset);
  loongarch_emit_move (mem, XEXP (DECL_RTL (fndecl), 0));

  /* Flush the code part of the trampoline.  */
  emit_insn (gen_add3_insn (end_addr, addr, GEN_INT (TRAMPOLINE_SIZE)));
  emit_insn (gen_clear_cache (addr, end_addr));
}
   5771  1.1  mrg 
   5772  1.1  mrg /* Implement HARD_REGNO_CALLER_SAVE_MODE.  */
   5773  1.1  mrg 
   5774  1.1  mrg machine_mode
   5775  1.1  mrg loongarch_hard_regno_caller_save_mode (unsigned int regno, unsigned int nregs,
   5776  1.1  mrg 				       machine_mode mode)
   5777  1.1  mrg {
   5778  1.1  mrg   /* For performance, avoid saving/restoring upper parts of a register
   5779  1.1  mrg      by returning MODE as save mode when the mode is known.  */
   5780  1.1  mrg   if (mode == VOIDmode)
   5781  1.1  mrg     return choose_hard_reg_mode (regno, nregs, NULL);
   5782  1.1  mrg   else
   5783  1.1  mrg     return mode;
   5784  1.1  mrg }
   5785  1.1  mrg 
/* Implement TARGET_SPILL_CLASS.

   Returning NO_REGS tells the register allocator not to use any
   alternative register class for spilling pseudos of class RCLASS;
   spills go straight to memory.  */

static reg_class_t
loongarch_spill_class (reg_class_t rclass ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}
   5794  1.1  mrg 
/* Implement TARGET_PROMOTE_FUNCTION_MODE.  */

/* This function is equivalent to default_promote_function_mode_always_promote
   except that it returns a promoted mode even if type is NULL_TREE.  This is
   needed by libcalls which have no type (only a mode) such as fixed conversion
   routines that take a signed or unsigned char/short argument and convert it
   to a fixed type.  */

static machine_mode
loongarch_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
				 machine_mode mode,
				 int *punsignedp ATTRIBUTE_UNUSED,
				 const_tree fntype ATTRIBUTE_UNUSED,
				 int for_return ATTRIBUTE_UNUSED)
{
  int unsignedp;

  /* With a real type, defer to the generic promotion rules.  */
  if (type != NULL_TREE)
    return promote_mode (type, mode, punsignedp);

  /* Typeless libcall argument: apply PROMOTE_MODE directly.  The macro
     updates MODE and UNSIGNEDP in place, so work on a local copy of the
     signedness flag and write it back afterwards.  */
  unsignedp = *punsignedp;
  PROMOTE_MODE (mode, unsignedp, type);
  *punsignedp = unsignedp;
  return mode;
}
   5820  1.1  mrg 
   5821  1.1  mrg /* Implement TARGET_STARTING_FRAME_OFFSET.  See loongarch_compute_frame_info
   5822  1.1  mrg    for details about the frame layout.  */
   5823  1.1  mrg 
   5824  1.1  mrg static HOST_WIDE_INT
   5825  1.1  mrg loongarch_starting_frame_offset (void)
   5826  1.1  mrg {
   5827  1.1  mrg   if (FRAME_GROWS_DOWNWARD)
   5828  1.1  mrg     return 0;
   5829  1.1  mrg   return crtl->outgoing_args_size;
   5830  1.1  mrg }
   5831  1.1  mrg 
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE loongarch_option_override
/* #undef before #define, matching every other entry in this table;
   TARGET_OPTION_SAVE was previously defined without the #undef.  */
#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE loongarch_option_save
#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE loongarch_option_restore
   5845  1.1  mrg 
   5846  1.1  mrg 
/* Addressing and section selection hooks.  */
#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS loongarch_legitimize_address

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION loongarch_select_rtx_section
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION loongarch_function_rodata_section

/* Instruction scheduling hooks.  */
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT loongarch_sched_init
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER loongarch_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 loongarch_sched_reorder2
#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE loongarch_variable_issue
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST loongarch_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE loongarch_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
  loongarch_multipass_dfa_lookahead

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL loongarch_function_ok_for_sibcall

/* Cost-model hooks.  */
#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE loongarch_valid_pointer_mode
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST loongarch_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST loongarch_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS loongarch_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST loongarch_address_cost

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P loongarch_in_small_data_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS loongarch_preferred_reload_class

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START loongarch_va_start

/* Calling-convention hooks.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE loongarch_promote_function_mode
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY loongarch_return_in_memory

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE loongarch_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE loongarch_libcall_value

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK loongarch_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

/* Assembly-output hooks.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND loongarch_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS loongarch_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P \
  loongarch_print_operand_punct_valid_p

/* Argument-passing hooks.  */
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS loongarch_setup_incoming_varargs
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE loongarch_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES loongarch_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG loongarch_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE loongarch_function_arg_advance
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY loongarch_function_arg_boundary

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P loongarch_scalar_mode_supported_p

/* Builtin hooks.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS loongarch_init_builtins
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL loongarch_builtin_decl
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN loongarch_expand_builtin

/* The generic ELF target does not always have TLS support.  */
#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS
#endif

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM loongarch_cannot_force_const_mem

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P loongarch_legitimate_constant_p

#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true

#ifdef HAVE_AS_DTPRELWORD
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL loongarch_output_dwarf_dtprel
#endif

/* Frame and register-allocation hooks.  */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P loongarch_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED loongarch_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE loongarch_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE loongarch_conditional_register_usage

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT loongarch_trampoline_init

#undef TARGET_ATOMIC_ASSIGN_EXPAND_FENV
#define TARGET_ATOMIC_ASSIGN_EXPAND_FENV loongarch_atomic_assign_expand_fenv

#undef TARGET_CALL_FUSAGE_CONTAINS_NON_CALLEE_CLOBBERS
#define TARGET_CALL_FUSAGE_CONTAINS_NON_CALLEE_CLOBBERS true

#undef TARGET_SPILL_CLASS
#define TARGET_SPILL_CLASS loongarch_spill_class

#undef TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS loongarch_hard_regno_nregs
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK loongarch_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P loongarch_modes_tieable_p

#undef TARGET_CUSTOM_FUNCTION_DESCRIPTORS
#define TARGET_CUSTOM_FUNCTION_DESCRIPTORS 2

#undef TARGET_CAN_CHANGE_MODE_CLASS
#define TARGET_CAN_CHANGE_MODE_CLASS loongarch_can_change_mode_class

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT loongarch_constant_alignment

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET loongarch_starting_frame_offset

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD loongarch_secondary_reload

#undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

/* The single definition of the target hook vector, built from the
   macros above plus the defaults in target-def.h.  */
struct gcc_target targetm = TARGET_INITIALIZER;
   6019  1.1  mrg 
   6020  1.1  mrg #include "gt-loongarch.h"
   6021