Lines matching refs:xi — uses of the WIDE_INT_REF_FOR temporary xi (apparently from GCC's wide-int.h); each hit is prefixed with its line number in the source file.
1115 WIDE_INT_REF_FOR (T) xi (x);
1116 precision = xi.precision;
1117 wi::copy (*this, xi);
1126 WIDE_INT_REF_FOR (T) xi (x);
1127 precision = xi.precision;
1128 wi::copy (*this, xi);
1454 WIDE_INT_REF_FOR (T) xi (x, m_precision);
1455 wi::copy (*this, xi);
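All three clusters above share the initialization idiom: wrap the operand in a WIDE_INT_REF_FOR (T) reference xi, record its precision (the third hit instead builds the reference at an already-fixed m_precision), and copy its HWI blocks with wi::copy. A minimal standalone model of that idiom (the widish type and init_from are hypothetical names, not GCC code):

  #include <algorithm>
  #include <cstdint>

  /* Hypothetical simplified representation: an array of 64-bit "HWI"
     blocks, a count of significant blocks, and a precision in bits.  */
  struct widish
  {
    int64_t val[4];
    unsigned int len;
    unsigned int precision;
  };

  /* Mirrors the pattern above: take the source's precision, then copy
     its significant blocks (the wi::copy step).  */
  inline void
  init_from (widish &dst, const widish &src)
  {
    dst.precision = src.precision;
    std::copy (src.val, src.val + src.len, dst.val);
    dst.len = src.len;
  }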
1811 WIDE_INT_REF_FOR (T) xi (x);
1812 return xi.len == 1;
1821 WIDE_INT_REF_FOR (T) xi (x);
1822 if (xi.precision <= HOST_BITS_PER_WIDE_INT)
1824 if (xi.len == 1)
1825 return xi.slow () >= 0;
1826 return xi.len == 2 && xi.uhigh () == 0;
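The hits at 1811-1826 look like wi::fits_shwi_p and wi::fits_uhwi_p. Because blocks are kept sign-extended, a value fits a signed HWI exactly when one block represents it; it fits an unsigned HWI when the whole precision is at most one block, when a single block is non-negative, or when a value with bit 63 set carries an explicit zero high block. A sketch under those assumptions (fits_shwi and fits_uhwi are illustrative stand-ins):

  #include <cstdint>

  struct widish { int64_t val[4]; unsigned int len; unsigned int precision; };

  inline bool
  fits_shwi (const widish &x)
  {
    return x.len == 1;                /* one sign-extended block suffices  */
  }

  inline bool
  fits_uhwi (const widish &x)
  {
    if (x.precision <= 64)            /* narrow values always fit  */
      return true;
    if (x.len == 1)
      return x.val[0] >= 0;           /* non-negative single block  */
    /* A zero top block marks a value whose bit 63 is set but which is
       still representable when read as unsigned.  */
    return x.len == 2 && x.val[1] == 0;
  }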
1835 WIDE_INT_REF_FOR (T) xi (x);
1838 return xi.sign_mask () < 0;
1846 WIDE_INT_REF_FOR (T) xi (x);
1847 return xi.sign_mask ();
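The neg_p/sign_mask hits (1835-1847) reduce the sign of the whole number to the sign of the most significant block. A sketch, reusing the representation assumed above:

  #include <cstdint>

  /* All-ones for negative values, zero otherwise (the sign_mask idea).  */
  inline int64_t
  sign_mask (const int64_t *val, unsigned int len)
  {
    return val[len - 1] >> 63;        /* arithmetic shift replicates the sign  */
  }

  inline bool
  neg_p (const int64_t *val, unsigned int len)
  {
    return sign_mask (val, len) < 0;
  }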
1856 WIDE_INT_REF_FOR (T1) xi (x, precision);
1858 if (xi.is_sign_extended && yi.is_sign_extended)
1861 if (xi.len != yi.len)
1865 if (xi.val[i] != yi.val[i])
1867 while (++i != xi.len);
1872 /* XI is only equal to YI if it too has a single HWI. */
1873 if (xi.len != 1)
1875 /* Excess bits in xi.val[0] will be signs or zeros, so comparisons
1878 return xi.val[0] == 0;
1880 unsigned HOST_WIDE_INT diff = xi.val[0] ^ yi.val[0];
1886 return eq_p_large (xi.val, xi.len, yi.val, yi.len, precision);
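The 1856-1886 hits appear to be wi::eq_p. When both operands are known to be sign-extended, the canonical form is unique, so equality is just "same length, same blocks"; everything else falls back to eq_p_large. A sketch of the fast path, assuming both inputs are canonical:

  #include <cstdint>

  inline bool
  eq_fast (const int64_t *xv, unsigned int xlen,
           const int64_t *yv, unsigned int ylen)
  {
    if (xlen != ylen)
      return false;                   /* canonical lengths must agree  */
    unsigned int i = 0;
    do                                /* same loop shape as hits 1865-1867  */
      if (xv[i] != yv[i])
        return false;
    while (++i != xlen);
    return true;
  }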
1903 WIDE_INT_REF_FOR (T1) xi (x, precision);
1910 return neg_p (xi);
1912 if (wi::fits_shwi_p (xi))
1913 return xi.to_shwi () < yi.to_shwi ();
1916 if (neg_p (xi))
1923 if (STATIC_CONSTANT_P (xi.len == 1))
1927 return lts_p_large (xi.val, xi.len, precision, yi.val, yi.len);
1936 WIDE_INT_REF_FOR (T1) xi (x, precision);
1940 return xi.len == 1 && xi.to_uhwi () < (unsigned HOST_WIDE_INT) yi.val[0];
1941 if (STATIC_CONSTANT_P (xi.len == 1 && xi.val[0] >= 0))
1942 xi.val[0];
1946 if (__builtin_expect (xi.len + yi.len == 2, true))
1948 unsigned HOST_WIDE_INT xl = xi.to_uhwi ();
1952 return ltu_p_large (xi.val, xi.len, precision, yi.val, yi.len);
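The lts_p/ltu_p hits (1903-1952) layer the comparisons: constants and single-block operands get direct HWI comparisons, and everything else goes to lts_p_large/ltu_p_large. Because blocks are sign-extended, comparing the extended 64-bit patterns preserves the unsigned order of the original precision, which is what makes the one-block fast path valid. A sketch of the unsigned case (ltu and ltu_large are illustrative stand-ins):

  #include <cstdint>

  /* Fallback: compare block by block from the top; missing high blocks
     are implicit sign extensions of each operand's top block.  */
  inline bool
  ltu_large (const int64_t *xv, unsigned int xlen,
             const int64_t *yv, unsigned int ylen)
  {
    unsigned int len = xlen > ylen ? xlen : ylen;
    for (unsigned int i = len; i-- > 0;)
      {
        uint64_t xb = i < xlen ? (uint64_t) xv[i] : (uint64_t) (xv[xlen - 1] >> 63);
        uint64_t yb = i < ylen ? (uint64_t) yv[i] : (uint64_t) (yv[ylen - 1] >> 63);
        if (xb != yb)
          return xb < yb;
      }
    return false;
  }

  inline bool
  ltu (const int64_t *xv, unsigned int xlen,
       const int64_t *yv, unsigned int ylen)
  {
    if (__builtin_expect (xlen + ylen == 2, true))
      return (uint64_t) xv[0] < (uint64_t) yv[0];  /* both fit one block  */
    return ltu_large (xv, xlen, yv, ylen);
  }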
2054 WIDE_INT_REF_FOR (T1) xi (x, precision);
2060 return neg_p (xi) ? -1 : !(xi.len == 1 && xi.val[0] == 0);
2062 if (wi::fits_shwi_p (xi))
2064 HOST_WIDE_INT xl = xi.to_shwi ();
2070 if (neg_p (xi))
2077 if (STATIC_CONSTANT_P (xi.len == 1))
2081 return cmps_large (xi.val, xi.len, precision, yi.val, yi.len);
2091 WIDE_INT_REF_FOR (T1) xi (x, precision);
2096 /* If XI doesn't fit in a HWI then it must be larger than YI. */
2097 if (xi.len != 1)
2100 unsigned HOST_WIDE_INT xl = xi.to_uhwi ();
2104 if (STATIC_CONSTANT_P (xi.len == 1 && xi.val[0] >= 0))
2106 /* If YI doesn't fit in a HWI then it must be larger than XI. */
2110 unsigned HOST_WIDE_INT xl = xi.val[0];
2117 if (__builtin_expect (xi.len + yi.len == 2, true))
2119 unsigned HOST_WIDE_INT xl = xi.to_uhwi ();
2123 return cmpu_large (xi.val, xi.len, precision, yi.val, yi.len);
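The cmps/cmpu hits (2054-2123) are the three-way versions of the same idea: handle the "fits in one HWI" cases inline and leave the rest to cmps_large/cmpu_large. A sketch of the signed fast path:

  #include <cstdint>

  /* Three-way compare for operands that fit a signed HWI.  */
  inline int
  cmps_fast (int64_t xl, int64_t yl)
  {
    return xl < yl ? -1 : xl > yl;    /* -1, 0 or 1  */
  }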
2144 WIDE_INT_REF_FOR (T) xi (x, get_precision (result));
2145 for (unsigned int i = 0; i < xi.len; ++i)
2146 val[i] = ~xi.val[i];
2147 result.set_len (xi.len);
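The bit_not hits (2144-2147) show why NOT needs no renormalization: the complement of a sign-extended block array is still sign-extended, so the length is unchanged. A sketch:

  #include <cstdint>

  inline unsigned int
  bit_not (int64_t *res, const int64_t *xv, unsigned int xlen)
  {
    for (unsigned int i = 0; i < xlen; ++i)
      res[i] = ~xv[i];
    return xlen;                      /* mirrors result.set_len (xi.len)  */
  }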
2184 WIDE_INT_REF_FOR (T) xi (x, precision);
2188 val[0] = sext_hwi (xi.ulow (), offset);
2192 result.set_len (sext_large (val, xi.val, xi.len, precision, offset));
2203 WIDE_INT_REF_FOR (T) xi (x, precision);
2209 wi::copy (result, xi);
2217 val[0] = zext_hwi (xi.ulow (), offset);
2221 result.set_len (zext_large (val, xi.val, xi.len, precision, offset), true);
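The sext/zext hits (2184-2221) take a single-block shortcut through sext_hwi/zext_hwi when the extension point sits inside the low block, and call sext_large/zext_large otherwise. Illustrative reimplementations of the single-block helpers (assuming 1 <= offset <= 64):

  #include <cstdint>

  /* Sign-extend SRC from bit OFFSET: shift the wanted field to the top,
     then shift back arithmetically so bit OFFSET-1 is replicated.  */
  inline int64_t
  sext_hwi (uint64_t src, unsigned int offset)
  {
    if (offset == 64)
      return (int64_t) src;
    unsigned int shift = 64 - offset;
    return (int64_t) (src << shift) >> shift;
  }

  /* Zero-extend SRC from bit OFFSET: mask off everything above it.  */
  inline uint64_t
  zext_hwi (uint64_t src, unsigned int offset)
  {
    if (offset == 64)
      return src;
    return src & (((uint64_t) 1 << offset) - 1);
  }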
2241 WIDE_INT_REF_FOR (T) xi (x, precision);
2244 val[0] = xi.ulow () | (HOST_WIDE_INT_1U << bit);
2248 result.set_len (set_bit_large (val, xi.val, xi.len, precision, bit));
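The set_bit hits (2241-2248) follow the same split: OR the low block with HOST_WIDE_INT_1U << bit when the bit lands in it, otherwise defer to set_bit_large. A one-line sketch of the fast path:

  #include <cstdint>

  inline uint64_t
  set_bit_low (uint64_t low, unsigned int bit)    /* assumes bit < 64  */
  {
    return low | ((uint64_t) 1 << bit);
  }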
2321 WIDE_INT_REF_FOR (T1) xi (x, precision);
2323 bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
2324 if (__builtin_expect (xi.len + yi.len == 2, true))
2326 val[0] = xi.ulow () & yi.ulow ();
2330 result.set_len (and_large (val, xi.val, xi.len, yi.val, yi.len,
2342 WIDE_INT_REF_FOR (T1) xi (x, precision);
2344 bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
2345 if (__builtin_expect (xi.len + yi.len == 2, true))
2347 val[0] = xi.ulow () & ~yi.ulow ();
2351 result.set_len (and_not_large (val, xi.val, xi.len, yi.val, yi.len,
2363 WIDE_INT_REF_FOR (T1) xi (x, precision);
2365 bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
2366 if (__builtin_expect (xi.len + yi.len == 2, true))
2368 val[0] = xi.ulow () | yi.ulow ();
2372 result.set_len (or_large (val, xi.val, xi.len,
2384 WIDE_INT_REF_FOR (T1) xi (x, precision);
2386 bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
2387 if (__builtin_expect (xi.len + yi.len == 2, true))
2389 val[0] = xi.ulow () | ~yi.ulow ();
2393 result.set_len (or_not_large (val, xi.val, xi.len, yi.val, yi.len,
2405 WIDE_INT_REF_FOR (T1) xi (x, precision);
2407 bool is_sign_extended = xi.is_sign_extended && yi.is_sign_extended;
2408 if (__builtin_expect (xi.len + yi.len == 2, true))
2410 val[0] = xi.ulow () ^ yi.ulow ();
2414 result.set_len (xor_large (val, xi.val, xi.len,
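The five binary bit operations (2321-2414) are stamped from one shape: propagate is_sign_extended from both inputs, take a single-block fast path when xi.len + yi.len == 2, and otherwise call the matching *_large worker. A sketch of that shared shape (bit_op_fast is an illustrative name):

  #include <cstdint>

  /* Returns true and stores the one-block result if the fast path
     applies; the caller would otherwise run and_large/or_large/...  */
  template <typename Op>
  inline bool
  bit_op_fast (int64_t *res, const int64_t *xv, unsigned int xlen,
               const int64_t *yv, unsigned int ylen, Op op)
  {
    if (__builtin_expect (xlen + ylen == 2, true))
      {
        res[0] = op (xv[0], yv[0]);
        return true;
      }
    return false;
  }

A call such as bit_op_fast (res, xv, 1, yv, 1, [] (int64_t a, int64_t b) { return a & ~b; }) covers the and_not variant seen at hit 2347.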
2426 WIDE_INT_REF_FOR (T1) xi (x, precision);
2430 val[0] = xi.ulow () + yi.ulow ();
2444 && __builtin_expect (xi.len + yi.len == 2, true))
2446 unsigned HOST_WIDE_INT xl = xi.ulow ();
2455 result.set_len (add_large (val, xi.val, xi.len,
2469 WIDE_INT_REF_FOR (T1) xi (x, precision);
2473 unsigned HOST_WIDE_INT xl = xi.ulow ();
2499 result.set_len (add_large (val, xi.val, xi.len,
2512 WIDE_INT_REF_FOR (T1) xi (x, precision);
2516 val[0] = xi.ulow () - yi.ulow ();
2530 && __builtin_expect (xi.len + yi.len == 2, true))
2532 unsigned HOST_WIDE_INT xl = xi.ulow ();
2541 result.set_len (sub_large (val, xi.val, xi.len,
2555 WIDE_INT_REF_FOR (T1) xi (x, precision);
2559 unsigned HOST_WIDE_INT xl = xi.ulow ();
2584 result.set_len (sub_large (val, xi.val, xi.len,
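The add/sub clusters (2426-2584) pair a plain single-block fast path with an overflow-checking variant. For one-block operands, unsigned overflow shows up as a sum smaller than an operand, and signed overflow as a result whose sign disagrees with both inputs. A sketch of a checked add under those rules:

  #include <cstdint>

  inline uint64_t
  add_check (uint64_t xl, uint64_t yl, bool *uns_ovf, bool *sgn_ovf)
  {
    uint64_t resultl = xl + yl;
    if (uns_ovf)
      *uns_ovf = resultl < xl;        /* carry out of the block  */
    if (sgn_ovf)                      /* sign differs from both operands  */
      *sgn_ovf = ((resultl ^ xl) & (resultl ^ yl)) >> 63;
    return resultl;
  }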
2597 WIDE_INT_REF_FOR (T1) xi (x, precision);
2601 val[0] = xi.ulow () * yi.ulow ();
2605 result.set_len (mul_internal (val, xi.val, xi.len, yi.val, yi.len,
2618 WIDE_INT_REF_FOR (T1) xi (x, precision);
2620 result.set_len (mul_internal (val, xi.val, xi.len,
2652 WIDE_INT_REF_FOR (T1) xi (x, precision);
2654 result.set_len (mul_internal (val, xi.val, xi.len,
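The mul hits (2597-2654) keep only the truncating one-block product inline (val[0] = xi.ulow () * yi.ulow ()) and push everything else, including the overflow-checked and high-part variants, into mul_internal. One way to check a one-block product for wraparound, using GCC's 128-bit extension (an illustration, not what mul_internal does):

  #include <cstdint>

  inline uint64_t
  mul_check (uint64_t xl, uint64_t yl, bool *overflow)
  {
    unsigned __int128 full = (unsigned __int128) xl * yl;
    if (overflow)
      *overflow = (full >> 64) != 0;  /* any high bits mean wraparound  */
    return (uint64_t) full;
  }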
2669 WIDE_INT_REF_FOR (T1) xi (x, precision);
2672 quotient.set_len (divmod_internal (quotient_val, 0, 0, xi.val, xi.len,
2705 WIDE_INT_REF_FOR (T1) xi (x, precision);
2711 xi.val, xi.len, precision,
2747 WIDE_INT_REF_FOR (T1) xi (x, precision);
2753 xi.val, xi.len, precision,
2780 WIDE_INT_REF_FOR (T1) xi (x, precision);
2786 xi.val, xi.len, precision,
2823 WIDE_INT_REF_FOR (T1) xi (x, precision);
2829 xi.val, xi.len, precision,
2867 WIDE_INT_REF_FOR (T1) xi (x, precision);
2872 xi.val, xi.len, precision,
2907 WIDE_INT_REF_FOR (T1) xi (x, precision);
2913 xi.val, xi.len, precision,
2943 WIDE_INT_REF_FOR (T1) xi (x, precision);
2949 xi.val, xi.len, precision,
2969 WIDE_INT_REF_FOR (T1) xi (x, precision);
2975 xi.val, xi.len, precision,
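All of the 2669-2975 hits feed xi.val, xi.len and the precision into divmod_internal, which computes quotient and remainder in one pass; the many entry points (trunc/floor/ceil/round, signed/unsigned) differ only in how they adjust that result. The floor adjustment, for example, steps the truncating quotient down when the remainder is nonzero and the operand signs differ:

  #include <cstdint>

  /* Illustrative scalar version of the trunc-to-floor fixup.  */
  inline int64_t
  div_floor (int64_t x, int64_t y)
  {
    int64_t q = x / y;                /* C division truncates toward zero  */
    int64_t r = x % y;
    if (r != 0 && ((x < 0) != (y < 0)))
      --q;                            /* round toward negative infinity  */
    return q;
  }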
3037 WIDE_INT_REF_FOR (T1) xi (x, precision);
3056 if (STATIC_CONSTANT_P (xi.precision > HOST_BITS_PER_WIDE_INT)
3058 && xi.len == 1
3059 && IN_RANGE (xi.val[0], 0, HOST_WIDE_INT_MAX >> shift))
3062 val[0] = xi.ulow () << shift;
3066 result.set_len (lshift_large (val, xi.val, xi.len,
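The lshift hits (3037-3066) only take the inline path when the operand is a single non-negative block whose shifted value provably still fits, which is what the IN_RANGE (xi.val[0], 0, HOST_WIDE_INT_MAX >> shift) test checks. A sketch of that guard (assumes shift < 64):

  #include <cstdint>

  inline bool
  lshift_fast (int64_t *res, int64_t xl, unsigned int shift)
  {
    if (xl >= 0 && xl <= (INT64_MAX >> shift))
      {
        res[0] = xl << shift;         /* no blocks lost, no sign flip  */
        return true;
      }
    return false;                     /* caller takes the lshift_large path  */
  }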
3081 WIDE_INT_REF_FOR (T1) xi (x);
3084 if (geu_p (yi, xi.precision))
3100 if (STATIC_CONSTANT_P (xi.precision > HOST_BITS_PER_WIDE_INT)
3102 && xi.len == 1
3103 && xi.val[0] >= 0)
3104 : xi.precision <= HOST_BITS_PER_WIDE_INT)
3106 val[0] = xi.to_uhwi () >> shift;
3110 result.set_len (lrshift_large (val, xi.val, xi.len, xi.precision,
3125 WIDE_INT_REF_FOR (T1) xi (x);
3128 if (geu_p (yi, xi.precision))
3136 if (xi.precision <= HOST_BITS_PER_WIDE_INT)
3138 val[0] = sext_hwi (xi.ulow () >> shift, xi.precision - shift);
3142 result.set_len (arshift_large (val, xi.val, xi.len, xi.precision,
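The right-shift hits (3081-3142) first clamp oversized shift amounts with geu_p (yi, xi.precision), then split on logical versus arithmetic: lrshift shifts the zero-extended value, while arshift re-sign-extends what remains, reusing the sext_hwi step sketched earlier. Single-block versions (assume the value is zero-extended to PRECISION <= 64 bits and shift < precision):

  #include <cstdint>

  inline uint64_t
  lrshift_fast (uint64_t xl, unsigned int shift)
  {
    return xl >> shift;               /* zeros enter from the top  */
  }

  inline int64_t
  arshift_fast (uint64_t xl, unsigned int shift, unsigned int precision)
  {
    unsigned int rem = precision - shift;        /* bits still meaningful  */
    unsigned int s = 64 - rem;
    return (int64_t) ((xl >> shift) << s) >> s;  /* sign-extend from rem-1  */
  }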
3215 WIDE_INT_REF_FOR (T) xi (x, precision);
3224 unsigned HOST_WIDE_INT res = xi.elt (start);
3228 unsigned HOST_WIDE_INT upper = xi.elt (start + 1);
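The final hits (3215-3228) look like wi::extract_uhwi: grab the block containing the first requested bit and, when the field straddles a block boundary, merge in bits from the block above before masking to the field width. An illustrative standalone version (assumes the block array covers the requested range and 1 <= width <= 64):

  #include <cstdint>

  inline uint64_t
  extract_bits (const uint64_t *val, unsigned int bitpos, unsigned int width)
  {
    unsigned int start = bitpos / 64;
    unsigned int shift = bitpos % 64;
    uint64_t res = val[start] >> shift;
    if (shift + width > 64)           /* field crosses a block boundary  */
      res |= val[start + 1] << (64 - shift);
    return width == 64 ? res : res & (((uint64_t) 1 << width) - 1);
  }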