/* gmp.h */
/************ Low level positive-integer (i.e. N) routines. ************/
/* This is ugly, but we need to make user calls reach the prefixed function. */
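/* Example (illustrative sketch, not part of gmp.h): the mpn layer operates on
   raw limb arrays, least significant limb first, and each name below is a
   macro mapping onto the prefixed library symbol (e.g. __gmpn_add_n).  The
   operand values here are arbitrary and assume a build without nails; the
   block is kept inside "#if 0" so it is never compiled as part of the
   header. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mp_limb_t a[2] = { 3, 1 };    /* value 1*B + 3, where B = 2^GMP_NUMB_BITS */
  mp_limb_t b[2] = { 4, 2 };    /* value 2*B + 4 */
  mp_limb_t r[2];
  mp_limb_t carry = mpn_add_n (r, a, b, 2);   /* r = {7, 3}, carry = 0 */
  printf ("low=%lu high=%lu carry=%lu\n",
          (unsigned long) r[0], (unsigned long) r[1], (unsigned long) carry);
  return 0;
}
#endif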
#define mpn_add __MPN(add)
#if __GMP_INLINE_PROTOTYPES || defined (__GMP_FORCE_mpn_add)
__GMP_DECLSPEC mp_limb_t mpn_add __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
#endif
#define mpn_add_1 __MPN(add_1)
#if __GMP_INLINE_PROTOTYPES || defined (__GMP_FORCE_mpn_add_1)
__GMP_DECLSPEC mp_limb_t mpn_add_1 __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t)) __GMP_NOTHROW;
#endif
#define mpn_add_n __MPN(add_n)
__GMP_DECLSPEC mp_limb_t mpn_add_n __GMP_PROTO ((mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#define mpn_addmul_1 __MPN(addmul_1)
__GMP_DECLSPEC mp_limb_t mpn_addmul_1 __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#define mpn_bdivmod __MPN(bdivmod)
__GMP_DECLSPEC mp_limb_t mpn_bdivmod __GMP_PROTO ((mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, unsigned long int));
#define mpn_cmp __MPN(cmp)
#if __GMP_INLINE_PROTOTYPES || defined (__GMP_FORCE_mpn_cmp)
__GMP_DECLSPEC int mpn_cmp __GMP_PROTO ((mp_srcptr, mp_srcptr, mp_size_t)) __GMP_NOTHROW __GMP_ATTRIBUTE_PURE;
#endif
#define mpn_divexact_by3(dst,src,size) \
  mpn_divexact_by3c (dst, src, size, __GMP_CAST (mp_limb_t, 0))
#define mpn_divexact_by3c __MPN(divexact_by3c)
__GMP_DECLSPEC mp_limb_t mpn_divexact_by3c __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#define mpn_divmod_1(qp,np,nsize,dlimb) \
  mpn_divrem_1 (qp, __GMP_CAST (mp_size_t, 0), np, nsize, dlimb)
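/* Example (illustrative sketch, not part of gmp.h): mpn_divmod_1 is simply
   mpn_divrem_1 with zero high fraction limbs, dividing a limb vector by a
   single limb and returning the remainder.  The values are arbitrary and the
   divisor is assumed nonzero; the block is "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mp_limb_t n[2] = { 7, 5 };    /* numerator 5*B + 7 */
  mp_limb_t q[2];               /* quotient gets the same number of limbs */
  mp_limb_t r = mpn_divmod_1 (q, n, 2, 3);   /* divide by 3 */
  printf ("remainder=%lu\n", (unsigned long) r);
  return 0;
}
#endif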
#define mpn_divrem __MPN(divrem)
__GMP_DECLSPEC mp_limb_t mpn_divrem __GMP_PROTO ((mp_ptr, mp_size_t, mp_ptr, mp_size_t, mp_srcptr, mp_size_t));
#define mpn_divrem_1 __MPN(divrem_1)
__GMP_DECLSPEC mp_limb_t mpn_divrem_1 __GMP_PROTO ((mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_limb_t));
#define mpn_divrem_2 __MPN(divrem_2)
__GMP_DECLSPEC mp_limb_t mpn_divrem_2 __GMP_PROTO ((mp_ptr, mp_size_t, mp_ptr, mp_size_t, mp_srcptr));
#define mpn_gcd __MPN(gcd)
__GMP_DECLSPEC mp_size_t mpn_gcd __GMP_PROTO ((mp_ptr, mp_ptr, mp_size_t, mp_ptr, mp_size_t));
#define mpn_gcd_1 __MPN(gcd_1)
__GMP_DECLSPEC mp_limb_t mpn_gcd_1 __GMP_PROTO ((mp_srcptr, mp_size_t, mp_limb_t)) __GMP_ATTRIBUTE_PURE;
#define mpn_gcdext __MPN(gcdext)
__GMP_DECLSPEC mp_size_t mpn_gcdext __GMP_PROTO ((mp_ptr, mp_ptr, mp_size_t *, mp_ptr, mp_size_t, mp_ptr, mp_size_t));
#define mpn_get_str __MPN(get_str)
__GMP_DECLSPEC size_t mpn_get_str __GMP_PROTO ((unsigned char *, int, mp_ptr, mp_size_t));
#define mpn_hamdist __MPN(hamdist)
__GMP_DECLSPEC unsigned long int mpn_hamdist __GMP_PROTO ((mp_srcptr, mp_srcptr, mp_size_t)) __GMP_NOTHROW __GMP_ATTRIBUTE_PURE;
#define mpn_lshift __MPN(lshift)
__GMP_DECLSPEC mp_limb_t mpn_lshift __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, unsigned int));
#define mpn_mod_1 __MPN(mod_1)
__GMP_DECLSPEC mp_limb_t mpn_mod_1 __GMP_PROTO ((mp_srcptr, mp_size_t, mp_limb_t)) __GMP_ATTRIBUTE_PURE;
#define mpn_mul __MPN(mul)
__GMP_DECLSPEC mp_limb_t mpn_mul __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
#define mpn_mul_1 __MPN(mul_1)
__GMP_DECLSPEC mp_limb_t mpn_mul_1 __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#define mpn_mul_n __MPN(mul_n)
__GMP_DECLSPEC void mpn_mul_n __GMP_PROTO ((mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#define mpn_perfect_square_p __MPN(perfect_square_p)
__GMP_DECLSPEC int mpn_perfect_square_p __GMP_PROTO ((mp_srcptr, mp_size_t)) __GMP_ATTRIBUTE_PURE;
#define mpn_popcount __MPN(popcount)
__GMP_DECLSPEC unsigned long int mpn_popcount __GMP_PROTO ((mp_srcptr, mp_size_t)) __GMP_NOTHROW __GMP_ATTRIBUTE_PURE;
#define mpn_pow_1 __MPN(pow_1)
__GMP_DECLSPEC mp_size_t mpn_pow_1 __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t, mp_ptr));
/* undocumented now, but retained here for upward compatibility */
#define mpn_preinv_mod_1 __MPN(preinv_mod_1)
__GMP_DECLSPEC mp_limb_t mpn_preinv_mod_1 __GMP_PROTO ((mp_srcptr, mp_size_t, mp_limb_t, mp_limb_t)) __GMP_ATTRIBUTE_PURE;
#define mpn_random __MPN(random)
__GMP_DECLSPEC void mpn_random __GMP_PROTO ((mp_ptr, mp_size_t));
#define mpn_random2 __MPN(random2)
__GMP_DECLSPEC void mpn_random2 __GMP_PROTO ((mp_ptr, mp_size_t));
#define mpn_rshift __MPN(rshift)
__GMP_DECLSPEC mp_limb_t mpn_rshift __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, unsigned int));
#define mpn_scan0 __MPN(scan0)
__GMP_DECLSPEC unsigned long int mpn_scan0 __GMP_PROTO ((mp_srcptr, unsigned long int)) __GMP_ATTRIBUTE_PURE;
#define mpn_scan1 __MPN(scan1)
__GMP_DECLSPEC unsigned long int mpn_scan1 __GMP_PROTO ((mp_srcptr, unsigned long int)) __GMP_ATTRIBUTE_PURE;
#define mpn_set_str __MPN(set_str)
__GMP_DECLSPEC mp_size_t mpn_set_str __GMP_PROTO ((mp_ptr, __gmp_const unsigned char *, size_t, int));
#define mpn_sqrtrem __MPN(sqrtrem)
__GMP_DECLSPEC mp_size_t mpn_sqrtrem __GMP_PROTO ((mp_ptr, mp_ptr, mp_srcptr, mp_size_t));
#define mpn_sub __MPN(sub)
#if __GMP_INLINE_PROTOTYPES || defined (__GMP_FORCE_mpn_sub)
__GMP_DECLSPEC mp_limb_t mpn_sub __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
#endif
#define mpn_sub_1 __MPN(sub_1)
#if __GMP_INLINE_PROTOTYPES || defined (__GMP_FORCE_mpn_sub_1)
__GMP_DECLSPEC mp_limb_t mpn_sub_1 __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t)) __GMP_NOTHROW;
#endif
#define mpn_sub_n __MPN(sub_n)
__GMP_DECLSPEC mp_limb_t mpn_sub_n __GMP_PROTO ((mp_ptr, mp_srcptr, mp_srcptr, mp_size_t));
#define mpn_submul_1 __MPN(submul_1)
__GMP_DECLSPEC mp_limb_t mpn_submul_1 __GMP_PROTO ((mp_ptr, mp_srcptr, mp_size_t, mp_limb_t));
#define mpn_tdiv_qr __MPN(tdiv_qr)
__GMP_DECLSPEC void mpn_tdiv_qr __GMP_PROTO ((mp_ptr, mp_ptr, mp_size_t, mp_srcptr, mp_size_t, mp_srcptr, mp_size_t));
/**************** mpz inlines ****************/
/* The following are provided as inlines where possible, but always exist as
   library functions too, for binary compatibility.

   Within gmp itself this inlining generally isn't relied on, since it
   doesn't get done for all compilers, whereas if something is worth
   inlining then it's worth arranging always.

   There are two styles of inlining here.  When the same bit of code is
   wanted for the inline as for the library version, then __GMP_FORCE_foo
   arranges for that code to be emitted and the __GMP_EXTERN_INLINE
   directive suppressed, eg. mpz_fits_uint_p.  When a different bit of code
   is wanted for the inline than for the library version, then
   __GMP_FORCE_foo arranges the inline to be suppressed, eg. mpz_abs.  */
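/* Example (illustrative sketch, not part of gmp.h): how a library source file
   could force the plain, out-of-line copy of an inline such as
   mpz_fits_uint_p.  The file name is hypothetical and the real GMP build may
   route this through its internal headers instead. */
#if 0
/* fits_uint.c (hypothetical) */
#define __GMP_FORCE_mpz_fits_uint_p 1
#include "gmp.h"
/* gmp.h now emits mpz_fits_uint_p without __GMP_EXTERN_INLINE, so this
   translation unit provides the real symbol needed for binary
   compatibility. */
#endif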
#if defined (__GMP_EXTERN_INLINE) && ! defined (__GMP_FORCE_mpz_abs)
__GMP_EXTERN_INLINE void
mpz_abs (mpz_ptr __gmp_w, mpz_srcptr __gmp_u)
{
  if (__gmp_w != __gmp_u)
    mpz_set (__gmp_w, __gmp_u);
  __gmp_w->_mp_size = __GMP_ABS (__gmp_w->_mp_size);
}
#endif
#if GMP_NAIL_BITS == 0
#define __GMPZ_FITS_UTYPE_P(z,maxval) \
  mp_size_t __gmp_n = z->_mp_size;                                      \
  mp_ptr __gmp_p = z->_mp_d;                                            \
  return (__gmp_n == 0 || (__gmp_n == 1 && __gmp_p[0] <= maxval));
#else
#define __GMPZ_FITS_UTYPE_P(z,maxval) \
  mp_size_t __gmp_n = z->_mp_size;                                      \
  mp_ptr __gmp_p = z->_mp_d;                                            \
  return (__gmp_n == 0 || (__gmp_n == 1 && __gmp_p[0] <= maxval)        \
          || (__gmp_n == 2 && __gmp_p[1] <= ((mp_limb_t) maxval >> GMP_NUMB_BITS)));
#endif
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_fits_uint_p)
#if ! defined (__GMP_FORCE_mpz_fits_uint_p)
__GMP_EXTERN_INLINE
#endif
int
mpz_fits_uint_p (mpz_srcptr __gmp_z) __GMP_NOTHROW
{
  __GMPZ_FITS_UTYPE_P (__gmp_z, __GMP_UINT_MAX);
}
#endif
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_fits_ulong_p)
#if ! defined (__GMP_FORCE_mpz_fits_ulong_p)
__GMP_EXTERN_INLINE
#endif
int
mpz_fits_ulong_p (mpz_srcptr __gmp_z) __GMP_NOTHROW
{
  __GMPZ_FITS_UTYPE_P (__gmp_z, __GMP_ULONG_MAX);
}
#endif
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_fits_ushort_p)
#if ! defined (__GMP_FORCE_mpz_fits_ushort_p)
__GMP_EXTERN_INLINE
#endif
int
mpz_fits_ushort_p (mpz_srcptr __gmp_z) __GMP_NOTHROW
{
  __GMPZ_FITS_UTYPE_P (__gmp_z, __GMP_USHRT_MAX);
}
#endif
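/* Example (illustrative sketch, not part of gmp.h): the fits tests report
   whether an mpz value converts losslessly to the given C type.  It assumes
   the usual 16-bit unsigned short and is "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mpz_t z;
  mpz_init_set_ui (z, 70000);   /* larger than USHRT_MAX on typical systems */
  printf ("ushort:%d uint:%d ulong:%d\n",
          mpz_fits_ushort_p (z) != 0,
          mpz_fits_uint_p (z) != 0,
          mpz_fits_ulong_p (z) != 0);
  /* expected: ushort:0 uint:1 ulong:1 */
  mpz_clear (z);
  return 0;
}
#endif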
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_get_ui)
#if ! defined (__GMP_FORCE_mpz_get_ui)
__GMP_EXTERN_INLINE
#endif
unsigned long
mpz_get_ui (mpz_srcptr __gmp_z) __GMP_NOTHROW
{
  mp_ptr __gmp_p = __gmp_z->_mp_d;
  mp_size_t __gmp_n = __gmp_z->_mp_size;
  mp_limb_t __gmp_l = __gmp_p[0];
  if (__GMP_ULONG_MAX <= GMP_NUMB_MASK)
    return __gmp_l & (-(mp_limb_t) (__gmp_n != 0));
#if GMP_NAIL_BITS != 0  /* redundant #if, shuts up compiler warnings */
  else                  /* happens for nails, but not if LONG_LONG_LIMB */
    {                   /* assume two limbs are enough to fill an ulong */
      __gmp_n = __GMP_ABS (__gmp_n);
      if (__gmp_n <= 1)
        return __gmp_l & (-(mp_limb_t) (__gmp_n != 0));
      else
        return __gmp_l + (__gmp_p[1] << GMP_NUMB_BITS);
    }
#endif
}
#endif
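/* Example (illustrative sketch, not part of gmp.h): mpz_get_ui ignores the
   sign and returns only the low bits that fit in an unsigned long, which is
   what the masking above implements.  Values are arbitrary; "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mpz_t z;
  mpz_init_set_si (z, -5);
  printf ("%lu\n", mpz_get_ui (z));   /* 5: the sign is ignored */
  mpz_set_ui (z, 0);
  mpz_setbit (z, 100);                /* z = 2^100, wider than an unsigned long */
  mpz_add_ui (z, z, 7);
  printf ("%lu\n", mpz_get_ui (z));   /* 7: only the low bits that fit remain */
  mpz_clear (z);
  return 0;
}
#endif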
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_getlimbn)
#if ! defined (__GMP_FORCE_mpz_getlimbn)
__GMP_EXTERN_INLINE
#endif
mp_limb_t
mpz_getlimbn (mpz_srcptr __gmp_z, mp_size_t __gmp_n) __GMP_NOTHROW
{
  if (__GMP_ABS (__gmp_z->_mp_size) <= __gmp_n || __gmp_n < 0)
    return 0;
  else
    return __gmp_z->_mp_d[__gmp_n];
}
#endif
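/* Example (illustrative sketch, not part of gmp.h): mpz_getlimbn reads one
   limb of the absolute value, with out-of-range indices returning 0 as the
   inline above shows.  "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mpz_t z;
  mpz_init (z);
  mpz_setbit (z, GMP_NUMB_BITS);      /* z = B, so limb 0 is 0 and limb 1 is 1 */
  printf ("%lu %lu %lu\n",
          (unsigned long) mpz_getlimbn (z, 0),
          (unsigned long) mpz_getlimbn (z, 1),
          (unsigned long) mpz_getlimbn (z, 5));   /* 0 1 0 */
  mpz_clear (z);
  return 0;
}
#endif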
#if defined (__GMP_EXTERN_INLINE) && ! defined (__GMP_FORCE_mpz_neg)
__GMP_EXTERN_INLINE void
mpz_neg (mpz_ptr __gmp_w, mpz_srcptr __gmp_u)
{
  if (__gmp_w != __gmp_u)
    mpz_set (__gmp_w, __gmp_u);
  __gmp_w->_mp_size = - __gmp_w->_mp_size;
}
#endif
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_perfect_square_p)
#if ! defined (__GMP_FORCE_mpz_perfect_square_p)
__GMP_EXTERN_INLINE
#endif
int
mpz_perfect_square_p (mpz_srcptr __gmp_a)
{
  mp_size_t __gmp_asize = __gmp_a->_mp_size;
  if (__gmp_asize <= 0)
    return (__gmp_asize == 0);  /* zero is a square, negatives are not */
  else
    return mpn_perfect_square_p (__gmp_a->_mp_d, __gmp_asize);
}
#endif
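/* Example (illustrative sketch, not part of gmp.h): mpz_perfect_square_p
   treats zero as a square and every negative value as a non-square, exactly
   as the size test above encodes.  "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mpz_t z;
  mpz_init_set_ui (z, 49);
  printf ("%d\n", mpz_perfect_square_p (z) != 0);   /* 1 */
  mpz_set_si (z, -49);
  printf ("%d\n", mpz_perfect_square_p (z) != 0);   /* 0: negatives are not squares */
  mpz_set_ui (z, 0);
  printf ("%d\n", mpz_perfect_square_p (z) != 0);   /* 1: zero counts as a square */
  mpz_clear (z);
  return 0;
}
#endif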
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_popcount)
#if ! defined (__GMP_FORCE_mpz_popcount)
__GMP_EXTERN_INLINE
#endif
unsigned long
mpz_popcount (mpz_srcptr __gmp_u) __GMP_NOTHROW
{
  mp_size_t __gmp_usize = __gmp_u->_mp_size;
  if (__gmp_usize <= 0)
    return (__gmp_usize < 0 ? __GMP_ULONG_MAX : 0);
  else
    return mpn_popcount (__gmp_u->_mp_d, __gmp_usize);
}
#endif
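/* Example (illustrative sketch, not part of gmp.h): mpz_popcount counts set
   bits of a non-negative value and returns the largest unsigned long for a
   negative one (an "infinite" count), matching the inline above.
   "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <limits.h>
#include <gmp.h>

int
main (void)
{
  mpz_t z;
  mpz_init_set_ui (z, 0xF0F0);                     /* eight one-bits */
  printf ("%lu\n", mpz_popcount (z));              /* 8 */
  mpz_set_si (z, -1);
  printf ("%d\n", mpz_popcount (z) == ULONG_MAX);  /* 1 */
  mpz_clear (z);
  return 0;
}
#endif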
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_set_q)
#if ! defined (__GMP_FORCE_mpz_set_q)
__GMP_EXTERN_INLINE
#endif
void
mpz_set_q (mpz_ptr __gmp_w, mpq_srcptr __gmp_u)
{
  mpz_tdiv_q (__gmp_w, mpq_numref (__gmp_u), mpq_denref (__gmp_u));
}
#endif
#if defined (__GMP_EXTERN_INLINE) || defined (__GMP_FORCE_mpz_size)
#if ! defined (__GMP_FORCE_mpz_size)
__GMP_EXTERN_INLINE
#endif
size_t
mpz_size (mpz_srcptr __gmp_z) __GMP_NOTHROW
{
  return __GMP_ABS (__gmp_z->_mp_size);
}
#endif
/**************** mpq inlines ****************/
#if defined (__GMP_EXTERN_INLINE) && ! defined (__GMP_FORCE_mpq_abs)
__GMP_EXTERN_INLINE void
mpq_abs (mpq_ptr __gmp_w, mpq_srcptr __gmp_u)
{
  if (__gmp_w != __gmp_u)
    mpq_set (__gmp_w, __gmp_u);
  __gmp_w->_mp_num._mp_size = __GMP_ABS (__gmp_w->_mp_num._mp_size);
}
#endif
#if defined (__GMP_EXTERN_INLINE) && ! defined (__GMP_FORCE_mpq_neg)
__GMP_EXTERN_INLINE void
mpq_neg (mpq_ptr __gmp_w, mpq_srcptr __gmp_u)
{
  if (__gmp_w != __gmp_u)
    mpq_set (__gmp_w, __gmp_u);
  __gmp_w->_mp_num._mp_size = - __gmp_w->_mp_num._mp_size;
}
#endif
/**************** mpn inlines ****************/
/* The comments with __GMPN_ADD_1 below apply here too.

   The test for FUNCTION returning 0 should predict well.  If it's assumed
   {yp,ysize} will usually have a random number of bits then the high limb
   won't be full and a carry out will occur a good deal less than 50% of the
   time.

   ysize==0 isn't a documented feature, but is used internally in a few
   places.

   Producing cout last stops it using up a register during the main part of
   the calculation, though gcc (as of 3.0) on an "if (mpn_add (...))"
   doesn't seem able to move the true and false legs of the conditional up
   to the two places cout is generated.  */
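/* Example (illustrative sketch, not part of gmp.h): the "if (mpn_add (...))"
   pattern discussed above, propagating a rare carry out into an extra limb.
   The operand values are arbitrary and assume a build without nails;
   "#if 0"-guarded. */
#if 0
#include <stdio.h>
#include <gmp.h>

int
main (void)
{
  mp_limb_t x[3] = { 1, 2, 3 };   /* the larger operand, 3 limbs */
  mp_limb_t y[1] = { 9 };         /* the smaller operand, 1 limb */
  mp_limb_t r[4];
  mp_size_t rn = 3;
  if (mpn_add (r, x, 3, y, 1))    /* carry out is the uncommon case */
    r[rn++] = 1;                  /* extend the result by one limb */
  printf ("result has %ld limbs, low limb %lu\n",
          (long) rn, (unsigned long) r[0]);
  return 0;
}
#endif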