/* #define MPMONT_DISABLE */
+#define MPMONT_KTHRESH (16*MPK_THRESH)
+
/*----- Low-level implementation ------------------------------------------*/
#ifndef MPMONT_DISABLE
MAYBE_REDC4(x86_sse2)
#endif
+#if CPUFAM_AMD64
+ MAYBE_REDC4(amd64_sse2)
+#endif
+
/* --- @pick_redccore@ --- *
 *
 * Returns a pointer to the best available Montgomery-reduction core for
 * the CPU we're running on: an SSE2 implementation on x86/AMD64 when the
 * SSE2 feature bit is set, otherwise the portable @simple_redccore@.
 *
 * NOTE(review): SSE2 is architecturally guaranteed on AMD64, so the
 * @cpu_feature_p@ check there is presumably belt-and-braces (or honours a
 * feature-override mechanism in @cpu_feature_p@) -- confirm.  Lines marked
 * with a leading `+' are diff-added lines in this patch view.
 */
static redccore__functype *pick_redccore(void)
{
#if CPUFAM_X86
  DISPATCH_PICK_COND(mpmont_reduce, maybe_redc4_x86_sse2,
		     cpu_feature_p(CPUFEAT_X86_SSE2));
#endif
+#if CPUFAM_AMD64
+  DISPATCH_PICK_COND(mpmont_reduce, maybe_redc4_amd64_sse2,
+		     cpu_feature_p(CPUFEAT_X86_SSE2));
+#endif
  DISPATCH_PICK_FALLBACK(mpmont_reduce, simple_redccore);
}
MAYBE_MUL4(x86_sse2)
#endif
+#if CPUFAM_AMD64
+ MAYBE_MUL4(amd64_sse2)
+#endif
+
/* --- @pick_mulcore@ --- *
 *
 * Returns a pointer to the best available Montgomery multiply-and-reduce
 * core for the CPU we're running on, mirroring the selection logic of
 * @pick_redccore@: SSE2 variants on x86/AMD64 when the SSE2 feature bit is
 * set, falling back to the portable @simple_mulcore@ otherwise.
 *
 * NOTE(review): as with the reduction picker, the SSE2 feature test on
 * AMD64 is presumably redundant on real hardware but kept for symmetry or
 * feature-override support -- confirm.  Lines marked with a leading `+'
 * are diff-added lines in this patch view.
 */
static mulcore__functype *pick_mulcore(void)
{
#if CPUFAM_X86
  DISPATCH_PICK_COND(mpmont_mul, maybe_mul4_x86_sse2,
		     cpu_feature_p(CPUFEAT_X86_SSE2));
#endif
+#if CPUFAM_AMD64
+  DISPATCH_PICK_COND(mpmont_mul, maybe_mul4_amd64_sse2,
+		     cpu_feature_p(CPUFEAT_X86_SSE2));
+#endif
  DISPATCH_PICK_FALLBACK(mpmont_mul, simple_mulcore);
}
/* --- Check for serious Karatsuba reduction --- */
- if (n > MPK_THRESH * 3) {
+ if (n > MPMONT_KTHRESH) {
mp al;
mpw *vl;
mp *u;
{
size_t n = mm->n;
- if (n > MPK_THRESH * 3) {
+ if (n > MPMONT_KTHRESH) {
d = mp_mul(d, a, b);
d = mpmont_reduce(mm, d, d);
} else {