@@ -959,70 +959,34 @@ vfp_single_multiply(struct vfp_single *vsd, struct vfp_single *vsn, struct vfp_s
 static u32
 vfp_single_multiply_accumulate(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr, u32 negate, const char *func)
 {
-{
     struct vfp_single vsd, vsp, vsn, vsm;
     u32 exceptions;
     s32 v;
 
     v = vfp_get_float(state, sn);
     pr_debug("VFP: s%u = %08x\n", sn, v);
     vfp_single_unpack(&vsn, v);
     if (vsn.exponent == 0 && vsn.significand)
         vfp_single_normalise_denormal(&vsn);
 
     vfp_single_unpack(&vsm, m);
     if (vsm.exponent == 0 && vsm.significand)
         vfp_single_normalise_denormal(&vsm);
 
     exceptions = vfp_single_multiply(&vsp, &vsn, &vsm, fpscr);
     if (negate & NEG_MULTIPLY)
         vsp.sign = vfp_sign_negate(vsp.sign);
 
     v = vfp_get_float(state, sd);
     pr_debug("VFP: s%u = %08x\n", sd, v);
     vfp_single_unpack(&vsn, v);
     if (negate & NEG_SUBTRACT)
         vsn.sign = vfp_sign_negate(vsn.sign);
 
     exceptions |= vfp_single_add(&vsd, &vsn, &vsp, fpscr);
 
     return vfp_single_normaliseround(state, sd, &vsd, fpscr, exceptions, func);
 }
-
-    struct vfp_double vsd, vsp, vsn, vsm;
-    u32 exceptions;
-    s32 v;
-    s64 vd;
-    s64 md;
-
-    v = vfp_get_float(state, sn);
-    vd = vfp_single_to_doubleintern(state, v, fpscr);
-    vfp_double_unpack(&vsn, vd);
-
-    md = vfp_single_to_doubleintern(state, m, fpscr);
-    vfp_double_unpack(&vsm, md);
-
-    exceptions = vfp_double_multiply(&vsp, &vsn, &vsm, fpscr);
-    if (negate & NEG_MULTIPLY)
-        vsp.sign = vfp_sign_negate(vsp.sign);
-
-    v = vfp_get_float(state, sd);
-    vd = vfp_single_to_doubleintern(state, v, fpscr);
-    vfp_double_unpack(&vsn, vd);
-    if (negate & NEG_SUBTRACT)
-        vsn.sign = vfp_sign_negate(vsn.sign);
-
-    exceptions |= vfp_double_add(&vsd, &vsn, &vsp, fpscr);
-
-    s64 debug = vfp_double_pack(&vsd);
-
-    return vfp_double_fcvtsinterncutting(state, sd, &vsd, fpscr);
-}
 
 /*