/* Select the best insn combination to perform the */ \
/* actual __m * __n / (__p << 64) operation. */ \
if (!__c) { \
- asm ( "umull %Q0, %R0, %1, %Q2\n\t" \
+ asm ( "umull %Q0, %R0, %Q1, %Q2\n\t" \
"mov %Q0, #0" \
: "=&r" (__res) \
: "r" (__m), "r" (__n) \
@@ ... @@
__res = __m; \
asm ( "umlal %Q0, %R0, %Q1, %Q2\n\t" \
"mov %Q0, #0" \
- : "+r" (__res) \
+ : "+&r" (__res) \
: "r" (__m), "r" (__n) \
: "cc" ); \
} else { \
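The remaining hunks all make the same constraint fix: "+r" marks __res as read-write, but only the '&' earlyclobber tells GCC that the asm writes it before the last read of the inputs. That matters here because of the preceding __res = __m;: the two variables hold the same value, so without '&' GCC is entitled to place __res and __m in the same register pair, and the sequence would then clobber its own multiplicand (and form register combinations that are architecturally unpredictable for the ARM long-multiply instructions). A sketch of the pattern, again for an ARM toolchain, with illustrative names:

    #include <stdint.h>

    /* acc += lo32(m) * lo32(n) as a 64-bit accumulate, then zero the
     * low word. Since acc == m on entry, the earlyclobber is what
     * stops GCC from giving acc and m the same register pair. */
    static inline uint64_t bias_hi32(uint64_t m, uint64_t n)
    {
            uint64_t acc = m;
            asm ("umlal %Q0, %R0, %Q1, %Q2\n\t"
                 "mov %Q0, #0"
                 : "+&r" (acc)   /* '&': acc must not overlap m or n */
                 : "r" (m), "r" (n)
                 : "cc");
            return acc;
    }

The last hunk applies the same fix twice: both __res and __z are written before the final reads of __m and __n, so both read-write operands need the earlyclobber.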
"umlal %R0, %Q0, %Q1, %R2\n\t" \
"mov %R0, #0\n\t" \
"umlal %Q0, %R0, %R1, %R2" \
- : "+r" (__res) \
+ : "+&r" (__res) \
: "r" (__m), "r" (__n) \
: "cc" ); \
} else { \
"adds %Q0, %1, %Q0\n\t" \
"adc %R0, %R0, #0\n\t" \
"umlal %Q0, %R0, %R2, %R3" \
- : "+r" (__res), "+r" (__z) \
+ : "+&r" (__res), "+&r" (__z) \
: "r" (__m), "r" (__n) \
: "cc" ); \
} \
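For reference, the two long umlal sequences being fixed accumulate the four 32x32 partial products of __m * __n and keep the top 64 bits of the 128-bit result, per the comment at the head of this section (__m * __n / (__p << 64), with the division by __p done afterwards). A portable C sketch of that computation, under the assumption that the instructions elided by the hunk context follow the usual cross-product scheme; the name is illustrative:

    #include <stdint.h>

    /* Top 64 bits of the 128-bit product m * n, built from 32x32
     * partial products, since 32-bit ARM has no 128-bit integer type. */
    static uint64_t xprod_hi64(uint64_t m, uint64_t n)
    {
            uint32_t m_lo = m, m_hi = m >> 32;
            uint32_t n_lo = n, n_hi = n >> 32;
            uint64_t hi_lo = (uint64_t)m_hi * n_lo;
            uint64_t cross, carry = 0;

            cross = ((uint64_t)m_lo * n_lo) >> 32;  /* carry out of the low word */
            cross += (uint64_t)m_lo * n_hi;         /* cannot overflow 64 bits */
            cross += hi_lo;
            if (cross < hi_lo)                      /* carry into bit 64 */
                    carry = 1ULL << 32;
            return (cross >> 32) + carry + (uint64_t)m_hi * n_hi;
    }

The biased variants (the umlal forms above) seed the accumulator with __m before this accumulation; on a 64-bit target the whole function collapses to (unsigned __int128)m * n >> 64.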