1 /*
2  * This program is free software; you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License, version 2, as
4  * published by the Free Software Foundation.
5  *
6  * This program is distributed in the hope that it will be useful,
7  * but WITHOUT ANY WARRANTY; without even the implied warranty of
8  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
9  * GNU General Public License for more details.
10  *
11  * You should have received a copy of the GNU General Public License
12  * along with this program; if not, write to the Free Software
13  * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
14  *
15  * Copyright Novell Inc 2010
16  *
17  * Authors: Alexander Graf <agraf@suse.de>
18  */
19
20 #include <asm/kvm.h>
21 #include <asm/kvm_ppc.h>
22 #include <asm/disassemble.h>
23 #include <asm/kvm_book3s.h>
24 #include <asm/kvm_fpu.h>
25 #include <asm/reg.h>
26 #include <asm/cacheflush.h>
27 #include <linux/vmalloc.h>
28
29 /* #define DEBUG */
30
31 #ifdef DEBUG
32 #define dprintk printk
33 #else
34 #define dprintk(...) do { } while(0)
35 #endif
36
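/*
 * Primary opcodes handled here: the scalar FP loads/stores (lfs/lfd,
 * stfs/stfd and their update forms) and the paired-single quantized
 * loads/stores psq_l/psq_lu/psq_st/psq_stu found on Gekko/Broadway-style
 * cores.
 */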
37 #define OP_LFS                  48
38 #define OP_LFSU                 49
39 #define OP_LFD                  50
40 #define OP_LFDU                 51
41 #define OP_STFS                 52
42 #define OP_STFSU                53
43 #define OP_STFD                 54
44 #define OP_STFDU                55
45 #define OP_PSQ_L                56
46 #define OP_PSQ_LU               57
47 #define OP_PSQ_ST               60
48 #define OP_PSQ_STU              61
49
50 #define OP_31_LFSX              535
51 #define OP_31_LFSUX             567
52 #define OP_31_LFDX              599
53 #define OP_31_LFDUX             631
54 #define OP_31_STFSX             663
55 #define OP_31_STFSUX            695
56 #define OP_31_STFX              727
57 #define OP_31_STFUX             759
58 #define OP_31_LWIZX             887
59 #define OP_31_STFIWX            983
60
61 #define OP_59_FADDS             21
62 #define OP_59_FSUBS             20
63 #define OP_59_FSQRTS            22
64 #define OP_59_FDIVS             18
65 #define OP_59_FRES              24
66 #define OP_59_FMULS             25
67 #define OP_59_FRSQRTES          26
68 #define OP_59_FMSUBS            28
69 #define OP_59_FMADDS            29
70 #define OP_59_FNMSUBS           30
71 #define OP_59_FNMADDS           31
72
73 #define OP_63_FCMPU             0
74 #define OP_63_FCPSGN            8
75 #define OP_63_FRSP              12
76 #define OP_63_FCTIW             14
77 #define OP_63_FCTIWZ            15
78 #define OP_63_FDIV              18
79 #define OP_63_FADD              21
80 #define OP_63_FSQRT             22
81 #define OP_63_FSEL              23
82 #define OP_63_FRE               24
83 #define OP_63_FMUL              25
84 #define OP_63_FRSQRTE           26
85 #define OP_63_FMSUB             28
86 #define OP_63_FMADD             29
87 #define OP_63_FNMSUB            30
88 #define OP_63_FNMADD            31
89 #define OP_63_FCMPO             32
90 #define OP_63_MTFSB1            38 // XXX
91 #define OP_63_FSUB              20
92 #define OP_63_FNEG              40
93 #define OP_63_MCRFS             64
94 #define OP_63_MTFSB0            70
95 #define OP_63_FMR               72
96 #define OP_63_MTFSFI            134
97 #define OP_63_FABS              264
98 #define OP_63_MFFS              583
99 #define OP_63_MTFSF             711
100
101 #define OP_4X_PS_CMPU0          0
102 #define OP_4X_PSQ_LX            6
103 #define OP_4XW_PSQ_STX          7
104 #define OP_4A_PS_SUM0           10
105 #define OP_4A_PS_SUM1           11
106 #define OP_4A_PS_MULS0          12
107 #define OP_4A_PS_MULS1          13
108 #define OP_4A_PS_MADDS0         14
109 #define OP_4A_PS_MADDS1         15
110 #define OP_4A_PS_DIV            18
111 #define OP_4A_PS_SUB            20
112 #define OP_4A_PS_ADD            21
113 #define OP_4A_PS_SEL            23
114 #define OP_4A_PS_RES            24
115 #define OP_4A_PS_MUL            25
116 #define OP_4A_PS_RSQRTE         26
117 #define OP_4A_PS_MSUB           28
118 #define OP_4A_PS_MADD           29
119 #define OP_4A_PS_NMSUB          30
120 #define OP_4A_PS_NMADD          31
121 #define OP_4X_PS_CMPO0          32
122 #define OP_4X_PSQ_LUX           38
123 #define OP_4XW_PSQ_STUX         39
124 #define OP_4X_PS_NEG            40
125 #define OP_4X_PS_CMPU1          64
126 #define OP_4X_PS_MR             72
127 #define OP_4X_PS_CMPO1          96
128 #define OP_4X_PS_NABS           136
129 #define OP_4X_PS_ABS            264
130 #define OP_4X_PS_MERGE00        528
131 #define OP_4X_PS_MERGE01        560
132 #define OP_4X_PS_MERGE10        592
133 #define OP_4X_PS_MERGE11        624
134
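/*
 * SCALAR_* flags modify how the kvmppc_ps_*_in helpers below treat their
 * operands: SCALAR_HIGH/SCALAR_LOW make the ps0 (high) or ps1 (low) half
 * of the second input be used for both halves of the computation, while
 * SCALAR_NO_PS0/SCALAR_NO_PS1 suppress updating the corresponding half of
 * the result.
 */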
135 #define SCALAR_NONE             0
136 #define SCALAR_HIGH             (1 << 0)
137 #define SCALAR_LOW              (1 << 1)
138 #define SCALAR_NO_PS0           (1 << 2)
139 #define SCALAR_NO_PS1           (1 << 3)
140
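/*
 * Layout of the Gekko graphics quantization registers (GQRs): each GQR
 * holds a type and scale for stores (ST_*) used by psq_st and a type and
 * scale for loads (LD_*) used by psq_l.
 */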
141 #define GQR_ST_TYPE_MASK        0x00000007
142 #define GQR_ST_TYPE_SHIFT       0
143 #define GQR_ST_SCALE_MASK       0x00003f00
144 #define GQR_ST_SCALE_SHIFT      8
145 #define GQR_LD_TYPE_MASK        0x00070000
146 #define GQR_LD_TYPE_SHIFT       16
147 #define GQR_LD_SCALE_MASK       0x3f000000
148 #define GQR_LD_SCALE_SHIFT      24
149
150 #define GQR_QUANTIZE_FLOAT      0
151 #define GQR_QUANTIZE_U8         4
152 #define GQR_QUANTIZE_U16        5
153 #define GQR_QUANTIZE_S8         6
154 #define GQR_QUANTIZE_S16        7
155
156 #define FPU_LS_SINGLE           0
157 #define FPU_LS_DOUBLE           1
158 #define FPU_LS_SINGLE_LOW       2
159
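/*
 * Mirror the value in FPR[rt] into QPR[rt] (the ps1 half) as a single, so
 * both halves of the paired-single register stay consistent after a scalar
 * single-precision instruction has updated the FPR.
 */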
160 static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt)
161 {
162         struct thread_struct t;
163
164         t.fpscr.val = vcpu->arch.fpscr;
165         cvt_df((double*)&vcpu->arch.fpr[rt], (float*)&vcpu->arch.qpr[rt], &t);
166 }
167
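/*
 * Report a guest page fault on eaddr: set the fault address, build a DSISR
 * with the page-fault bit (and the store bit for writes) and queue a Data
 * Storage interrupt for the guest.
 */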
168 static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store)
169 {
170         u64 dsisr;
171
172         vcpu->arch.msr = kvmppc_set_field(vcpu->arch.msr, 33, 36, 0);
173         vcpu->arch.msr = kvmppc_set_field(vcpu->arch.msr, 42, 47, 0);
174         vcpu->arch.dear = eaddr;
175         /* Page Fault */
176         dsisr = kvmppc_set_field(0, 33, 33, 1);
177         if (is_store)
178                 dsisr = kvmppc_set_field(dsisr, 38, 38, 1);
            to_book3s(vcpu)->dsisr = dsisr;
179         kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
180 }
181
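/*
 * Emulate a scalar FP load (lfs/lfd style): fetch 4 or 8 bytes from guest
 * memory, converting single precision into the double-precision FPR
 * format, or fall back to MMIO emulation if the access does not hit RAM.
 */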
182 static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
183                                    int rs, ulong addr, int ls_type)
184 {
185         int emulated = EMULATE_FAIL;
186         struct thread_struct t;
187         int r;
188         char tmp[8];
189         int len = sizeof(u32);
190
191         if (ls_type == FPU_LS_DOUBLE)
192                 len = sizeof(u64);
193
194         t.fpscr.val = vcpu->arch.fpscr;
195
196         /* read from memory */
197         r = kvmppc_ld(vcpu, &addr, len, tmp, true);
198         vcpu->arch.paddr_accessed = addr;
199
200         if (r < 0) {
201                 kvmppc_inject_pf(vcpu, addr, false);
202                 goto done_load;
203         } else if (r == EMULATE_DO_MMIO) {
204                 emulated = kvmppc_handle_load(run, vcpu, KVM_REG_FPR | rs, len, 1);
205                 goto done_load;
206         }
207
208         emulated = EMULATE_DONE;
209
210         /* put in registers */
211         switch (ls_type) {
212         case FPU_LS_SINGLE:
213                 cvt_fd((float*)tmp, (double*)&vcpu->arch.fpr[rs], &t);
214                 vcpu->arch.qpr[rs] = *((u32*)tmp);
215                 break;
216         case FPU_LS_DOUBLE:
217                 vcpu->arch.fpr[rs] = *((u64*)tmp);
218                 break;
219         }
220
221         dprintk(KERN_INFO "KVM: FPR_LD [0x%llx] at 0x%lx (%d)\n", *(u64*)tmp,
222                           addr, len);
223
224 done_load:
225         return emulated;
226 }
227
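/*
 * Emulate a scalar FP store (stfs/stfd/stfiwx style): convert the FPR to
 * the requested memory format and write it out, falling back to MMIO
 * emulation when necessary.
 */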
228 static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
229                                     int rs, ulong addr, int ls_type)
230 {
231         int emulated = EMULATE_FAIL;
232         struct thread_struct t;
233         int r;
234         char tmp[8];
235         u64 val;
236         int len;
237
238         t.fpscr.val = vcpu->arch.fpscr;
239
240         switch (ls_type) {
241         case FPU_LS_SINGLE:
242                 cvt_df((double*)&vcpu->arch.fpr[rs], (float*)tmp, &t);
243                 val = *((u32*)tmp);
244                 len = sizeof(u32);
245                 break;
246         case FPU_LS_SINGLE_LOW:
247                 *((u32*)tmp) = vcpu->arch.fpr[rs];
248                 val = vcpu->arch.fpr[rs] & 0xffffffff;
249                 len = sizeof(u32);
250                 break;
251         case FPU_LS_DOUBLE:
252                 *((u64*)tmp) = vcpu->arch.fpr[rs];
253                 val = vcpu->arch.fpr[rs];
254                 len = sizeof(u64);
255                 break;
256         default:
257                 val = 0;
258                 len = 0;
259         }
260
261         r = kvmppc_st(vcpu, &addr, len, tmp, true);
262         vcpu->arch.paddr_accessed = addr;
263         if (r < 0) {
264                 kvmppc_inject_pf(vcpu, addr, true);
265         } else if (r == EMULATE_DO_MMIO) {
266                 emulated = kvmppc_handle_store(run, vcpu, val, len, 1);
267         } else {
268                 emulated = EMULATE_DONE;
269         }
270
271         dprintk(KERN_INFO "KVM: FPR_ST [0x%llx] at 0x%lx (%d)\n",
272                           val, addr, len);
273
274         return emulated;
275 }
276
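/*
 * Emulate a paired-single quantized load: read two 32-bit singles from
 * guest memory into ps0/ps1 of register rs, or just one when w is set
 * (ps1 is then loaded with 1.0).  Only the float format is handled here;
 * the quantization index i is not used.
 */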
277 static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
278                                    int rs, ulong addr, bool w, int i)
279 {
280         int emulated = EMULATE_FAIL;
281         struct thread_struct t;
282         int r;
283         float one = 1.0;
284         u32 tmp[2];
285
286         t.fpscr.val = vcpu->arch.fpscr;
287
288         /* read from memory */
289         if (w) {
290                 r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true);
291                 memcpy(&tmp[1], &one, sizeof(u32));
292         } else {
293                 r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true);
294         }
295         vcpu->arch.paddr_accessed = addr;
296         if (r < 0) {
297                 kvmppc_inject_pf(vcpu, addr, false);
298                 goto done_load;
299         } else if ((r == EMULATE_DO_MMIO) && w) {
300                 emulated = kvmppc_handle_load(run, vcpu, KVM_REG_FPR | rs, 4, 1);
301                 vcpu->arch.qpr[rs] = tmp[1];
302                 goto done_load;
303         } else if (r == EMULATE_DO_MMIO) {
304                 emulated = kvmppc_handle_load(run, vcpu, KVM_REG_FQPR | rs, 8, 1);
305                 goto done_load;
306         }
307
308         emulated = EMULATE_DONE;
309
310         /* put in registers */
311         cvt_fd((float*)&tmp[0], (double*)&vcpu->arch.fpr[rs], &t);
312         vcpu->arch.qpr[rs] = tmp[1];
313
314         dprintk(KERN_INFO "KVM: PSQ_LD [0x%x, 0x%x] at 0x%lx (%d)\n", tmp[0],
315                           tmp[1], addr, w ? 4 : 8);
316
317 done_load:
318         return emulated;
319 }
320
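/*
 * Emulate a paired-single quantized store: write ps0 (converted to single
 * precision) and, unless w is set, ps1 to guest memory.  As with the load
 * path, only the float format is handled and i is ignored.
 */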
321 static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
322                                     int rs, ulong addr, bool w, int i)
323 {
324         int emulated = EMULATE_FAIL;
325         struct thread_struct t;
326         int r;
327         u32 tmp[2];
328         int len = w ? sizeof(u32) : sizeof(u64);
329
330         t.fpscr.val = vcpu->arch.fpscr;
331
332         cvt_df((double*)&vcpu->arch.fpr[rs], (float*)&tmp[0], &t);
333         tmp[1] = vcpu->arch.qpr[rs];
334
335         r = kvmppc_st(vcpu, &addr, len, tmp, true);
336         vcpu->arch.paddr_accessed = addr;
337         if (r < 0) {
338                 kvmppc_inject_pf(vcpu, addr, true);
339         } else if ((r == EMULATE_DO_MMIO) && w) {
340                 emulated = kvmppc_handle_store(run, vcpu, tmp[0], 4, 1);
341         } else if (r == EMULATE_DO_MMIO) {
342                 u64 val = ((u64)tmp[0] << 32) | tmp[1];
343                 emulated = kvmppc_handle_store(run, vcpu, val, 8, 1);
344         } else {
345                 emulated = EMULATE_DONE;
346         }
347
348         dprintk(KERN_INFO "KVM: PSQ_ST [0x%x, 0x%x] at 0x%lx (%d)\n",
349                           tmp[0], tmp[1], addr, len);
350
351         return emulated;
352 }
353
354 /*
355  * Extracts the instruction bits [msb:lsb], using the spec's bit numbering
356  * where the leftmost bit is bit 0.  Both msb and lsb are included.
357  */
358 static inline u32 inst_get_field(u32 inst, int msb, int lsb)
359 {
360         return kvmppc_get_field(inst, msb + 32, lsb + 32);
361 }
362
363 /*
364  * Replaces the instruction bits [msb:lsb], using the same bit numbering.
365  */
366 static inline u32 inst_set_field(u32 inst, int msb, int lsb, int value)
367 {
368         return kvmppc_set_field(inst, msb + 32, lsb + 32, value);
369 }
370
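/*
 * Return true if the vcpu is in paired-single mode and inst is one of the
 * FP or paired-single instructions that this file knows how to emulate.
 */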
371 bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst)
372 {
373         if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE))
374                 return false;
375
376         switch (get_op(inst)) {
377         case OP_PSQ_L:
378         case OP_PSQ_LU:
379         case OP_PSQ_ST:
380         case OP_PSQ_STU:
381         case OP_LFS:
382         case OP_LFSU:
383         case OP_LFD:
384         case OP_LFDU:
385         case OP_STFS:
386         case OP_STFSU:
387         case OP_STFD:
388         case OP_STFDU:
389                 return true;
390         case 4:
391                 /* X form */
392                 switch (inst_get_field(inst, 21, 30)) {
393                 case OP_4X_PS_CMPU0:
394                 case OP_4X_PSQ_LX:
395                 case OP_4X_PS_CMPO0:
396                 case OP_4X_PSQ_LUX:
397                 case OP_4X_PS_NEG:
398                 case OP_4X_PS_CMPU1:
399                 case OP_4X_PS_MR:
400                 case OP_4X_PS_CMPO1:
401                 case OP_4X_PS_NABS:
402                 case OP_4X_PS_ABS:
403                 case OP_4X_PS_MERGE00:
404                 case OP_4X_PS_MERGE01:
405                 case OP_4X_PS_MERGE10:
406                 case OP_4X_PS_MERGE11:
407                         return true;
408                 }
409                 /* XW form */
410                 switch (inst_get_field(inst, 25, 30)) {
411                 case OP_4XW_PSQ_STX:
412                 case OP_4XW_PSQ_STUX:
413                         return true;
414                 }
415                 /* A form */
416                 switch (inst_get_field(inst, 26, 30)) {
417                 case OP_4A_PS_SUM1:
418                 case OP_4A_PS_SUM0:
419                 case OP_4A_PS_MULS0:
420                 case OP_4A_PS_MULS1:
421                 case OP_4A_PS_MADDS0:
422                 case OP_4A_PS_MADDS1:
423                 case OP_4A_PS_DIV:
424                 case OP_4A_PS_SUB:
425                 case OP_4A_PS_ADD:
426                 case OP_4A_PS_SEL:
427                 case OP_4A_PS_RES:
428                 case OP_4A_PS_MUL:
429                 case OP_4A_PS_RSQRTE:
430                 case OP_4A_PS_MSUB:
431                 case OP_4A_PS_MADD:
432                 case OP_4A_PS_NMSUB:
433                 case OP_4A_PS_NMADD:
434                         return true;
435                 }
436                 break;
437         case 59:
438                 switch (inst_get_field(inst, 21, 30)) {
439                 case OP_59_FADDS:
440                 case OP_59_FSUBS:
441                 case OP_59_FDIVS:
442                 case OP_59_FRES:
443                 case OP_59_FRSQRTES:
444                         return true;
445                 }
446                 switch (inst_get_field(inst, 26, 30)) {
447                 case OP_59_FMULS:
448                 case OP_59_FMSUBS:
449                 case OP_59_FMADDS:
450                 case OP_59_FNMSUBS:
451                 case OP_59_FNMADDS:
452                         return true;
453                 }
454                 break;
455         case 63:
456                 switch (inst_get_field(inst, 21, 30)) {
457                 case OP_63_MTFSB0:
458                 case OP_63_MTFSB1:
459                 case OP_63_MTFSF:
460                 case OP_63_MTFSFI:
461                 case OP_63_MCRFS:
462                 case OP_63_MFFS:
463                 case OP_63_FCMPU:
464                 case OP_63_FCMPO:
465                 case OP_63_FNEG:
466                 case OP_63_FMR:
467                 case OP_63_FABS:
468                 case OP_63_FRSP:
469                 case OP_63_FDIV:
470                 case OP_63_FADD:
471                 case OP_63_FSUB:
472                 case OP_63_FCTIW:
473                 case OP_63_FCTIWZ:
474                 case OP_63_FRSQRTE:
475                 case OP_63_FCPSGN:
476                         return true;
477                 }
478                 switch (inst_get_field(inst, 26, 30)) {
479                 case OP_63_FMUL:
480                 case OP_63_FSEL:
481                 case OP_63_FMSUB:
482                 case OP_63_FMADD:
483                 case OP_63_FNMSUB:
484                 case OP_63_FNMADD:
485                         return true;
486                 }
487                 break;
488         case 31:
489                 switch (inst_get_field(inst, 21, 30)) {
490                 case OP_31_LFSX:
491                 case OP_31_LFSUX:
492                 case OP_31_LFDX:
493                 case OP_31_LFDUX:
494                 case OP_31_STFSX:
495                 case OP_31_STFSUX:
496                 case OP_31_STFX:
497                 case OP_31_STFUX:
498                 case OP_31_STFIWX:
499                         return true;
500                 }
501                 break;
502         }
503
504         return false;
505 }
506
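/*
 * psq_l/psq_lu/psq_st/psq_stu carry a 12-bit signed displacement in the
 * low bits of the instruction; sign-extend it.
 */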
507 static int get_d_signext(u32 inst)
508 {
509         int d = inst & 0xfff;
510
511         if (d & 0x800)
512                 return d - 0x1000;
513
514         return d;
515 }
516
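/*
 * Helpers for the ps_* arithmetic instructions: run the given fps_* helper
 * once on the ps0 (FPR) halves and once on the ps1 (QPR) halves of the
 * inputs, with the SCALAR_* flags selecting the scalar variants
 * (ps_sum0/1, ps_muls0/1, ...).  kvmppc_ps_two_in and kvmppc_ps_one_in
 * below are the two- and one-operand versions.
 */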
517 static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc,
518                                       int reg_out, int reg_in1, int reg_in2,
519                                       int reg_in3, int scalar,
520                                       void (*func)(struct thread_struct *t,
521                                                  u32 *dst, u32 *src1,
522                                                  u32 *src2, u32 *src3))
523 {
524         u32 *qpr = vcpu->arch.qpr;
525         u64 *fpr = vcpu->arch.fpr;
526         u32 ps0_out;
527         u32 ps0_in1, ps0_in2, ps0_in3;
528         u32 ps1_in1, ps1_in2, ps1_in3;
529         struct thread_struct t;
530         t.fpscr.val = vcpu->arch.fpscr;
531
532         /* RC */
533         WARN_ON(rc);
534
535         /* PS0 */
536         cvt_df((double*)&fpr[reg_in1], (float*)&ps0_in1, &t);
537         cvt_df((double*)&fpr[reg_in2], (float*)&ps0_in2, &t);
538         cvt_df((double*)&fpr[reg_in3], (float*)&ps0_in3, &t);
539
540         if (scalar & SCALAR_LOW)
541                 ps0_in2 = qpr[reg_in2];
542
543         func(&t, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3);
544
545         dprintk(KERN_INFO "PS3 ps0 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
546                           ps0_in1, ps0_in2, ps0_in3, ps0_out);
547
548         if (!(scalar & SCALAR_NO_PS0))
549                 cvt_fd((float*)&ps0_out, (double*)&fpr[reg_out], &t);
550
551         /* PS1 */
552         ps1_in1 = qpr[reg_in1];
553         ps1_in2 = qpr[reg_in2];
554         ps1_in3 = qpr[reg_in3];
555
556         if (scalar & SCALAR_HIGH)
557                 ps1_in2 = ps0_in2;
558
559         if (!(scalar & SCALAR_NO_PS1))
560                 func(&t, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3);
561
562         dprintk(KERN_INFO "PS3 ps1 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
563                           ps1_in1, ps1_in2, ps1_in3, qpr[reg_out]);
564
565         return EMULATE_DONE;
566 }
567
568 static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc,
569                                     int reg_out, int reg_in1, int reg_in2,
570                                     int scalar,
571                                     void (*func)(struct thread_struct *t,
572                                                  u32 *dst, u32 *src1,
573                                                  u32 *src2))
574 {
575         u32 *qpr = vcpu->arch.qpr;
576         u64 *fpr = vcpu->arch.fpr;
577         u32 ps0_out;
578         u32 ps0_in1, ps0_in2;
579         u32 ps1_out;
580         u32 ps1_in1, ps1_in2;
581         struct thread_struct t;
582         t.fpscr.val = vcpu->arch.fpscr;
583
584         /* RC */
585         WARN_ON(rc);
586
587         /* PS0 */
588         cvt_df((double*)&fpr[reg_in1], (float*)&ps0_in1, &t);
589
590         if (scalar & SCALAR_LOW)
591                 ps0_in2 = qpr[reg_in2];
592         else
593                 cvt_df((double*)&fpr[reg_in2], (float*)&ps0_in2, &t);
594
595         func(&t, &ps0_out, &ps0_in1, &ps0_in2);
596
597         if (!(scalar & SCALAR_NO_PS0)) {
598                 dprintk(KERN_INFO "PS2 ps0 -> f(0x%x, 0x%x) = 0x%x\n",
599                                   ps0_in1, ps0_in2, ps0_out);
600
601                 cvt_fd((float*)&ps0_out, (double*)&fpr[reg_out], &t);
602         }
603
604         /* PS1 */
605         ps1_in1 = qpr[reg_in1];
606         ps1_in2 = qpr[reg_in2];
607
608         if (scalar & SCALAR_HIGH)
609                 ps1_in2 = ps0_in2;
610
611         func(&t, &ps1_out, &ps1_in1, &ps1_in2);
612
613         if (!(scalar & SCALAR_NO_PS1)) {
614                 qpr[reg_out] = ps1_out;
615
616                 dprintk(KERN_INFO "PS2 ps1 -> f(0x%x, 0x%x) = 0x%x\n",
617                                   ps1_in1, ps1_in2, qpr[reg_out]);
618         }
619
620         return EMULATE_DONE;
621 }
622
623 static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc,
624                                     int reg_out, int reg_in,
625                                     void (*func)(struct thread_struct *t,
626                                                  u32 *dst, u32 *src1))
627 {
628         u32 *qpr = vcpu->arch.qpr;
629         u64 *fpr = vcpu->arch.fpr;
630         u32 ps0_out, ps0_in;
631         u32 ps1_in;
632         struct thread_struct t;
633         t.fpscr.val = vcpu->arch.fpscr;
634
635         /* RC */
636         WARN_ON(rc);
637
638         /* PS0 */
639         cvt_df((double*)&fpr[reg_in], (float*)&ps0_in, &t);
640         func(&t, &ps0_out, &ps0_in);
641
642         dprintk(KERN_INFO "PS1 ps0 -> f(0x%x) = 0x%x\n",
643                           ps0_in, ps0_out);
644
645         cvt_fd((float*)&ps0_out, (double*)&fpr[reg_out], &t);
646
647         /* PS1 */
648         ps1_in = qpr[reg_in];
649         func(&t, &qpr[reg_out], &ps1_in);
650
651         dprintk(KERN_INFO "PS1 ps1 -> f(0x%x) = 0x%x\n",
652                           ps1_in, qpr[reg_out]);
653
654         return EMULATE_DONE;
655 }
656
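/*
 * Main entry point: decode and emulate one paired-single or scalar FP
 * instruction for the guest.  If the guest has MSR_FP clear, an FP
 * unavailable interrupt is queued instead; otherwise the arithmetic is
 * carried out with kernel FP enabled and preemption disabled.
 */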
657 int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu)
658 {
659         u32 inst = kvmppc_get_last_inst(vcpu);
660         enum emulation_result emulated = EMULATE_DONE;
661
662         int ax_rd = inst_get_field(inst, 6, 10);
663         int ax_ra = inst_get_field(inst, 11, 15);
664         int ax_rb = inst_get_field(inst, 16, 20);
665         int ax_rc = inst_get_field(inst, 21, 25);
666         short full_d = inst_get_field(inst, 16, 31);
667
668         u64 *fpr_d = &vcpu->arch.fpr[ax_rd];
669         u64 *fpr_a = &vcpu->arch.fpr[ax_ra];
670         u64 *fpr_b = &vcpu->arch.fpr[ax_rb];
671         u64 *fpr_c = &vcpu->arch.fpr[ax_rc];
672
673         bool rcomp = (inst & 1) ? true : false;
674         u32 cr = kvmppc_get_cr(vcpu);
675         struct thread_struct t;
676 #ifdef DEBUG
677         int i;
678 #endif
679
680         t.fpscr.val = vcpu->arch.fpscr;
681
682         if (!kvmppc_inst_is_paired_single(vcpu, inst))
683                 return EMULATE_FAIL;
684
685         if (!(vcpu->arch.msr & MSR_FP)) {
686                 kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL);
687                 return EMULATE_AGAIN;
688         }
689
690         kvmppc_giveup_ext(vcpu, MSR_FP);
691         preempt_disable();
692         enable_kernel_fp();
693         /* Do we need to clear FE0 / FE1 here? Don't think so. */
694
695 #ifdef DEBUG
696         for (i = 0; i < ARRAY_SIZE(vcpu->arch.fpr); i++) {
697                 u32 f;
698                 cvt_df((double*)&vcpu->arch.fpr[i], (float*)&f, &t);
699                 dprintk(KERN_INFO "FPR[%d] = 0x%x / 0x%llx    QPR[%d] = 0x%x\n",
700                         i, f, vcpu->arch.fpr[i], i, vcpu->arch.qpr[i]);
701         }
702 #endif
703
704         switch (get_op(inst)) {
705         case OP_PSQ_L:
706         {
707                 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
708                 bool w = inst_get_field(inst, 16, 16) ? true : false;
709                 int i = inst_get_field(inst, 17, 19);
710
711                 addr += get_d_signext(inst);
712                 emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
713                 break;
714         }
715         case OP_PSQ_LU:
716         {
717                 ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
718                 bool w = inst_get_field(inst, 16, 16) ? true : false;
719                 int i = inst_get_field(inst, 17, 19);
720
721                 addr += get_d_signext(inst);
722                 emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
723
724                 if (emulated == EMULATE_DONE)
725                         kvmppc_set_gpr(vcpu, ax_ra, addr);
726                 break;
727         }
728         case OP_PSQ_ST:
729         {
730                 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
731                 bool w = inst_get_field(inst, 16, 16) ? true : false;
732                 int i = inst_get_field(inst, 17, 19);
733
734                 addr += get_d_signext(inst);
735                 emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
736                 break;
737         }
738         case OP_PSQ_STU:
739         {
740                 ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
741                 bool w = inst_get_field(inst, 16, 16) ? true : false;
742                 int i = inst_get_field(inst, 17, 19);
743
744                 addr += get_d_signext(inst);
745                 emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
746
747                 if (emulated == EMULATE_DONE)
748                         kvmppc_set_gpr(vcpu, ax_ra, addr);
749                 break;
750         }
751         case 4:
752                 /* X form */
753                 switch (inst_get_field(inst, 21, 30)) {
754                 case OP_4X_PS_CMPU0:
755                         /* XXX */
756                         emulated = EMULATE_FAIL;
757                         break;
758                 case OP_4X_PSQ_LX:
759                 {
760                         ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
761                         bool w = inst_get_field(inst, 21, 21) ? true : false;
762                         int i = inst_get_field(inst, 22, 24);
763
764                         addr += kvmppc_get_gpr(vcpu, ax_rb);
765                         emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
766                         break;
767                 }
768                 case OP_4X_PS_CMPO0:
769                         /* XXX */
770                         emulated = EMULATE_FAIL;
771                         break;
772                 case OP_4X_PSQ_LUX:
773                 {
774                         ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
775                         bool w = inst_get_field(inst, 21, 21) ? true : false;
776                         int i = inst_get_field(inst, 22, 24);
777
778                         addr += kvmppc_get_gpr(vcpu, ax_rb);
779                         emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
780
781                         if (emulated == EMULATE_DONE)
782                                 kvmppc_set_gpr(vcpu, ax_ra, addr);
783                         break;
784                 }
785                 case OP_4X_PS_NEG:
786                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
787                         vcpu->arch.fpr[ax_rd] ^= 0x8000000000000000ULL;
788                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
789                         vcpu->arch.qpr[ax_rd] ^= 0x80000000;
790                         break;
791                 case OP_4X_PS_CMPU1:
792                         /* XXX */
793                         emulated = EMULATE_FAIL;
794                         break;
795                 case OP_4X_PS_MR:
796                         WARN_ON(rcomp);
797                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
798                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
799                         break;
800                 case OP_4X_PS_CMPO1:
801                         /* XXX */
802                         emulated = EMULATE_FAIL;
803                         break;
804                 case OP_4X_PS_NABS:
805                         WARN_ON(rcomp);
806                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
807                         vcpu->arch.fpr[ax_rd] |= 0x8000000000000000ULL;
808                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
809                         vcpu->arch.qpr[ax_rd] |= 0x80000000;
810                         break;
811                 case OP_4X_PS_ABS:
812                         WARN_ON(rcomp);
813                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
814                         vcpu->arch.fpr[ax_rd] &= ~0x8000000000000000ULL;
815                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
816                         vcpu->arch.qpr[ax_rd] &= ~0x80000000;
817                         break;
818                 case OP_4X_PS_MERGE00:
819                         WARN_ON(rcomp);
820                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_ra];
821                         /* vcpu->arch.qpr[ax_rd] = vcpu->arch.fpr[ax_rb]; */
822                         cvt_df((double*)&vcpu->arch.fpr[ax_rb],
823                                (float*)&vcpu->arch.qpr[ax_rd], &t);
824                         break;
825                 case OP_4X_PS_MERGE01:
826                         WARN_ON(rcomp);
827                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_ra];
828                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
829                         break;
830                 case OP_4X_PS_MERGE10:
831                         WARN_ON(rcomp);
832                         /* vcpu->arch.fpr[ax_rd] = vcpu->arch.qpr[ax_ra]; */
833                         cvt_fd((float*)&vcpu->arch.qpr[ax_ra],
834                                (double*)&vcpu->arch.fpr[ax_rd], &t);
835                         /* vcpu->arch.qpr[ax_rd] = vcpu->arch.fpr[ax_rb]; */
836                         cvt_df((double*)&vcpu->arch.fpr[ax_rb],
837                                (float*)&vcpu->arch.qpr[ax_rd], &t);
838                         break;
839                 case OP_4X_PS_MERGE11:
840                         WARN_ON(rcomp);
841                         /* vcpu->arch.fpr[ax_rd] = vcpu->arch.qpr[ax_ra]; */
842                         cvt_fd((float*)&vcpu->arch.qpr[ax_ra],
843                                (double*)&vcpu->arch.fpr[ax_rd], &t);
844                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
845                         break;
846                 }
847                 /* XW form */
848                 switch (inst_get_field(inst, 25, 30)) {
849                 case OP_4XW_PSQ_STX:
850                 {
851                         ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
852                         bool w = inst_get_field(inst, 21, 21) ? true : false;
853                         int i = inst_get_field(inst, 22, 24);
854
855                         addr += kvmppc_get_gpr(vcpu, ax_rb);
856                         emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
857                         break;
858                 }
859                 case OP_4XW_PSQ_STUX:
860                 {
861                         ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
862                         bool w = inst_get_field(inst, 21, 21) ? true : false;
863                         int i = inst_get_field(inst, 22, 24);
864
865                         addr += kvmppc_get_gpr(vcpu, ax_rb);
866                         emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
867
868                         if (emulated == EMULATE_DONE)
869                                 kvmppc_set_gpr(vcpu, ax_ra, addr);
870                         break;
871                 }
872                 }
873                 /* A form */
874                 switch (inst_get_field(inst, 26, 30)) {
875                 case OP_4A_PS_SUM1:
876                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
877                                         ax_rb, ax_ra, SCALAR_NO_PS0 | SCALAR_HIGH, fps_fadds);
878                         vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rc];
879                         break;
880                 case OP_4A_PS_SUM0:
881                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
882                                         ax_ra, ax_rb, SCALAR_NO_PS1 | SCALAR_LOW, fps_fadds);
883                         vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc];
884                         break;
885                 case OP_4A_PS_MULS0:
886                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
887                                         ax_ra, ax_rc, SCALAR_HIGH, fps_fmuls);
888                         break;
889                 case OP_4A_PS_MULS1:
890                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
891                                         ax_ra, ax_rc, SCALAR_LOW, fps_fmuls);
892                         break;
893                 case OP_4A_PS_MADDS0:
894                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
895                                         ax_ra, ax_rc, ax_rb, SCALAR_HIGH, fps_fmadds);
896                         break;
897                 case OP_4A_PS_MADDS1:
898                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
899                                         ax_ra, ax_rc, ax_rb, SCALAR_LOW, fps_fmadds);
900                         break;
901                 case OP_4A_PS_DIV:
902                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
903                                         ax_ra, ax_rb, SCALAR_NONE, fps_fdivs);
904                         break;
905                 case OP_4A_PS_SUB:
906                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
907                                         ax_ra, ax_rb, SCALAR_NONE, fps_fsubs);
908                         break;
909                 case OP_4A_PS_ADD:
910                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
911                                         ax_ra, ax_rb, SCALAR_NONE, fps_fadds);
912                         break;
913                 case OP_4A_PS_SEL:
914                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
915                                         ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fsel);
916                         break;
917                 case OP_4A_PS_RES:
918                         emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
919                                         ax_rb, fps_fres);
920                         break;
921                 case OP_4A_PS_MUL:
922                         emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
923                                         ax_ra, ax_rc, SCALAR_NONE, fps_fmuls);
924                         break;
925                 case OP_4A_PS_RSQRTE:
926                         emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
927                                         ax_rb, fps_frsqrte);
928                         break;
929                 case OP_4A_PS_MSUB:
930                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
931                                         ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmsubs);
932                         break;
933                 case OP_4A_PS_MADD:
934                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
935                                         ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmadds);
936                         break;
937                 case OP_4A_PS_NMSUB:
938                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
939                                         ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmsubs);
940                         break;
941                 case OP_4A_PS_NMADD:
942                         emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
943                                         ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmadds);
944                         break;
945                 }
946                 break;
947
948         /* Real FPU operations */
949
950         case OP_LFS:
951         {
952                 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
953
954                 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
955                                                    FPU_LS_SINGLE);
956                 break;
957         }
958         case OP_LFSU:
959         {
960                 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
961
962                 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
963                                                    FPU_LS_SINGLE);
964
965                 if (emulated == EMULATE_DONE)
966                         kvmppc_set_gpr(vcpu, ax_ra, addr);
967                 break;
968         }
969         case OP_LFD:
970         {
971                 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
972
973                 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
974                                                    FPU_LS_DOUBLE);
975                 break;
976         }
977         case OP_LFDU:
978         {
979                 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
980
981                 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
982                                                    FPU_LS_DOUBLE);
983
984                 if (emulated == EMULATE_DONE)
985                         kvmppc_set_gpr(vcpu, ax_ra, addr);
986                 break;
987         }
988         case OP_STFS:
989         {
990                 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
991
992                 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
993                                                     FPU_LS_SINGLE);
994                 break;
995         }
996         case OP_STFSU:
997         {
998                 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
999
1000                 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
1001                                                     FPU_LS_SINGLE);
1002
1003                 if (emulated == EMULATE_DONE)
1004                         kvmppc_set_gpr(vcpu, ax_ra, addr);
1005                 break;
1006         }
1007         case OP_STFD:
1008         {
1009                 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
1010
1011                 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
1012                                                     FPU_LS_DOUBLE);
1013                 break;
1014         }
1015         case OP_STFDU:
1016         {
1017                 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
1018
1019                 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
1020                                                     FPU_LS_DOUBLE);
1021
1022                 if (emulated == EMULATE_DONE)
1023                         kvmppc_set_gpr(vcpu, ax_ra, addr);
1024                 break;
1025         }
1026         case 31:
1027                 switch (inst_get_field(inst, 21, 30)) {
1028                 case OP_31_LFSX:
1029                 {
1030                         ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
1031
1032                         addr += kvmppc_get_gpr(vcpu, ax_rb);
1033                         emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1034                                                            addr, FPU_LS_SINGLE);
1035                         break;
1036                 }
1037                 case OP_31_LFSUX:
1038                 {
1039                         ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1040                                      kvmppc_get_gpr(vcpu, ax_rb);
1041
1042                         emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1043                                                            addr, FPU_LS_SINGLE);
1044
1045                         if (emulated == EMULATE_DONE)
1046                                 kvmppc_set_gpr(vcpu, ax_ra, addr);
1047                         break;
1048                 }
1049                 case OP_31_LFDX:
1050                 {
1051                         ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1052                                      kvmppc_get_gpr(vcpu, ax_rb);
1053
1054                         emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1055                                                            addr, FPU_LS_DOUBLE);
1056                         break;
1057                 }
1058                 case OP_31_LFDUX:
1059                 {
1060                         ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1061                                      kvmppc_get_gpr(vcpu, ax_rb);
1062
1063                         emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1064                                                            addr, FPU_LS_DOUBLE);
1065
1066                         if (emulated == EMULATE_DONE)
1067                                 kvmppc_set_gpr(vcpu, ax_ra, addr);
1068                         break;
1069                 }
1070                 case OP_31_STFSX:
1071                 {
1072                         ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1073                                      kvmppc_get_gpr(vcpu, ax_rb);
1074
1075                         emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1076                                                             addr, FPU_LS_SINGLE);
1077                         break;
1078                 }
1079                 case OP_31_STFSUX:
1080                 {
1081                         ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1082                                      kvmppc_get_gpr(vcpu, ax_rb);
1083
1084                         emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1085                                                             addr, FPU_LS_SINGLE);
1086
1087                         if (emulated == EMULATE_DONE)
1088                                 kvmppc_set_gpr(vcpu, ax_ra, addr);
1089                         break;
1090                 }
1091                 case OP_31_STFX:
1092                 {
1093                         ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1094                                      kvmppc_get_gpr(vcpu, ax_rb);
1095
1096                         emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1097                                                             addr, FPU_LS_DOUBLE);
1098                         break;
1099                 }
1100                 case OP_31_STFUX:
1101                 {
1102                         ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1103                                      kvmppc_get_gpr(vcpu, ax_rb);
1104
1105                         emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1106                                                             addr, FPU_LS_DOUBLE);
1107
1108                         if (emulated == EMULATE_DONE)
1109                                 kvmppc_set_gpr(vcpu, ax_ra, addr);
1110                         break;
1111                 }
1112                 case OP_31_STFIWX:
1113                 {
1114                         ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1115                                      kvmppc_get_gpr(vcpu, ax_rb);
1116
1117                         emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1118                                                             addr,
1119                                                             FPU_LS_SINGLE_LOW);
1120                         break;
1121                 }
1122                         break;
1123                 }
1124                 break;
1125         case 59:
1126                 switch (inst_get_field(inst, 21, 30)) {
1127                 case OP_59_FADDS:
1128                         fpd_fadds(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1129                         kvmppc_sync_qpr(vcpu, ax_rd);
1130                         break;
1131                 case OP_59_FSUBS:
1132                         fpd_fsubs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1133                         kvmppc_sync_qpr(vcpu, ax_rd);
1134                         break;
1135                 case OP_59_FDIVS:
1136                         fpd_fdivs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1137                         kvmppc_sync_qpr(vcpu, ax_rd);
1138                         break;
1139                 case OP_59_FRES:
1140                         fpd_fres(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1141                         kvmppc_sync_qpr(vcpu, ax_rd);
1142                         break;
1143                 case OP_59_FRSQRTES:
1144                         fpd_frsqrtes(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1145                         kvmppc_sync_qpr(vcpu, ax_rd);
1146                         break;
1147                 }
1148                 switch (inst_get_field(inst, 26, 30)) {
1149                 case OP_59_FMULS:
1150                         fpd_fmuls(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1151                         kvmppc_sync_qpr(vcpu, ax_rd);
1152                         break;
1153                 case OP_59_FMSUBS:
1154                         fpd_fmsubs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1155                         kvmppc_sync_qpr(vcpu, ax_rd);
1156                         break;
1157                 case OP_59_FMADDS:
1158                         fpd_fmadds(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1159                         kvmppc_sync_qpr(vcpu, ax_rd);
1160                         break;
1161                 case OP_59_FNMSUBS:
1162                         fpd_fnmsubs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1163                         kvmppc_sync_qpr(vcpu, ax_rd);
1164                         break;
1165                 case OP_59_FNMADDS:
1166                         fpd_fnmadds(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1167                         kvmppc_sync_qpr(vcpu, ax_rd);
1168                         break;
1169                 }
1170                 break;
1171         case 63:
1172                 switch (inst_get_field(inst, 21, 30)) {
1173                 case OP_63_MTFSB0:
1174                 case OP_63_MTFSB1:
1175                 case OP_63_MCRFS:
1176                 case OP_63_MTFSFI:
1177                         /* XXX need to implement */
1178                         break;
1179                 case OP_63_MFFS:
1180                         /* XXX missing CR */
1181                         *fpr_d = vcpu->arch.fpscr;
1182                         break;
1183                 case OP_63_MTFSF:
1184                         /* XXX missing fm bits */
1185                         /* XXX missing CR */
1186                         vcpu->arch.fpscr = *fpr_b;
1187                         break;
1188                 case OP_63_FCMPU:
1189                 {
1190                         u32 tmp_cr;
1191                         u32 cr0_mask = 0xf0000000;
1192                         u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1193
1194                         fpd_fcmpu(&vcpu->arch.fpscr, &tmp_cr, fpr_a, fpr_b);
1195                         cr &= ~(cr0_mask >> cr_shift);
1196                         cr |= (tmp_cr & cr0_mask) >> cr_shift;
1197                         break;
1198                 }
1199                 case OP_63_FCMPO:
1200                 {
1201                         u32 tmp_cr;
1202                         u32 cr0_mask = 0xf0000000;
1203                         u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1204
1205                         fpd_fcmpo(&vcpu->arch.fpscr, &tmp_cr, fpr_a, fpr_b);
1206                         cr &= ~(cr0_mask >> cr_shift);
1207                         cr |= (tmp_cr & cr0_mask) >> cr_shift;
1208                         break;
1209                 }
1210                 case OP_63_FNEG:
1211                         fpd_fneg(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1212                         break;
1213                 case OP_63_FMR:
1214                         *fpr_d = *fpr_b;
1215                         break;
1216                 case OP_63_FABS:
1217                         fpd_fabs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1218                         break;
1219                 case OP_63_FCPSGN:
1220                         fpd_fcpsgn(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1221                         break;
1222                 case OP_63_FDIV:
1223                         fpd_fdiv(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1224                         break;
1225                 case OP_63_FADD:
1226                         fpd_fadd(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1227                         break;
1228                 case OP_63_FSUB:
1229                         fpd_fsub(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1230                         break;
1231                 case OP_63_FCTIW:
1232                         fpd_fctiw(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1233                         break;
1234                 case OP_63_FCTIWZ:
1235                         fpd_fctiwz(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1236                         break;
1237                 case OP_63_FRSP:
1238                         fpd_frsp(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1239                         kvmppc_sync_qpr(vcpu, ax_rd);
1240                         break;
1241                 case OP_63_FRSQRTE:
1242                 {
1243                         double one = 1.0f;
1244
1245                         /* fD = sqrt(fB) */
1246                         fpd_fsqrt(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
1247                         /* fD = 1.0f / fD */
1248                         fpd_fdiv(&vcpu->arch.fpscr, &cr, fpr_d, (u64*)&one, fpr_d);
1249                         break;
1250                 }
1251                 }
1252                 switch (inst_get_field(inst, 26, 30)) {
1253                 case OP_63_FMUL:
1254                         fpd_fmul(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1255                         break;
1256                 case OP_63_FSEL:
1257                         fpd_fsel(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1258                         break;
1259                 case OP_63_FMSUB:
1260                         fpd_fmsub(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1261                         break;
1262                 case OP_63_FMADD:
1263                         fpd_fmadd(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1264                         break;
1265                 case OP_63_FNMSUB:
1266                         fpd_fnmsub(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1267                         break;
1268                 case OP_63_FNMADD:
1269                         fpd_fnmadd(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1270                         break;
1271                 }
1272                 break;
1273         }
1274
1275 #ifdef DEBUG
1276         for (i = 0; i < ARRAY_SIZE(vcpu->arch.fpr); i++) {
1277                 u32 f;
1278                 cvt_df((double*)&vcpu->arch.fpr[i], (float*)&f, &t);
1279                 dprintk(KERN_INFO "FPR[%d] = 0x%x\n", i, f);
1280         }
1281 #endif
1282
1283         if (rcomp)
1284                 kvmppc_set_cr(vcpu, cr);
1285
1286         preempt_enable();
1287
1288         return emulated;
1289 }