/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
13
14 #include <linux/hardirq.h>
15 #include <linux/types.h>
16 #include <linux/crypto.h>
17 #include <linux/err.h>
18 #include <crypto/algapi.h>
19 #include <crypto/aes.h>
20 #include <crypto/cryptd.h>
21 #include <asm/i387.h>
22 #include <asm/aes.h>
23
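/*
 * Detect which block cipher mode templates are available, either built
 * in or as modules.  The corresponding cryptd-backed algorithms below
 * are only compiled in and registered when the template exists.
 */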
#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

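/*
 * Per-tfm context of the async wrappers: just a handle to the cryptd
 * ablkcipher that does the actual work.
 */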
struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};

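/*
 * The AES-NI assembly accesses the expanded key with aligned SSE
 * loads, so the key schedule is padded and realigned to 16 bytes in
 * aes_ctx() below.  AES_BLOCK_MASK rounds a length down to a whole
 * number of AES blocks.
 */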
#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

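/* Primitives implemented in the accompanying AES-NI assembly file. */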
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

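/*
 * A crypto_tfm context is only guaranteed crypto_tfm_ctx_alignment();
 * realign the raw context to AESNI_ALIGN unless the API guarantee is
 * already at least that strict.
 */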
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * irq_fpu_usable() is true when the FPU may be claimed in the
	 * current context; only fall back to the C key expansion when
	 * it is not.
	 */
	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

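/*
 * Synchronous cipher registered as "aes".  It prefers the AES-NI
 * instructions and falls back to the generic x86 implementation when
 * the FPU cannot be touched (see aes_encrypt/aes_decrypt above).
 */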
static struct crypto_alg aesni_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u	= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};

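/*
 * Raw block operations without kernel_fpu_begin/end.  Only safe when
 * the caller already owns the FPU, e.g. inside the fpu()-wrapped mode
 * templates instantiated below.
 */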
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

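/*
 * Internal cipher: registered with priority 0 and under a "__"-prefixed
 * name no generic user requests, so it is effectively only reachable by
 * driver name from the mode templates instantiated further down.
 */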
static struct crypto_alg __aesni_alg = {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u	= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
};

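/*
 * blkcipher helpers.  The whole scatterlist walk runs between
 * kernel_fpu_begin() and kernel_fpu_end(), so CRYPTO_TFM_REQ_MAY_SLEEP
 * is cleared first: sleeping is not allowed while the FPU is claimed.
 */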
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

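/*
 * Async (ABLKCIPHER) wrappers.  These are the algorithms users see.
 * When the FPU is unusable in the current context the request is
 * bounced to cryptd, which re-issues it from process context;
 * otherwise it is handled synchronously by the cryptd child tfm.
 */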
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);
	return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

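/*
 * Instance setup/teardown: allocate or free the backing cryptd tfm and
 * reserve enough request context space to hold the nested cryptd
 * request copied in ablk_encrypt/ablk_decrypt.
 */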
static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

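/*
 * Optional modes: each instantiates the mode template around the
 * internal "__driver-aes-aesni" cipher, wrapped in the fpu() template
 * so the FPU is claimed once around the underlying walk rather than
 * per block.
 */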
#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ctr_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
			.geniv		= "chainiv",
		},
	},
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_lrw_alg = {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init		= ablk_lrw_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init		= ablk_xts_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

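/*
 * Register the algorithms in dependency order, unwinding on failure.
 * Bail out early when the CPU does not advertise the AES-NI feature.
 */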
static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif

	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
	crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&__aesni_alg);
	crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");