drivers/crypto/padlock-sha.c
/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006  Michal Ludvig <michal@logix.cz>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <asm/i387.h>
#include "padlock.h"

struct padlock_sha_ctx {
        char            *data;          /* one-page buffer for queued input */
        size_t          used;           /* bytes of *data currently filled */
        int             bypass;         /* set once we switched to the fallback */
        void (*f_sha_padlock)(const char *in, char *out, int count);
        struct shash_desc *fallback;    /* software fallback descriptor */
};

static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
{
        return crypto_tfm_ctx(tfm);
}

/* We'll need an aligned address on the stack */
#define NEAREST_ALIGNED(ptr) \
        ((void *)ALIGN((size_t)(ptr), PADLOCK_ALIGNMENT))
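
/*
 * A worked example of the alignment math (illustrative values only):
 * with PADLOCK_ALIGNMENT == 16, a buffer starting at 0x1003 gives
 * ALIGN(0x1003, 16) == 0x1010, the first 16-byte boundary at or above
 * the original pointer, skipping at most 15 bytes. That is why the
 * stack buffers below over-allocate by 16 bytes.
 */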

static struct crypto_alg sha1_alg, sha256_alg;

/*
 * Hand over to the software fallback: initialize the fallback, replay
 * whatever input we have buffered so far, and mark this tfm as
 * bypassed so later calls go straight to the fallback.
 */
static int padlock_sha_bypass(struct crypto_tfm *tfm)
{
        int err = 0;

        if (ctx(tfm)->bypass)
                goto out;

        err = crypto_shash_init(ctx(tfm)->fallback);
        if (err)
                goto out;

        if (ctx(tfm)->data && ctx(tfm)->used)
                err = crypto_shash_update(ctx(tfm)->fallback, ctx(tfm)->data,
                                          ctx(tfm)->used);

        ctx(tfm)->used = 0;
        ctx(tfm)->bypass = 1;

out:
        return err;
}

static void padlock_sha_init(struct crypto_tfm *tfm)
{
        ctx(tfm)->used = 0;
        ctx(tfm)->bypass = 0;
}

static void padlock_sha_update(struct crypto_tfm *tfm,
                        const uint8_t *data, unsigned int length)
{
        int err;

        /* Our buffer is always one page. */
        if (unlikely(!ctx(tfm)->bypass &&
                     (ctx(tfm)->used + length > PAGE_SIZE))) {
                err = padlock_sha_bypass(tfm);
                BUG_ON(err);
        }

        if (unlikely(ctx(tfm)->bypass)) {
                err = crypto_shash_update(ctx(tfm)->fallback, data, length);
                BUG_ON(err);
                return;
        }

        memcpy(ctx(tfm)->data + ctx(tfm)->used, data, length);
        ctx(tfm)->used += length;
}

static inline void padlock_output_block(uint32_t *src,
                        uint32_t *dst, size_t count)
{
        while (count--)
                *dst++ = swab32(*src++);
}
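
/*
 * Note on padlock_output_block(): judging by the swab32(), the PadLock
 * engine leaves the digest words byte-swapped relative to the
 * big-endian byte stream the SHA specifications (and the crypto API)
 * expect. For example the initial SHA-1 word SHA1_H0 == 0x67452301
 * must reach the caller as the bytes 67 45 23 01, which is exactly
 * what storing swab32(0x67452301) == 0x01234567 on little-endian x86
 * produces.
 */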

static void padlock_do_sha1(const char *in, char *out, int count)
{
        /* We can't store directly to *out as it may be unaligned. */
        /* Don't reduce the buffer size below 128 bytes!
         * The PadLock microcode needs it that big. */
        char buf[128+16];
        char *result = NEAREST_ALIGNED(buf);
        int ts_state;

        ((uint32_t *)result)[0] = SHA1_H0;
        ((uint32_t *)result)[1] = SHA1_H1;
        ((uint32_t *)result)[2] = SHA1_H2;
        ((uint32_t *)result)[3] = SHA1_H3;
        ((uint32_t *)result)[4] = SHA1_H4;

        /* prevent taking the spurious DNA fault with padlock. */
        ts_state = irq_ts_save();
        asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
                      : "+S"(in), "+D"(result)
                      : "c"(count), "a"(0));
        irq_ts_restore(ts_state);

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);
}
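
/*
 * About the inline asm above: ".byte 0xf3,0x0f,0xa6,0xc8" hand-encodes
 * REP XSHA1 for assemblers that do not know the mnemonic. ESI ("+S")
 * points at the input, EDI ("+D") at the aligned result buffer, and
 * ECX ("c") holds the byte count; EAX == 0 appears to select the
 * complete-message mode, in which the hardware appends the SHA-1
 * padding itself. padlock_do_sha256() below follows the same pattern
 * with the REP XSHA256 encoding.
 */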

static void padlock_do_sha256(const char *in, char *out, int count)
{
        /* We can't store directly to *out as it may be unaligned. */
        /* Don't reduce the buffer size below 128 bytes!
         * The PadLock microcode needs it that big. */
        char buf[128+16];
        char *result = NEAREST_ALIGNED(buf);
        int ts_state;

        ((uint32_t *)result)[0] = SHA256_H0;
        ((uint32_t *)result)[1] = SHA256_H1;
        ((uint32_t *)result)[2] = SHA256_H2;
        ((uint32_t *)result)[3] = SHA256_H3;
        ((uint32_t *)result)[4] = SHA256_H4;
        ((uint32_t *)result)[5] = SHA256_H5;
        ((uint32_t *)result)[6] = SHA256_H6;
        ((uint32_t *)result)[7] = SHA256_H7;

        /* prevent taking the spurious DNA fault with padlock. */
        ts_state = irq_ts_save();
        asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
                      : "+S"(in), "+D"(result)
                      : "c"(count), "a"(0));
        irq_ts_restore(ts_state);

        padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);
}

static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
{
        int err;

        if (unlikely(ctx(tfm)->bypass)) {
                err = crypto_shash_final(ctx(tfm)->fallback, out);
                BUG_ON(err);
                ctx(tfm)->bypass = 0;
                return;
        }

        /* Pass the input buffer to PadLock microcode... */
        ctx(tfm)->f_sha_padlock(ctx(tfm)->data, out, ctx(tfm)->used);

        ctx(tfm)->used = 0;
}
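
/*
 * Design note: this driver works as a one-shot engine. update() only
 * accumulates input in the one-page buffer, and final() hands the
 * whole message to the hardware in a single pass; only when the input
 * outgrows PAGE_SIZE does the bypass path stream it through the
 * software fallback instead.
 */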

static int padlock_cra_init(struct crypto_tfm *tfm)
{
        const char *fallback_driver_name = tfm->__crt_alg->cra_name;
        struct crypto_shash *fallback_tfm;
        int err = -ENOMEM;

        /* For now we'll allocate one page. This
         * could be made configurable one day. */
        ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
        if (!ctx(tfm)->data)
                goto out;

        /* Allocate a fallback and abort if it fails. */
        fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
                                          CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback_tfm)) {
                printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
                       fallback_driver_name);
                err = PTR_ERR(fallback_tfm);
                goto out_free_page;
        }

        ctx(tfm)->fallback = kmalloc(sizeof(struct shash_desc) +
                                     crypto_shash_descsize(fallback_tfm),
                                     GFP_KERNEL);
        if (!ctx(tfm)->fallback)
                goto out_free_tfm;

        ctx(tfm)->fallback->tfm = fallback_tfm;
        ctx(tfm)->fallback->flags = 0;
        return 0;

out_free_tfm:
        crypto_free_shash(fallback_tfm);
out_free_page:
        free_page((unsigned long)(ctx(tfm)->data));
out:
        return err;
}

static int padlock_sha1_cra_init(struct crypto_tfm *tfm)
{
        ctx(tfm)->f_sha_padlock = padlock_do_sha1;

        return padlock_cra_init(tfm);
}

static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
{
        ctx(tfm)->f_sha_padlock = padlock_do_sha256;

        return padlock_cra_init(tfm);
}

static void padlock_cra_exit(struct crypto_tfm *tfm)
{
        if (ctx(tfm)->data) {
                free_page((unsigned long)(ctx(tfm)->data));
                ctx(tfm)->data = NULL;
        }

        crypto_free_shash(ctx(tfm)->fallback->tfm);

        /* The descriptor may still hold hash state: zero it before freeing. */
        kzfree(ctx(tfm)->fallback);
}

static struct crypto_alg sha1_alg = {
        .cra_name               =       "sha1",
        .cra_driver_name        =       "sha1-padlock",
        .cra_priority           =       PADLOCK_CRA_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_DIGEST |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       SHA1_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(sha1_alg.cra_list),
        .cra_init               =       padlock_sha1_cra_init,
        .cra_exit               =       padlock_cra_exit,
        .cra_u                  =       {
                .digest = {
                        .dia_digestsize =       SHA1_DIGEST_SIZE,
                        .dia_init       =       padlock_sha_init,
                        .dia_update     =       padlock_sha_update,
                        .dia_final      =       padlock_sha_final,
                }
        }
};

static struct crypto_alg sha256_alg = {
        .cra_name               =       "sha256",
        .cra_driver_name        =       "sha256-padlock",
        .cra_priority           =       PADLOCK_CRA_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_DIGEST |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       SHA256_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct padlock_sha_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(sha256_alg.cra_list),
        .cra_init               =       padlock_sha256_cra_init,
        .cra_exit               =       padlock_cra_exit,
        .cra_u                  =       {
                .digest = {
                        .dia_digestsize =       SHA256_DIGEST_SIZE,
                        .dia_init       =       padlock_sha_init,
                        .dia_update     =       padlock_sha_update,
                        .dia_final      =       padlock_sha_final,
                }
        }
};

static int __init padlock_init(void)
{
        int rc = -ENODEV;

        if (!cpu_has_phe) {
                printk(KERN_NOTICE PFX "VIA PadLock Hash Engine not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_phe_enabled) {
                printk(KERN_NOTICE PFX "VIA PadLock detected, but not enabled. Hmm, strange...\n");
                return -ENODEV;
        }

        rc = crypto_register_alg(&sha1_alg);
        if (rc)
                goto out;

        rc = crypto_register_alg(&sha256_alg);
        if (rc)
                goto out_unreg1;

        printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

        return 0;

out_unreg1:
        crypto_unregister_alg(&sha1_alg);
out:
        printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
        return rc;
}

static void __exit padlock_fini(void)
{
        crypto_unregister_alg(&sha1_alg);
        crypto_unregister_alg(&sha256_alg);
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS("sha1-all");
MODULE_ALIAS("sha256-all");
MODULE_ALIAS("sha1-padlock");
MODULE_ALIAS("sha256-padlock");
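
/*
 * A minimal usage sketch (not part of this file): with the module
 * loaded, PADLOCK_CRA_PRIORITY makes the crypto API prefer
 * "sha1-padlock" whenever plain "sha1" is requested, so existing hash
 * users are accelerated transparently. Something along these lines,
 * using the synchronous hash API of this kernel generation with error
 * handling omitted for brevity, would end up in this driver:
 *
 *      struct crypto_hash *tfm;
 *      struct hash_desc desc;
 *      struct scatterlist sg;
 *      u8 digest[SHA1_DIGEST_SIZE];
 *
 *      tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *      desc.tfm = tfm;
 *      desc.flags = 0;
 *      sg_init_one(&sg, buf, len);
 *      crypto_hash_digest(&desc, &sg, len, digest);
 *      crypto_free_hash(tfm);
 */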