crypto/shash.c — Synchronous Cryptographic Hash operations (Linux kernel)
1 /*
2  * Synchronous Cryptographic Hash operations.
3  *
4  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms of the GNU General Public License as published by the Free
8  * Software Foundation; either version 2 of the License, or (at your option)
9  * any later version.
10  *
11  */
12
13 #include <crypto/scatterwalk.h>
14 #include <crypto/internal/hash.h>
15 #include <linux/err.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/slab.h>
19 #include <linux/seq_file.h>
20 #include <linux/cryptouser.h>
21 #include <net/netlink.h>
22
23 #include "internal.h"
24
25 static const struct crypto_type crypto_shash_type;
26
/*
 * Default ->setkey() for unkeyed algorithms: always fails.
 * Installed by shash_prepare_alg() when the algorithm supplies no setkey,
 * so every shash is guaranteed to have a callable ->setkey().
 */
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                    unsigned int keylen)
{
        return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);
33
/*
 * Set a key whose buffer does not satisfy the algorithm's alignmask.
 * Copies the key into a freshly allocated buffer, at an offset rounded up
 * to alignmask + 1, and hands that aligned copy to ->setkey().  The bounce
 * buffer is freed with kzfree() so no key material is left on the heap.
 */
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        /* Worst-case extra space needed to realign within the allocation;
         * kmalloc already aligns to crypto_tfm_ctx_alignment(). */
        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);         /* zeroize before freeing: buffer held the key */
        return err;
}
54
55 int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
56                         unsigned int keylen)
57 {
58         struct shash_alg *shash = crypto_shash_alg(tfm);
59         unsigned long alignmask = crypto_shash_alignmask(tfm);
60         int err;
61
62         if ((unsigned long)key & alignmask)
63                 err = shash_setkey_unaligned(tfm, key, keylen);
64         else
65                 err = shash->setkey(tfm, key, keylen);
66
67         if (err)
68                 return err;
69
70         crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
71         return 0;
72 }
73 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
74
/*
 * Size of an on-stack scratch buffer large enough that a pointer inside it
 * can be rounded up to satisfy `mask`.  The declared buffer already gets
 * the compiler's maximum natural alignment (via __attribute__((aligned))),
 * so only the slack beyond that alignment needs to be added.
 */
static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}
80
/*
 * Update with data whose start is misaligned for this algorithm.
 * The leading bytes up to the next aligned boundary are copied through an
 * aligned on-stack bounce buffer and hashed first; the (now aligned)
 * remainder is then passed to ->update() directly.
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        /* Distance from `data` to the next alignmask+1 boundary. */
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        /* VLA bounce buffer, oversized so `buf` can be realigned within it. */
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        /* Wipe the stack copy regardless of the outcome above. */
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}
104
105 int crypto_shash_update(struct shash_desc *desc, const u8 *data,
106                         unsigned int len)
107 {
108         struct crypto_shash *tfm = desc->tfm;
109         struct shash_alg *shash = crypto_shash_alg(tfm);
110         unsigned long alignmask = crypto_shash_alignmask(tfm);
111
112         if ((unsigned long)data & alignmask)
113                 return shash_update_unaligned(desc, data, len);
114
115         return shash->update(desc, data, len);
116 }
117 EXPORT_SYMBOL_GPL(crypto_shash_update);
118
/*
 * Finalize into a misaligned output buffer: let ->final() write the digest
 * into an aligned on-stack scratch area, then memcpy it to the caller's
 * buffer.  The scratch area is wiped before returning, even on error.
 */
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        /* VLA bounce buffer, oversized so `buf` can be realigned within it. */
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        /* Don't leak the digest on the stack. */
        memset(buf, 0, ds);
        return err;
}
140
141 int crypto_shash_final(struct shash_desc *desc, u8 *out)
142 {
143         struct crypto_shash *tfm = desc->tfm;
144         struct shash_alg *shash = crypto_shash_alg(tfm);
145         unsigned long alignmask = crypto_shash_alignmask(tfm);
146
147         if ((unsigned long)out & alignmask)
148                 return shash_final_unaligned(desc, out);
149
150         return shash->final(desc, out);
151 }
152 EXPORT_SYMBOL_GPL(crypto_shash_final);
153
154 static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
155                                  unsigned int len, u8 *out)
156 {
157         return crypto_shash_update(desc, data, len) ?:
158                crypto_shash_final(desc, out);
159 }
160
161 int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
162                        unsigned int len, u8 *out)
163 {
164         struct crypto_shash *tfm = desc->tfm;
165         struct shash_alg *shash = crypto_shash_alg(tfm);
166         unsigned long alignmask = crypto_shash_alignmask(tfm);
167
168         if (((unsigned long)data | (unsigned long)out) & alignmask)
169                 return shash_finup_unaligned(desc, data, len, out);
170
171         return shash->finup(desc, data, len, out);
172 }
173 EXPORT_SYMBOL_GPL(crypto_shash_finup);
174
175 static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
176                                   unsigned int len, u8 *out)
177 {
178         return crypto_shash_init(desc) ?:
179                crypto_shash_finup(desc, data, len, out);
180 }
181
182 int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
183                         unsigned int len, u8 *out)
184 {
185         struct crypto_shash *tfm = desc->tfm;
186         struct shash_alg *shash = crypto_shash_alg(tfm);
187         unsigned long alignmask = crypto_shash_alignmask(tfm);
188
189         if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
190                 return -ENOKEY;
191
192         if (((unsigned long)data | (unsigned long)out) & alignmask)
193                 return shash_digest_unaligned(desc, data, len, out);
194
195         return shash->digest(desc, data, len, out);
196 }
197 EXPORT_SYMBOL_GPL(crypto_shash_digest);
198
/*
 * Default ->export(): the exported state is simply a byte copy of the
 * descriptor context (hence statesize == descsize in shash_prepare_alg()).
 */
static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}
204
/*
 * Default ->import(): restore a state previously produced by
 * shash_default_export() with a straight byte copy into the context.
 */
static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}
210
211 static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
212                               unsigned int keylen)
213 {
214         struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
215
216         return crypto_shash_setkey(*ctx, key, keylen);
217 }
218
/*
 * ahash ->init() wrapper: point the per-request shash descriptor at the
 * underlying shash tfm, propagate the request flags, and initialize.
 */
static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}
229
/*
 * Drive an shash update from an ahash request's scatterlist: walk the
 * request mapping one span at a time and hash each span.
 *
 * Note the loop variable does double duty: inside the body `nbytes`
 * becomes crypto_shash_update()'s result (0 or -errno), which
 * crypto_hash_walk_done() uses to either continue or abort the walk.
 * On completion/error, the final value (0 or -errno) is returned.
 */
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
241 EXPORT_SYMBOL_GPL(shash_ahash_update);
242
/* ahash ->update() wrapper around the shared scatterlist-walk helper. */
static int shash_async_update(struct ahash_request *req)
{
        struct shash_desc *desc = ahash_request_ctx(req);

        return shash_ahash_update(req, desc);
}
247
248 static int shash_async_final(struct ahash_request *req)
249 {
250         return crypto_shash_final(ahash_request_ctx(req), req->result);
251 }
252
/*
 * finup over an ahash request's scatterlist: hash every span with
 * update(), except the last span which is folded into the final digest
 * via finup().  An empty request (walk yields no data) degenerates to a
 * plain final().  Returns 0 or -errno.
 */
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                /* Last span: finish with finup; otherwise keep updating. */
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                /* Pass the shash result back into the walk (aborts on error). */
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
273
/*
 * ahash ->finup() wrapper.  Re-binds desc->tfm/flags here because finup
 * may be called without a preceding ->init() on this request context.
 */
static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}
284
/*
 * One-shot digest over an ahash request's scatterlist.
 *
 * Fast path: when the data is non-empty and fits entirely within the
 * first scatterlist entry and within its page (checked via the comma
 * expression in the condition), kmap the page once and hash it directly.
 * Otherwise fall back to init + the span-walking finup helper.
 */
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        unsigned int nbytes = req->nbytes;
        struct scatterlist *sg;
        unsigned int offset;
        int err;

        if (nbytes &&
            (sg = req->src, offset = sg->offset,
             nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;

                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                crypto_kunmap(data, 0);
                /* May reschedule if the request flags allow sleeping. */
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
309
/*
 * ahash ->digest() wrapper.  Binds desc->tfm/flags itself since digest
 * is a standalone operation that does not go through ->init().
 */
static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}
320
/* ahash ->export() wrapper: export the request's shash descriptor state. */
static int shash_async_export(struct ahash_request *req, void *out)
{
        struct shash_desc *desc = ahash_request_ctx(req);

        return crypto_shash_export(desc, out);
}
325
/*
 * ahash ->import() wrapper.  Re-binds desc->tfm/flags since import may be
 * the first operation performed on this request context.
 */
static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}
336
/*
 * Teardown for the async wrapper: drop the underlying shash transform
 * allocated in crypto_init_shash_ops_async().
 */
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}
343
/*
 * Present a synchronous shash algorithm through the ahash interface.
 * Allocates an inner shash tfm (stored in the ahash context), wires all
 * ahash entry points to the shash_async_* wrappers, and mirrors the
 * NEED_KEY flag so unkeyed use is rejected at the ahash layer too.
 *
 * Reference handling: crypto_mod_get() takes a module/alg reference;
 * crypto_create_tfm() consumes it on success, so it is only put back on
 * the failure path here.
 */
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        /* Propagate "key still required" state from the inner shash. */
        crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
                                    CRYPTO_TFM_NEED_KEY);

        /* export/import are optional; only expose them if the alg has them. */
        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        /* Per-request state: one shash descriptor plus its context. */
        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}
383
384 static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
385                                unsigned int keylen)
386 {
387         struct shash_desc **descp = crypto_hash_ctx(tfm);
388         struct shash_desc *desc = *descp;
389
390         return crypto_shash_setkey(desc->tfm, key, keylen);
391 }
392
/*
 * Legacy hash ->init() wrapper: refresh the descriptor's flags from the
 * caller's hash_desc, then initialize the shash state.
 */
static int shash_compat_init(struct hash_desc *hdesc)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
        struct shash_desc *desc = *descp;

        desc->flags = hdesc->flags;

        return crypto_shash_init(desc);
}
402
/*
 * Legacy hash ->update() wrapper: walk the scatterlist with the compat
 * walker and hash each mapped span.  As in shash_ahash_update(), `nbytes`
 * is reused inside the loop body to carry the shash result code into
 * crypto_hash_walk_done(), which aborts the walk on error.
 */
static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int len)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
        struct shash_desc *desc = *descp;
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
             nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
417
/* Legacy hash ->final() wrapper: finalize the context's shash descriptor. */
static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

        return crypto_shash_final(*descp, out);
}
424
/*
 * Legacy hash ->digest() wrapper.
 *
 * Fast path: data that fits within the first scatterlist entry and within
 * its page is kmapped and digested in one call.  Otherwise fall back to
 * the init / update / final sequence over the whole scatterlist.
 */
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int nbytes, u8 *out)
{
        unsigned int offset = sg->offset;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
                struct shash_desc *desc = *descp;
                void *data;

                desc->flags = hdesc->flags;

                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes, out);
                crypto_kunmap(data, 0);
                /* May reschedule if the request flags allow sleeping. */
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_compat_init(hdesc);
        if (err)
                goto out;

        err = shash_compat_update(hdesc, sg, nbytes);
        if (err)
                goto out;

        err = shash_compat_final(hdesc, out);

out:
        return err;
}
458
/*
 * Teardown for the legacy-hash wrapper: free the inner shash tfm, then the
 * descriptor allocated in crypto_init_shash_ops_compat().  kzfree() wipes
 * the descriptor since its context may hold keyed hash state.
 */
static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct shash_desc **descp = crypto_tfm_ctx(tfm);
        struct shash_desc *desc = *descp;

        crypto_free_shash(desc->tfm);
        kzfree(desc);
}
467
/*
 * Present a shash algorithm through the legacy crypto_hash interface.
 * Allocates an inner shash tfm plus a long-lived shash descriptor (stored
 * via a pointer in the tfm context) and wires the legacy entry points to
 * the shash_compat_* wrappers.
 *
 * Reference handling mirrors crypto_init_shash_ops_async(): the module
 * reference from crypto_mod_get() is consumed by crypto_create_tfm() on
 * success; after that point crypto_free_shash() releases everything, so
 * the kmalloc-failure path only frees the shash.
 */
static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct hash_tfm *crt = &tfm->crt_hash;
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct shash_desc **descp = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;
        struct shash_desc *desc;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        /* Descriptor plus the algorithm's per-state context, in one chunk. */
        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
                       GFP_KERNEL);
        if (!desc) {
                crypto_free_shash(shash);
                return -ENOMEM;
        }

        *descp = desc;
        desc->tfm = shash;
        tfm->exit = crypto_exit_shash_ops_compat;

        crt->init = shash_compat_init;
        crt->update = shash_compat_update;
        crt->final  = shash_compat_final;
        crt->digest = shash_compat_digest;
        crt->setkey = shash_compat_setkey;

        crt->digestsize = alg->digestsize;

        return 0;
}
507
508 static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
509 {
510         switch (mask & CRYPTO_ALG_TYPE_MASK) {
511         case CRYPTO_ALG_TYPE_HASH_MASK:
512                 return crypto_init_shash_ops_compat(tfm);
513         }
514
515         return -EINVAL;
516 }
517
518 static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
519                                          u32 mask)
520 {
521         switch (mask & CRYPTO_ALG_TYPE_MASK) {
522         case CRYPTO_ALG_TYPE_HASH_MASK:
523                 return sizeof(struct shash_desc *);
524         }
525
526         return 0;
527 }
528
/*
 * crypto_type ->init_tfm(): cache the descriptor size on the tfm and, for
 * keyed algorithms without the OPTIONAL_KEY flag, mark the transform as
 * needing a key so digest operations fail with -ENOKEY until setkey.
 */
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        struct shash_alg *alg = crypto_shash_alg(hash);

        hash->descsize = alg->descsize;

        if (crypto_shash_alg_has_setkey(alg) &&
            !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
                crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

        return 0;
}
542
/* crypto_type ->extsize(): the shash tfm needs exactly cra_ctxsize bytes. */
static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
        return alg->cra_ctxsize;
}
547
#ifdef CONFIG_NET
/*
 * Report this algorithm to userspace via the crypto netlink interface.
 * NLA_PUT expands to a put that jumps to nla_put_failure when the skb
 * runs out of room, hence the otherwise-unreferenced label below.
 */
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        NLA_PUT(skb, CRYPTOCFGA_REPORT_HASH,
                sizeof(struct crypto_report_hash), &rhash);

        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
/* Netlink reporting unavailable without CONFIG_NET. */
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif
573
/*
 * /proc/crypto ->show() for shash algorithms.  The preceding declaration
 * is marked unused so the definition compiles cleanly when CONFIG_PROC_FS
 * is off and crypto_shash_type never references it.
 */
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
584
/* The crypto_type object that registers shash with the crypto core. */
static const struct crypto_type crypto_shash_type = {
        .ctxsize = crypto_shash_ctxsize,
        .extsize = crypto_shash_extsize,
        .init = crypto_init_shash_ops,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};
599
/*
 * Allocate a shash transform by algorithm name.  Returns an ERR_PTR on
 * failure; the caller releases it with crypto_free_shash().
 */
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
606
/*
 * Validate and normalize a shash_alg before registration: bound the
 * digest/descriptor/state sizes, stamp the type fields, and fill in
 * defaults for every optional operation so the fast paths above can call
 * them unconditionally.
 */
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        /* Sanity bound: these sizes feed on-stack buffers elsewhere. */
        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                /* Default export/import are raw context copies. */
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}
634
635 int crypto_register_shash(struct shash_alg *alg)
636 {
637         struct crypto_alg *base = &alg->base;
638         int err;
639
640         err = shash_prepare_alg(alg);
641         if (err)
642                 return err;
643
644         return crypto_register_alg(base);
645 }
646 EXPORT_SYMBOL_GPL(crypto_register_shash);
647
/* Unregister a shash algorithm previously added by crypto_register_shash(). */
int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
653
654 int shash_register_instance(struct crypto_template *tmpl,
655                             struct shash_instance *inst)
656 {
657         int err;
658
659         err = shash_prepare_alg(&inst->alg);
660         if (err)
661                 return err;
662
663         return crypto_register_instance(tmpl, shash_crypto_instance(inst));
664 }
665 EXPORT_SYMBOL_GPL(shash_register_instance);
666
/*
 * Destructor for simple shash instances: drop the spawn held in the
 * instance context, then free the instance itself.
 */
void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
673
/*
 * Initialize a spawn (a template's reference to an underlying shash
 * algorithm), typed so lookups resolve through crypto_shash_type.
 */
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
682
683 struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
684 {
685         struct crypto_alg *alg;
686
687         alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
688         return IS_ERR(alg) ? ERR_CAST(alg) :
689                container_of(alg, struct shash_alg, base);
690 }
691 EXPORT_SYMBOL_GPL(shash_attr_alg);
692
693 MODULE_LICENSE("GPL");
694 MODULE_DESCRIPTION("Synchronous cryptographic hash type");