/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */
11 #include <crypto/algapi.h>
12 #include <crypto/gf128mul.h>
13 #include <crypto/scatterwalk.h>
14 #include <linux/completion.h>
15 #include <linux/err.h>
16 #include <linux/init.h>
17 #include <linux/kernel.h>
18 #include <linux/module.h>
19 #include <linux/slab.h>
23 struct gcm_instance_ctx {
24 struct crypto_spawn ctr;
/*
 * Per-transform context: the keyed CTR cipher handle and the 4k
 * multiplication table for GHASH (allocated in setkey, freed in exit_tfm).
 */
struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;
	struct gf128mul_4k *gf128;
};
32 struct crypto_gcm_ghash_ctx {
35 struct gf128mul_4k *gf128;
39 struct crypto_gcm_req_priv_ctx {
42 struct scatterlist src[2];
43 struct scatterlist dst[2];
44 struct crypto_gcm_ghash_ctx ghash;
45 struct ablkcipher_request abreq;
48 struct crypto_gcm_setkey_result {
50 struct completion completion;
53 static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
54 struct aead_request *req)
56 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
58 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
61 static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
62 struct gf128mul_4k *gf128)
67 memset(ctx->buffer, 0, 16);
70 static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
71 const u8 *src, unsigned int srclen)
73 u8 *dst = ctx->buffer;
76 int n = min(srclen, ctx->bytes);
77 u8 *pos = dst + (16 - ctx->bytes);
86 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
89 while (srclen >= 16) {
90 crypto_xor(dst, src, 16);
91 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
97 ctx->bytes = 16 - srclen;
103 static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
104 struct scatterlist *sg, int len)
106 struct scatter_walk walk;
113 scatterwalk_start(&walk, sg);
116 n = scatterwalk_clamp(&walk, len);
119 scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
120 n = scatterwalk_clamp(&walk, len);
123 src = scatterwalk_map(&walk, 0);
125 crypto_gcm_ghash_update(ctx, src, n);
128 scatterwalk_unmap(src, 0);
129 scatterwalk_advance(&walk, n);
130 scatterwalk_done(&walk, 0, len);
132 crypto_yield(ctx->flags);
136 static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
138 u8 *dst = ctx->buffer;
141 u8 *tmp = dst + (16 - ctx->bytes);
146 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
152 static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
153 unsigned int authlen,
154 unsigned int cryptlen, u8 *dst)
156 u8 *buf = ctx->buffer;
159 lengths.a = cpu_to_be64(authlen * 8);
160 lengths.b = cpu_to_be64(cryptlen * 8);
162 crypto_gcm_ghash_flush(ctx);
163 crypto_xor(buf, (u8 *)&lengths, 16);
164 gf128mul_4k_lle((be128 *)buf, ctx->gf128);
165 crypto_xor(dst, buf, 16);
168 static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
170 struct crypto_gcm_setkey_result *result = req->data;
172 if (err == -EINPROGRESS)
176 complete(&result->completion);
179 static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
182 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
183 struct crypto_ablkcipher *ctr = ctx->ctr;
188 struct crypto_gcm_setkey_result result;
190 struct scatterlist sg[1];
191 struct ablkcipher_request req;
195 crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
196 crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
197 CRYPTO_TFM_REQ_MASK);
199 err = crypto_ablkcipher_setkey(ctr, key, keylen);
203 crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
204 CRYPTO_TFM_RES_MASK);
206 data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
211 init_completion(&data->result.completion);
212 sg_init_one(data->sg, &data->hash, sizeof(data->hash));
213 ablkcipher_request_set_tfm(&data->req, ctr);
214 ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
215 CRYPTO_TFM_REQ_MAY_BACKLOG,
216 crypto_gcm_setkey_done,
218 ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
219 sizeof(data->hash), data->iv);
221 err = crypto_ablkcipher_encrypt(&data->req);
222 if (err == -EINPROGRESS || err == -EBUSY) {
223 err = wait_for_completion_interruptible(
224 &data->result.completion);
226 err = data->result.err;
232 if (ctx->gf128 != NULL)
233 gf128mul_free_4k(ctx->gf128);
235 ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
237 if (ctx->gf128 == NULL)
245 static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
246 struct aead_request *req,
247 unsigned int cryptlen)
249 struct crypto_aead *aead = crypto_aead_reqtfm(req);
250 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
251 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
252 u32 flags = req->base.tfm->crt_flags;
253 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
254 struct scatterlist *dst;
255 __be32 counter = cpu_to_be32(1);
257 memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
258 memcpy(req->iv + 12, &counter, 4);
260 sg_init_table(pctx->src, 2);
261 sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
262 scatterwalk_sg_chain(pctx->src, 2, req->src);
265 if (req->src != req->dst) {
266 sg_init_table(pctx->dst, 2);
267 sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
268 scatterwalk_sg_chain(pctx->dst, 2, req->dst);
272 ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
273 ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
274 cryptlen + sizeof(pctx->auth_tag),
277 crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
279 crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
280 crypto_gcm_ghash_flush(ghash);
283 static int crypto_gcm_hash(struct aead_request *req)
285 struct crypto_aead *aead = crypto_aead_reqtfm(req);
286 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
287 u8 *auth_tag = pctx->auth_tag;
288 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
290 crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
291 crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
294 scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
295 crypto_aead_authsize(aead), 1);
299 static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
301 struct aead_request *req = areq->data;
304 err = crypto_gcm_hash(req);
306 aead_request_complete(req, err);
309 static int crypto_gcm_encrypt(struct aead_request *req)
311 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
312 struct ablkcipher_request *abreq = &pctx->abreq;
315 crypto_gcm_init_crypt(abreq, req, req->cryptlen);
316 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
317 crypto_gcm_encrypt_done, req);
319 err = crypto_ablkcipher_encrypt(abreq);
323 return crypto_gcm_hash(req);
326 static int crypto_gcm_verify(struct aead_request *req)
328 struct crypto_aead *aead = crypto_aead_reqtfm(req);
329 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
330 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
331 u8 *auth_tag = pctx->auth_tag;
332 u8 *iauth_tag = pctx->iauth_tag;
333 unsigned int authsize = crypto_aead_authsize(aead);
334 unsigned int cryptlen = req->cryptlen - authsize;
336 crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
338 authsize = crypto_aead_authsize(aead);
339 scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
340 return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
343 static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
345 struct aead_request *req = areq->data;
348 err = crypto_gcm_verify(req);
350 aead_request_complete(req, err);
353 static int crypto_gcm_decrypt(struct aead_request *req)
355 struct crypto_aead *aead = crypto_aead_reqtfm(req);
356 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
357 struct ablkcipher_request *abreq = &pctx->abreq;
358 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
359 unsigned int cryptlen = req->cryptlen;
360 unsigned int authsize = crypto_aead_authsize(aead);
363 if (cryptlen < authsize)
365 cryptlen -= authsize;
367 crypto_gcm_init_crypt(abreq, req, cryptlen);
368 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
369 crypto_gcm_decrypt_done, req);
371 crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
373 err = crypto_ablkcipher_decrypt(abreq);
377 return crypto_gcm_verify(req);
380 static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
382 struct crypto_instance *inst = (void *)tfm->__crt_alg;
383 struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
384 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
385 struct crypto_ablkcipher *ctr;
389 ctr = crypto_spawn_ablkcipher(&ictx->ctr);
397 align = crypto_tfm_alg_alignmask(tfm);
398 align &= ~(crypto_tfm_ctx_alignment() - 1);
399 tfm->crt_aead.reqsize = align +
400 sizeof(struct crypto_gcm_req_priv_ctx) +
401 crypto_ablkcipher_reqsize(ctr);
406 static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
408 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
410 if (ctx->gf128 != NULL)
411 gf128mul_free_4k(ctx->gf128);
413 crypto_free_ablkcipher(ctx->ctr);
416 static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
418 struct crypto_instance *inst;
419 struct crypto_alg *ctr;
420 struct crypto_alg *cipher;
421 struct gcm_instance_ctx *ctx;
423 char ctr_name[CRYPTO_MAX_ALG_NAME];
425 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD);
429 cipher = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
430 CRYPTO_ALG_TYPE_MASK);
432 inst = ERR_PTR(PTR_ERR(cipher));
436 inst = ERR_PTR(ENAMETOOLONG);
437 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
438 cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
441 ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
442 CRYPTO_ALG_TYPE_MASK);
445 return ERR_PTR(PTR_ERR(ctr));
447 if (cipher->cra_blocksize != 16)
450 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
456 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
457 "gcm(%s)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME ||
458 snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
459 "gcm(%s)", cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
463 ctx = crypto_instance_ctx(inst);
464 err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK);
468 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
469 inst->alg.cra_priority = ctr->cra_priority;
470 inst->alg.cra_blocksize = 16;
471 inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
472 inst->alg.cra_type = &crypto_aead_type;
473 inst->alg.cra_aead.ivsize = 16;
474 inst->alg.cra_aead.maxauthsize = 16;
475 inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
476 inst->alg.cra_init = crypto_gcm_init_tfm;
477 inst->alg.cra_exit = crypto_gcm_exit_tfm;
478 inst->alg.cra_aead.setkey = crypto_gcm_setkey;
479 inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
480 inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
492 static void crypto_gcm_free(struct crypto_instance *inst)
494 struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
496 crypto_drop_spawn(&ctx->ctr);
500 static struct crypto_template crypto_gcm_tmpl = {
502 .alloc = crypto_gcm_alloc,
503 .free = crypto_gcm_free,
504 .module = THIS_MODULE,
507 static int __init crypto_gcm_module_init(void)
509 return crypto_register_template(&crypto_gcm_tmpl);
512 static void __exit crypto_gcm_module_exit(void)
514 crypto_unregister_template(&crypto_gcm_tmpl);
/* Standard module hookup and metadata. */
module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");