Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
author Linus Torvalds <torvalds@linux-foundation.org>
Fri, 25 Jan 2008 16:38:25 +0000 (08:38 -0800)
committer Linus Torvalds <torvalds@linux-foundation.org>
Fri, 25 Jan 2008 16:38:25 +0000 (08:38 -0800)
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (125 commits)
  [CRYPTO] twofish: Merge common glue code
  [CRYPTO] hifn_795x: Fixup container_of() usage
  [CRYPTO] cast6: inline bloat--
  [CRYPTO] api: Set default CRYPTO_MINALIGN to unsigned long long
  [CRYPTO] tcrypt: Make xcbc available as a standalone test
  [CRYPTO] xcbc: Remove bogus hash/cipher test
  [CRYPTO] xcbc: Fix algorithm leak when block size check fails
  [CRYPTO] tcrypt: Zero axbuf in the right function
  [CRYPTO] padlock: Only reset the key once for each CBC and ECB operation
  [CRYPTO] api: Include sched.h for cond_resched in scatterwalk.h
  [CRYPTO] salsa20-asm: Remove unnecessary dependency on CRYPTO_SALSA20
  [CRYPTO] tcrypt: Add select of AEAD
  [CRYPTO] salsa20: Add x86-64 assembly version
  [CRYPTO] salsa20_i586: Salsa20 stream cipher algorithm (i586 version)
  [CRYPTO] gcm: Introduce rfc4106
  [CRYPTO] api: Show async type
  [CRYPTO] chainiv: Avoid lock spinning where possible
  [CRYPTO] seqiv: Add select AEAD in Kconfig
  [CRYPTO] scatterwalk: Handle zero nbytes in scatterwalk_map_and_copy
  [CRYPTO] null: Allow setkey on digest_null
  ...

72 files changed:
Documentation/crypto/api-intro.txt
arch/s390/crypto/aes_s390.c
arch/x86/crypto/Makefile
arch/x86/crypto/aes-i586-asm_32.S
arch/x86/crypto/aes-x86_64-asm_64.S
arch/x86/crypto/aes_32.c [deleted file]
arch/x86/crypto/aes_64.c [deleted file]
arch/x86/crypto/aes_glue.c [new file with mode: 0644]
arch/x86/crypto/salsa20-i586-asm_32.S [new file with mode: 0644]
arch/x86/crypto/salsa20-x86_64-asm_64.S [new file with mode: 0644]
arch/x86/crypto/salsa20_glue.c [new file with mode: 0644]
arch/x86/crypto/twofish_64.c [deleted file]
arch/x86/crypto/twofish_glue.c [moved from arch/x86/crypto/twofish_32.c with 94% similarity]
crypto/Kconfig
crypto/Makefile
crypto/ablkcipher.c
crypto/aead.c
crypto/aes_generic.c
crypto/algapi.c
crypto/api.c
crypto/authenc.c
crypto/blkcipher.c
crypto/camellia.c
crypto/cast6.c
crypto/cbc.c
crypto/ccm.c [new file with mode: 0644]
crypto/chainiv.c [new file with mode: 0644]
crypto/cryptd.c
crypto/crypto_null.c
crypto/ctr.c [new file with mode: 0644]
crypto/des_generic.c
crypto/digest.c
crypto/eseqiv.c [new file with mode: 0644]
crypto/gcm.c [new file with mode: 0644]
crypto/hmac.c
crypto/internal.h
crypto/lzo.c [new file with mode: 0644]
crypto/pcbc.c
crypto/salsa20_generic.c [new file with mode: 0644]
crypto/scatterwalk.c
crypto/seqiv.c [new file with mode: 0644]
crypto/sha256_generic.c
crypto/tcrypt.c
crypto/tcrypt.h
crypto/twofish_common.c
crypto/xcbc.c
drivers/char/hw_random/amd-rng.c
drivers/char/hw_random/core.c
drivers/char/hw_random/geode-rng.c
drivers/char/hw_random/intel-rng.c
drivers/char/hw_random/omap-rng.c
drivers/char/hw_random/pasemi-rng.c
drivers/char/hw_random/via-rng.c
drivers/crypto/Kconfig
drivers/crypto/Makefile
drivers/crypto/geode-aes.c
drivers/crypto/geode-aes.h
drivers/crypto/hifn_795x.c [new file with mode: 0644]
drivers/crypto/padlock-aes.c
include/crypto/aead.h [new file with mode: 0644]
include/crypto/aes.h [new file with mode: 0644]
include/crypto/algapi.h
include/crypto/authenc.h [new file with mode: 0644]
include/crypto/ctr.h [new file with mode: 0644]
include/crypto/des.h [new file with mode: 0644]
include/crypto/internal/aead.h [new file with mode: 0644]
include/crypto/internal/skcipher.h [new file with mode: 0644]
include/crypto/scatterwalk.h [moved from crypto/scatterwalk.h with 69% similarity]
include/crypto/sha.h
include/crypto/skcipher.h [new file with mode: 0644]
include/linux/crypto.h
include/linux/hw_random.h

diff --git a/Documentation/crypto/api-intro.txt b/Documentation/crypto/api-intro.txt
index a2ac6d2..8b49302 100644 (file)
@@ -33,9 +33,16 @@ The idea is to make the user interface and algorithm registration API
 very simple, while hiding the core logic from both.  Many good ideas
 from existing APIs such as Cryptoapi and Nettle have been adapted for this.
 
-The API currently supports three types of transforms: Ciphers, Digests and
-Compressors.  The compression algorithms especially seem to be performing
-very well so far.
+The API currently supports five main types of transforms: AEAD (Authenticated
+Encryption with Associated Data), Block Ciphers, Ciphers, Compressors and
+Hashes.
+
+Please note that Block Ciphers is somewhat of a misnomer.  It is in fact
+meant to support all ciphers including stream ciphers.  The difference
+between Block Ciphers and Ciphers is that the latter operates on exactly
+one block while the former can operate on an arbitrary amount of data,
+subject to block size requirements (i.e., non-stream ciphers can only
+process multiples of blocks).
 
 Support for hardware crypto devices via an asynchronous interface is
 under development.
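
The Ciphers/Block Ciphers distinction described in the new text maps onto two
different handle types in the kernel API of this release. The following is a
minimal, untested sketch (key, IV and buffer contents are placeholders, and
error handling on setkey is trimmed): one block processed through a
crypto_cipher, and an arbitrary block-size multiple through a "cbc(aes)"
crypto_blkcipher fed via a scatterlist.

    #include <linux/crypto.h>
    #include <linux/scatterlist.h>
    #include <linux/err.h>

    static int example(void)
    {
            u8 key[16] = { 0 };             /* placeholder 128-bit key */
            u8 iv[16] = { 0 };              /* placeholder IV */
            u8 block[16] = { 0 };           /* exactly one block */
            u8 buf[4 * 16] = { 0 };         /* any multiple of the block size */
            struct crypto_cipher *cip;
            struct crypto_blkcipher *blk;
            struct blkcipher_desc desc;
            struct scatterlist sg;
            int err;

            /* "Cipher": exactly one block in, one block out. */
            cip = crypto_alloc_cipher("aes", 0, 0);
            if (IS_ERR(cip))
                    return PTR_ERR(cip);
            crypto_cipher_setkey(cip, key, sizeof(key));
            crypto_cipher_encrypt_one(cip, block, block);
            crypto_free_cipher(cip);

            /* "Block Cipher": a chaining mode over the cipher, fed through
             * scatterlists, any length subject to the block size rule. */
            blk = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
            if (IS_ERR(blk))
                    return PTR_ERR(blk);
            crypto_blkcipher_setkey(blk, key, sizeof(key));
            crypto_blkcipher_set_iv(blk, iv, crypto_blkcipher_ivsize(blk));
            desc.tfm = blk;
            desc.flags = 0;
            sg_init_one(&sg, buf, sizeof(buf));
            err = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
            crypto_free_blkcipher(blk);
            return err;
    }
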
@@ -69,29 +76,12 @@ Here's an example of how to use the API:
 Many real examples are available in the regression test module (tcrypt.c).
 
 
-CONFIGURATION NOTES
-
-As Triple DES is part of the DES module, for those using modular builds,
-add the following line to /etc/modprobe.conf:
-
-  alias des3_ede des
-
-The Null algorithms reside in the crypto_null module, so these lines
-should also be added:
-
-  alias cipher_null crypto_null
-  alias digest_null crypto_null
-  alias compress_null crypto_null
-
-The SHA384 algorithm shares code within the SHA512 module, so you'll
-also need:
-  alias sha384 sha512
-
-
 DEVELOPER NOTES
 
 Transforms may only be allocated in user context, and cryptographic
-methods may only be called from softirq and user contexts.
+methods may only be called from softirq and user contexts.  For
+transforms with a setkey method it too should only be called from
+user context.
 
 When using the API for ciphers, performance will be optimal if each
 scatterlist contains data which is a multiple of the cipher's block
@@ -130,8 +120,9 @@ might already be working on.
 BUGS
 
 Send bug reports to:
-Herbert Xu <herbert@gondor.apana.org.au>
-Cc: David S. Miller <davem@redhat.com>
+linux-crypto@vger.kernel.org
+Cc: Herbert Xu <herbert@gondor.apana.org.au>,
+    David S. Miller <davem@redhat.com>
 
 
 FURTHER INFORMATION
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 5126696..46c9705 100644 (file)
@@ -6,6 +6,7 @@
  * s390 Version:
  *   Copyright IBM Corp. 2005,2007
  *   Author(s): Jan Glauber (jang@de.ibm.com)
+ *             Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
  *
  * Derived from "crypto/aes_generic.c"
  *
  *
  */
 
+#include <crypto/aes.h>
 #include <crypto/algapi.h>
+#include <linux/err.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include "crypt_s390.h"
 
-#define AES_MIN_KEY_SIZE       16
-#define AES_MAX_KEY_SIZE       32
-
-/* data block size for all key lengths */
-#define AES_BLOCK_SIZE         16
-
 #define AES_KEYLEN_128         1
 #define AES_KEYLEN_192         2
 #define AES_KEYLEN_256         4
@@ -39,45 +36,89 @@ struct s390_aes_ctx {
        long enc;
        long dec;
        int key_len;
+       union {
+               struct crypto_blkcipher *blk;
+               struct crypto_cipher *cip;
+       } fallback;
 };
 
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-                      unsigned int key_len)
+/*
+ * Check if the key_len is supported by the HW.
+ * Returns 0 if it is, a positive number if it is not and software fallback is
+ * required or a negative number in case the key size is not valid
+ */
+static int need_fallback(unsigned int key_len)
 {
-       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-       u32 *flags = &tfm->crt_flags;
-
        switch (key_len) {
        case 16:
                if (!(keylen_flag & AES_KEYLEN_128))
-                       goto fail;
+                       return 1;
                break;
        case 24:
                if (!(keylen_flag & AES_KEYLEN_192))
-                       goto fail;
-
+                       return 1;
                break;
        case 32:
                if (!(keylen_flag & AES_KEYLEN_256))
-                       goto fail;
+                       return 1;
                break;
        default:
-               goto fail;
+               return -1;
                break;
        }
+       return 0;
+}
+
+static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
+               unsigned int key_len)
+{
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+       int ret;
+
+       sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+       sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
+                       CRYPTO_TFM_REQ_MASK);
+
+       ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
+       if (ret) {
+               tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+               tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
+                               CRYPTO_TFM_RES_MASK);
+       }
+       return ret;
+}
+
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+                      unsigned int key_len)
+{
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+       u32 *flags = &tfm->crt_flags;
+       int ret;
+
+       ret = need_fallback(key_len);
+       if (ret < 0) {
+               *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+               return -EINVAL;
+       }
 
        sctx->key_len = key_len;
-       memcpy(sctx->key, in_key, key_len);
-       return 0;
-fail:
-       *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-       return -EINVAL;
+       if (!ret) {
+               memcpy(sctx->key, in_key, key_len);
+               return 0;
+       }
+
+       return setkey_fallback_cip(tfm, in_key, key_len);
 }
 
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
+       if (unlikely(need_fallback(sctx->key_len))) {
+               crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
+               return;
+       }
+
        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
@@ -98,6 +139,11 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
+       if (unlikely(need_fallback(sctx->key_len))) {
+               crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
+               return;
+       }
+
        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
@@ -114,6 +160,29 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
        }
 }
 
+static int fallback_init_cip(struct crypto_tfm *tfm)
+{
+       const char *name = tfm->__crt_alg->cra_name;
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+       sctx->fallback.cip = crypto_alloc_cipher(name, 0,
+                       CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+       if (IS_ERR(sctx->fallback.cip)) {
+               printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+               return PTR_ERR(sctx->fallback.cip);
+       }
+
+       return 0;
+}
+
+static void fallback_exit_cip(struct crypto_tfm *tfm)
+{
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_cipher(sctx->fallback.cip);
+       sctx->fallback.cip = NULL;
+}
 
 static struct crypto_alg aes_alg = {
        .cra_name               =       "aes",
@@ -125,6 +194,8 @@ static struct crypto_alg aes_alg = {
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(aes_alg.cra_list),
+       .cra_init               =       fallback_init_cip,
+       .cra_exit               =       fallback_exit_cip,
        .cra_u                  =       {
                .cipher = {
                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
@@ -136,10 +207,70 @@ static struct crypto_alg aes_alg = {
        }
 };
 
+static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
+               unsigned int len)
+{
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+       unsigned int ret;
+
+       sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+       sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
+                       CRYPTO_TFM_REQ_MASK);
+
+       ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
+       if (ret) {
+               tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+               tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
+                               CRYPTO_TFM_RES_MASK);
+       }
+       return ret;
+}
+
+static int fallback_blk_dec(struct blkcipher_desc *desc,
+               struct scatterlist *dst, struct scatterlist *src,
+               unsigned int nbytes)
+{
+       unsigned int ret;
+       struct crypto_blkcipher *tfm;
+       struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+       tfm = desc->tfm;
+       desc->tfm = sctx->fallback.blk;
+
+       ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+
+       desc->tfm = tfm;
+       return ret;
+}
+
+static int fallback_blk_enc(struct blkcipher_desc *desc,
+               struct scatterlist *dst, struct scatterlist *src,
+               unsigned int nbytes)
+{
+       unsigned int ret;
+       struct crypto_blkcipher *tfm;
+       struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+       tfm = desc->tfm;
+       desc->tfm = sctx->fallback.blk;
+
+       ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+
+       desc->tfm = tfm;
+       return ret;
+}
+
 static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
 {
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+       int ret;
+
+       ret = need_fallback(key_len);
+       if (ret > 0) {
+               sctx->key_len = key_len;
+               return setkey_fallback_blk(tfm, in_key, key_len);
+       }
 
        switch (key_len) {
        case 16:
@@ -188,6 +319,9 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
 
+       if (unlikely(need_fallback(sctx->key_len)))
+               return fallback_blk_enc(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
 }
@@ -199,10 +333,37 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
 
+       if (unlikely(need_fallback(sctx->key_len)))
+               return fallback_blk_dec(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
 }
 
+static int fallback_init_blk(struct crypto_tfm *tfm)
+{
+       const char *name = tfm->__crt_alg->cra_name;
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+       sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
+                       CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+       if (IS_ERR(sctx->fallback.blk)) {
+               printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+               return PTR_ERR(sctx->fallback.blk);
+       }
+
+       return 0;
+}
+
+static void fallback_exit_blk(struct crypto_tfm *tfm)
+{
+       struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_blkcipher(sctx->fallback.blk);
+       sctx->fallback.blk = NULL;
+}
+
 static struct crypto_alg ecb_aes_alg = {
        .cra_name               =       "ecb(aes)",
        .cra_driver_name        =       "ecb-aes-s390",
@@ -214,6 +375,8 @@ static struct crypto_alg ecb_aes_alg = {
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(ecb_aes_alg.cra_list),
+       .cra_init               =       fallback_init_blk,
+       .cra_exit               =       fallback_exit_blk,
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
@@ -229,6 +392,13 @@ static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
 {
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+       int ret;
+
+       ret = need_fallback(key_len);
+       if (ret > 0) {
+               sctx->key_len = key_len;
+               return setkey_fallback_blk(tfm, in_key, key_len);
+       }
 
        switch (key_len) {
        case 16:
@@ -283,6 +453,9 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
 
+       if (unlikely(need_fallback(sctx->key_len)))
+               return fallback_blk_enc(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
 }
@@ -294,6 +467,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
 
+       if (unlikely(need_fallback(sctx->key_len)))
+               return fallback_blk_dec(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
 }
@@ -309,6 +485,8 @@ static struct crypto_alg cbc_aes_alg = {
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(cbc_aes_alg.cra_list),
+       .cra_init               =       fallback_init_blk,
+       .cra_exit               =       fallback_exit_blk,
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
@@ -336,14 +514,10 @@ static int __init aes_init(void)
                return -EOPNOTSUPP;
 
        /* z9 109 and z9 BC/EC only support 128 bit key length */
-       if (keylen_flag == AES_KEYLEN_128) {
-               aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
-               ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
-               cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
+       if (keylen_flag == AES_KEYLEN_128)
                printk(KERN_INFO
                       "aes_s390: hardware acceleration only available for"
                       "128 bit keys\n");
-       }
 
        ret = crypto_register_alg(&aes_alg);
        if (ret)
@@ -382,4 +556,3 @@ MODULE_ALIAS("aes");
 
 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 MODULE_LICENSE("GPL");
-
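
The s390 changes above follow a general pattern for hardware drivers that
cannot cover every key size: at cra_init time the driver allocates a software
implementation of the same algorithm, requesting it with
CRYPTO_ALG_NEED_FALLBACK in the lookup mask so the request cannot resolve back
to the hardware driver itself, and it routes setkey/encrypt to that fallback
whenever need_fallback() says the hardware cannot handle the key. A minimal,
untested sketch of just that pattern (example_ctx and the helper names are made
up; the real driver code is in the hunks above):

    #include <crypto/algapi.h>
    #include <linux/crypto.h>
    #include <linux/err.h>

    struct example_ctx {                    /* hypothetical per-tfm context */
            struct crypto_cipher *fallback;
    };

    static int example_init(struct crypto_tfm *tfm)
    {
            struct example_ctx *ctx = crypto_tfm_ctx(tfm);
            const char *name = tfm->__crt_alg->cra_name;

            /* Any other provider of the same algorithm, but never one that
             * itself needs a fallback (which excludes this driver). */
            ctx->fallback = crypto_alloc_cipher(name, 0,
                            CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
            if (IS_ERR(ctx->fallback))
                    return PTR_ERR(ctx->fallback);
            return 0;
    }

    static void example_exit(struct crypto_tfm *tfm)
    {
            struct example_ctx *ctx = crypto_tfm_ctx(tfm);

            crypto_free_cipher(ctx->fallback);
            ctx->fallback = NULL;
    }

    /* The registering crypto_alg sets CRYPTO_ALG_NEED_FALLBACK in cra_flags
     * and points cra_init/cra_exit at helpers like these; its setkey and
     * encrypt/decrypt entry points then test the key length (need_fallback()
     * in aes_s390) and defer to ctx->fallback when the hardware cannot help. */
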
diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile
index 46bb609..3874c2d 100644 (file)
@@ -4,12 +4,16 @@
 
 obj-$(CONFIG_CRYPTO_AES_586) += aes-i586.o
 obj-$(CONFIG_CRYPTO_TWOFISH_586) += twofish-i586.o
+obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o
 
 obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
 obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
+obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o
 
-aes-i586-y := aes-i586-asm_32.o aes_32.o
-twofish-i586-y := twofish-i586-asm_32.o twofish_32.o
+aes-i586-y := aes-i586-asm_32.o aes_glue.o
+twofish-i586-y := twofish-i586-asm_32.o twofish_glue.o
+salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o
 
-aes-x86_64-y := aes-x86_64-asm_64.o aes_64.o
-twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_64.o
+aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o
+twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
+salsa20-x86_64-y := salsa20-x86_64-asm_64.o salsa20_glue.o
diff --git a/arch/x86/crypto/aes-i586-asm_32.S b/arch/x86/crypto/aes-i586-asm_32.S
index f942f0c..1093bed 100644 (file)
@@ -46,9 +46,9 @@
 #define in_blk 16
 
 /* offsets in crypto_tfm structure */
-#define ekey (crypto_tfm_ctx_offset + 0)
-#define nrnd (crypto_tfm_ctx_offset + 256)
-#define dkey (crypto_tfm_ctx_offset + 260)
+#define klen (crypto_tfm_ctx_offset + 0)
+#define ekey (crypto_tfm_ctx_offset + 4)
+#define dkey (crypto_tfm_ctx_offset + 244)
 
 // register mapping for encrypt and decrypt subroutines
 
 
 .global  aes_enc_blk
 
-.extern  ft_tab
-.extern  fl_tab
+.extern  crypto_ft_tab
+.extern  crypto_fl_tab
 
 .align 4
 
@@ -236,7 +236,7 @@ aes_enc_blk:
 1:     push    %ebx
        mov     in_blk+4(%esp),%r2
        push    %esi
-       mov     nrnd(%ebp),%r3   // number of rounds
+       mov     klen(%ebp),%r3   // key size
        push    %edi
 #if ekey != 0
        lea     ekey(%ebp),%ebp  // key pointer
@@ -255,26 +255,26 @@ aes_enc_blk:
 
        sub     $8,%esp         // space for register saves on stack
        add     $16,%ebp        // increment to next round key
-       cmp     $12,%r3
+       cmp     $24,%r3
        jb      4f              // 10 rounds for 128-bit key
        lea     32(%ebp),%ebp
        je      3f              // 12 rounds for 192-bit key
        lea     32(%ebp),%ebp
 
-2:     fwd_rnd1( -64(%ebp) ,ft_tab)    // 14 rounds for 256-bit key
-       fwd_rnd2( -48(%ebp) ,ft_tab)
-3:     fwd_rnd1( -32(%ebp) ,ft_tab)    // 12 rounds for 192-bit key
-       fwd_rnd2( -16(%ebp) ,ft_tab)
-4:     fwd_rnd1(    (%ebp) ,ft_tab)    // 10 rounds for 128-bit key
-       fwd_rnd2( +16(%ebp) ,ft_tab)
-       fwd_rnd1( +32(%ebp) ,ft_tab)
-       fwd_rnd2( +48(%ebp) ,ft_tab)
-       fwd_rnd1( +64(%ebp) ,ft_tab)
-       fwd_rnd2( +80(%ebp) ,ft_tab)
-       fwd_rnd1( +96(%ebp) ,ft_tab)
-       fwd_rnd2(+112(%ebp) ,ft_tab)
-       fwd_rnd1(+128(%ebp) ,ft_tab)
-       fwd_rnd2(+144(%ebp) ,fl_tab)    // last round uses a different table
+2:     fwd_rnd1( -64(%ebp), crypto_ft_tab)     // 14 rounds for 256-bit key
+       fwd_rnd2( -48(%ebp), crypto_ft_tab)
+3:     fwd_rnd1( -32(%ebp), crypto_ft_tab)     // 12 rounds for 192-bit key
+       fwd_rnd2( -16(%ebp), crypto_ft_tab)
+4:     fwd_rnd1(    (%ebp), crypto_ft_tab)     // 10 rounds for 128-bit key
+       fwd_rnd2( +16(%ebp), crypto_ft_tab)
+       fwd_rnd1( +32(%ebp), crypto_ft_tab)
+       fwd_rnd2( +48(%ebp), crypto_ft_tab)
+       fwd_rnd1( +64(%ebp), crypto_ft_tab)
+       fwd_rnd2( +80(%ebp), crypto_ft_tab)
+       fwd_rnd1( +96(%ebp), crypto_ft_tab)
+       fwd_rnd2(+112(%ebp), crypto_ft_tab)
+       fwd_rnd1(+128(%ebp), crypto_ft_tab)
+       fwd_rnd2(+144(%ebp), crypto_fl_tab)     // last round uses a different table
 
 // move final values to the output array.  CAUTION: the 
 // order of these assigns rely on the register mappings
@@ -297,8 +297,8 @@ aes_enc_blk:
 
 .global  aes_dec_blk
 
-.extern  it_tab
-.extern  il_tab
+.extern  crypto_it_tab
+.extern  crypto_il_tab
 
 .align 4
 
@@ -312,14 +312,11 @@ aes_dec_blk:
 1:     push    %ebx
        mov     in_blk+4(%esp),%r2
        push    %esi
-       mov     nrnd(%ebp),%r3   // number of rounds
+       mov     klen(%ebp),%r3   // key size
        push    %edi
 #if dkey != 0
        lea     dkey(%ebp),%ebp  // key pointer
 #endif
-       mov     %r3,%r0
-       shl     $4,%r0
-       add     %r0,%ebp
        
 // input four columns and xor in first round key
 
@@ -333,27 +330,27 @@ aes_dec_blk:
        xor     12(%ebp),%r5
 
        sub     $8,%esp         // space for register saves on stack
-       sub     $16,%ebp        // increment to next round key
-       cmp     $12,%r3
+       add     $16,%ebp        // increment to next round key
+       cmp     $24,%r3
        jb      4f              // 10 rounds for 128-bit key
-       lea     -32(%ebp),%ebp
+       lea     32(%ebp),%ebp
        je      3f              // 12 rounds for 192-bit key
-       lea     -32(%ebp),%ebp
-
-2:     inv_rnd1( +64(%ebp), it_tab)    // 14 rounds for 256-bit key
-       inv_rnd2( +48(%ebp), it_tab)
-3:     inv_rnd1( +32(%ebp), it_tab)    // 12 rounds for 192-bit key
-       inv_rnd2( +16(%ebp), it_tab)
-4:     inv_rnd1(    (%ebp), it_tab)    // 10 rounds for 128-bit key
-       inv_rnd2( -16(%ebp), it_tab)
-       inv_rnd1( -32(%ebp), it_tab)
-       inv_rnd2( -48(%ebp), it_tab)
-       inv_rnd1( -64(%ebp), it_tab)
-       inv_rnd2( -80(%ebp), it_tab)
-       inv_rnd1( -96(%ebp), it_tab)
-       inv_rnd2(-112(%ebp), it_tab)
-       inv_rnd1(-128(%ebp), it_tab)
-       inv_rnd2(-144(%ebp), il_tab)    // last round uses a different table
+       lea     32(%ebp),%ebp
+
+2:     inv_rnd1( -64(%ebp), crypto_it_tab)     // 14 rounds for 256-bit key
+       inv_rnd2( -48(%ebp), crypto_it_tab)
+3:     inv_rnd1( -32(%ebp), crypto_it_tab)     // 12 rounds for 192-bit key
+       inv_rnd2( -16(%ebp), crypto_it_tab)
+4:     inv_rnd1(    (%ebp), crypto_it_tab)     // 10 rounds for 128-bit key
+       inv_rnd2( +16(%ebp), crypto_it_tab)
+       inv_rnd1( +32(%ebp), crypto_it_tab)
+       inv_rnd2( +48(%ebp), crypto_it_tab)
+       inv_rnd1( +64(%ebp), crypto_it_tab)
+       inv_rnd2( +80(%ebp), crypto_it_tab)
+       inv_rnd1( +96(%ebp), crypto_it_tab)
+       inv_rnd2(+112(%ebp), crypto_it_tab)
+       inv_rnd1(+128(%ebp), crypto_it_tab)
+       inv_rnd2(+144(%ebp), crypto_il_tab)     // last round uses a different table
 
 // move final values to the output array.  CAUTION: the 
 // order of these assigns rely on the register mappings
diff --git a/arch/x86/crypto/aes-x86_64-asm_64.S b/arch/x86/crypto/aes-x86_64-asm_64.S
index 26b40de..a120f52 100644 (file)
@@ -8,10 +8,10 @@
  * including this sentence is retained in full.
  */
 
-.extern aes_ft_tab
-.extern aes_it_tab
-.extern aes_fl_tab
-.extern aes_il_tab
+.extern crypto_ft_tab
+.extern crypto_it_tab
+.extern crypto_fl_tab
+.extern crypto_il_tab
 
 .text
 
        .align  8;                      \
 FUNC:  movq    r1,r2;                  \
        movq    r3,r4;                  \
-       leaq    BASE+KEY+52(r8),r9;     \
+       leaq    BASE+KEY+48+4(r8),r9;   \
        movq    r10,r11;                \
        movl    (r7),r5 ## E;           \
        movl    4(r7),r1 ## E;          \
        movl    8(r7),r6 ## E;          \
        movl    12(r7),r7 ## E;         \
-       movl    BASE(r8),r10 ## E;      \
+       movl    BASE+0(r8),r10 ## E;    \
        xorl    -48(r9),r5 ## E;        \
        xorl    -44(r9),r1 ## E;        \
        xorl    -40(r9),r6 ## E;        \
@@ -154,37 +154,37 @@ FUNC:     movq    r1,r2;                  \
 /* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */
 
        entry(aes_enc_blk,0,enc128,enc192)
-       encrypt_round(aes_ft_tab,-96)
-       encrypt_round(aes_ft_tab,-80)
-enc192:        encrypt_round(aes_ft_tab,-64)
-       encrypt_round(aes_ft_tab,-48)
-enc128:        encrypt_round(aes_ft_tab,-32)
-       encrypt_round(aes_ft_tab,-16)
-       encrypt_round(aes_ft_tab,  0)
-       encrypt_round(aes_ft_tab, 16)
-       encrypt_round(aes_ft_tab, 32)
-       encrypt_round(aes_ft_tab, 48)
-       encrypt_round(aes_ft_tab, 64)
-       encrypt_round(aes_ft_tab, 80)
-       encrypt_round(aes_ft_tab, 96)
-       encrypt_final(aes_fl_tab,112)
+       encrypt_round(crypto_ft_tab,-96)
+       encrypt_round(crypto_ft_tab,-80)
+enc192:        encrypt_round(crypto_ft_tab,-64)
+       encrypt_round(crypto_ft_tab,-48)
+enc128:        encrypt_round(crypto_ft_tab,-32)
+       encrypt_round(crypto_ft_tab,-16)
+       encrypt_round(crypto_ft_tab,  0)
+       encrypt_round(crypto_ft_tab, 16)
+       encrypt_round(crypto_ft_tab, 32)
+       encrypt_round(crypto_ft_tab, 48)
+       encrypt_round(crypto_ft_tab, 64)
+       encrypt_round(crypto_ft_tab, 80)
+       encrypt_round(crypto_ft_tab, 96)
+       encrypt_final(crypto_fl_tab,112)
        return
 
 /* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */
 
        entry(aes_dec_blk,240,dec128,dec192)
-       decrypt_round(aes_it_tab,-96)
-       decrypt_round(aes_it_tab,-80)
-dec192:        decrypt_round(aes_it_tab,-64)
-       decrypt_round(aes_it_tab,-48)
-dec128:        decrypt_round(aes_it_tab,-32)
-       decrypt_round(aes_it_tab,-16)
-       decrypt_round(aes_it_tab,  0)
-       decrypt_round(aes_it_tab, 16)
-       decrypt_round(aes_it_tab, 32)
-       decrypt_round(aes_it_tab, 48)
-       decrypt_round(aes_it_tab, 64)
-       decrypt_round(aes_it_tab, 80)
-       decrypt_round(aes_it_tab, 96)
-       decrypt_final(aes_il_tab,112)
+       decrypt_round(crypto_it_tab,-96)
+       decrypt_round(crypto_it_tab,-80)
+dec192:        decrypt_round(crypto_it_tab,-64)
+       decrypt_round(crypto_it_tab,-48)
+dec128:        decrypt_round(crypto_it_tab,-32)
+       decrypt_round(crypto_it_tab,-16)
+       decrypt_round(crypto_it_tab,  0)
+       decrypt_round(crypto_it_tab, 16)
+       decrypt_round(crypto_it_tab, 32)
+       decrypt_round(crypto_it_tab, 48)
+       decrypt_round(crypto_it_tab, 64)
+       decrypt_round(crypto_it_tab, 80)
+       decrypt_round(crypto_it_tab, 96)
+       decrypt_final(crypto_il_tab,112)
        return
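
Both assembler files now take the key schedule and lookup tables from the
shared AES code (crypto/aes_generic.c and the include/crypto/aes.h added in
this merge) instead of per-arch copies. The i586 offsets above (klen = 0,
ekey = 4, dkey = 244) imply a context layout along the lines of the sketch
below; the member names are quoted from memory and should be checked against
include/crypto/aes.h from this series.

    #include <linux/types.h>

    #define AES_MAX_KEYLENGTH       (15 * 16)                       /* 240 bytes */
    #define AES_MAX_KEYLENGTH_U32   (AES_MAX_KEYLENGTH / sizeof(u32))

    /*
     * Offsets into the tfm context as used by the asm above:
     *   klen = 0                -> key_length (key size in bytes, not rounds)
     *   ekey = 4                -> key_enc[0]
     *   dkey = 4 + 240 = 244    -> key_dec[0]
     * The "cmp $24" tests against the key size pick 10/12/14 rounds.
     */
    struct crypto_aes_ctx {
            u32 key_length;
            u32 key_enc[AES_MAX_KEYLENGTH_U32];
            u32 key_dec[AES_MAX_KEYLENGTH_U32];
    };

    /* crypto_ft_tab / crypto_fl_tab / crypto_it_tab / crypto_il_tab are the
     * encryption/decryption lookup tables exported by crypto/aes_generic.c,
     * replacing the ft_tab/aes_ft_tab copies each glue file used to build. */
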
diff --git a/arch/x86/crypto/aes_32.c b/arch/x86/crypto/aes_32.c
deleted file mode 100644 (file)
index 49aad93..0000000
+++ /dev/null
@@ -1,515 +0,0 @@
-/* 
- * 
- * Glue Code for optimized 586 assembler version of AES
- *
- * Copyright (c) 2002, Dr Brian Gladman <>, Worcester, UK.
- * All rights reserved.
- *
- * LICENSE TERMS
- *
- * The free distribution and use of this software in both source and binary
- * form is allowed (with or without changes) provided that:
- *
- *   1. distributions of this source code include the above copyright
- *      notice, this list of conditions and the following disclaimer;
- *
- *   2. distributions in binary form include the above copyright
- *      notice, this list of conditions and the following disclaimer
- *      in the documentation and/or other associated materials;
- *
- *   3. the copyright holder's name is not used to endorse products
- *      built using this software without specific written permission.
- *
- * ALTERNATIVELY, provided that this notice is retained in full, this product
- * may be distributed under the terms of the GNU General Public License (GPL),
- * in which case the provisions of the GPL apply INSTEAD OF those given above.
- *
- * DISCLAIMER
- *
- * This software is provided 'as is' with no explicit or implied warranties
- * in respect of its properties, including, but not limited to, correctness
- * and/or fitness for purpose.
- *
- * Copyright (c) 2003, Adam J. Richter <adam@yggdrasil.com> (conversion to
- * 2.5 API).
- * Copyright (c) 2003, 2004 Fruhwirth Clemens <clemens@endorphin.org>
- * Copyright (c) 2004 Red Hat, Inc., James Morris <jmorris@redhat.com>
- *
- */
-
-#include <asm/byteorder.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/init.h>
-#include <linux/types.h>
-#include <linux/crypto.h>
-#include <linux/linkage.h>
-
-asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-
-#define AES_MIN_KEY_SIZE       16
-#define AES_MAX_KEY_SIZE       32
-#define AES_BLOCK_SIZE         16
-#define AES_KS_LENGTH          4 * AES_BLOCK_SIZE
-#define RC_LENGTH              29
-
-struct aes_ctx {
-       u32 ekey[AES_KS_LENGTH];
-       u32 rounds;
-       u32 dkey[AES_KS_LENGTH];
-};
-
-#define WPOLY 0x011b
-#define bytes2word(b0, b1, b2, b3)  \
-       (((u32)(b3) << 24) | ((u32)(b2) << 16) | ((u32)(b1) << 8) | (b0))
-
-/* define the finite field multiplies required for Rijndael */
-#define f2(x) ((x) ? pow[log[x] + 0x19] : 0)
-#define f3(x) ((x) ? pow[log[x] + 0x01] : 0)
-#define f9(x) ((x) ? pow[log[x] + 0xc7] : 0)
-#define fb(x) ((x) ? pow[log[x] + 0x68] : 0)
-#define fd(x) ((x) ? pow[log[x] + 0xee] : 0)
-#define fe(x) ((x) ? pow[log[x] + 0xdf] : 0)
-#define fi(x) ((x) ?   pow[255 - log[x]]: 0)
-
-static inline u32 upr(u32 x, int n)
-{
-       return (x << 8 * n) | (x >> (32 - 8 * n));
-}
-
-static inline u8 bval(u32 x, int n)
-{
-       return x >> 8 * n;
-}
-
-/* The forward and inverse affine transformations used in the S-box */
-#define fwd_affine(x) \
-       (w = (u32)x, w ^= (w<<1)^(w<<2)^(w<<3)^(w<<4), 0x63^(u8)(w^(w>>8)))
-
-#define inv_affine(x) \
-       (w = (u32)x, w = (w<<1)^(w<<3)^(w<<6), 0x05^(u8)(w^(w>>8)))
-
-static u32 rcon_tab[RC_LENGTH];
-
-u32 ft_tab[4][256];
-u32 fl_tab[4][256];
-static u32 im_tab[4][256];
-u32 il_tab[4][256];
-u32 it_tab[4][256];
-
-static void gen_tabs(void)
-{
-       u32 i, w;
-       u8 pow[512], log[256];
-
-       /*
-        * log and power tables for GF(2^8) finite field with
-        * WPOLY as modular polynomial - the simplest primitive
-        * root is 0x03, used here to generate the tables.
-        */
-       i = 0; w = 1; 
-       
-       do {
-               pow[i] = (u8)w;
-               pow[i + 255] = (u8)w;
-               log[w] = (u8)i++;
-               w ^=  (w << 1) ^ (w & 0x80 ? WPOLY : 0);
-       } while (w != 1);
-       
-       for(i = 0, w = 1; i < RC_LENGTH; ++i) {
-               rcon_tab[i] = bytes2word(w, 0, 0, 0);
-               w = f2(w);
-       }
-
-       for(i = 0; i < 256; ++i) {
-               u8 b;
-               
-               b = fwd_affine(fi((u8)i));
-               w = bytes2word(f2(b), b, b, f3(b));
-
-               /* tables for a normal encryption round */
-               ft_tab[0][i] = w;
-               ft_tab[1][i] = upr(w, 1);
-               ft_tab[2][i] = upr(w, 2);
-               ft_tab[3][i] = upr(w, 3);
-               w = bytes2word(b, 0, 0, 0);
-               
-               /*
-                * tables for last encryption round
-                * (may also be used in the key schedule)
-                */
-               fl_tab[0][i] = w;
-               fl_tab[1][i] = upr(w, 1);
-               fl_tab[2][i] = upr(w, 2);
-               fl_tab[3][i] = upr(w, 3);
-               
-               b = fi(inv_affine((u8)i));
-               w = bytes2word(fe(b), f9(b), fd(b), fb(b));
-
-               /* tables for the inverse mix column operation  */
-               im_tab[0][b] = w;
-               im_tab[1][b] = upr(w, 1);
-               im_tab[2][b] = upr(w, 2);
-               im_tab[3][b] = upr(w, 3);
-
-               /* tables for a normal decryption round */
-               it_tab[0][i] = w;
-               it_tab[1][i] = upr(w,1);
-               it_tab[2][i] = upr(w,2);
-               it_tab[3][i] = upr(w,3);
-
-               w = bytes2word(b, 0, 0, 0);
-               
-               /* tables for last decryption round */
-               il_tab[0][i] = w;
-               il_tab[1][i] = upr(w,1);
-               il_tab[2][i] = upr(w,2);
-               il_tab[3][i] = upr(w,3);
-    }
-}
-
-#define four_tables(x,tab,vf,rf,c)             \
-(      tab[0][bval(vf(x,0,c),rf(0,c))] ^       \
-       tab[1][bval(vf(x,1,c),rf(1,c))] ^       \
-       tab[2][bval(vf(x,2,c),rf(2,c))] ^       \
-       tab[3][bval(vf(x,3,c),rf(3,c))]         \
-)
-
-#define vf1(x,r,c)  (x)
-#define rf1(r,c)    (r)
-#define rf2(r,c)    ((r-c)&3)
-
-#define inv_mcol(x) four_tables(x,im_tab,vf1,rf1,0)
-#define ls_box(x,c) four_tables(x,fl_tab,vf1,rf2,c)
-
-#define ff(x) inv_mcol(x)
-
-#define ke4(k,i)                                                       \
-{                                                                      \
-       k[4*(i)+4] = ss[0] ^= ls_box(ss[3],3) ^ rcon_tab[i];            \
-       k[4*(i)+5] = ss[1] ^= ss[0];                                    \
-       k[4*(i)+6] = ss[2] ^= ss[1];                                    \
-       k[4*(i)+7] = ss[3] ^= ss[2];                                    \
-}
-
-#define kel4(k,i)                                                      \
-{                                                                      \
-       k[4*(i)+4] = ss[0] ^= ls_box(ss[3],3) ^ rcon_tab[i];            \
-       k[4*(i)+5] = ss[1] ^= ss[0];                                    \
-       k[4*(i)+6] = ss[2] ^= ss[1]; k[4*(i)+7] = ss[3] ^= ss[2];       \
-}
-
-#define ke6(k,i)                                                       \
-{                                                                      \
-       k[6*(i)+ 6] = ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i];           \
-       k[6*(i)+ 7] = ss[1] ^= ss[0];                                   \
-       k[6*(i)+ 8] = ss[2] ^= ss[1];                                   \
-       k[6*(i)+ 9] = ss[3] ^= ss[2];                                   \
-       k[6*(i)+10] = ss[4] ^= ss[3];                                   \
-       k[6*(i)+11] = ss[5] ^= ss[4];                                   \
-}
-
-#define kel6(k,i)                                                      \
-{                                                                      \
-       k[6*(i)+ 6] = ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i];           \
-       k[6*(i)+ 7] = ss[1] ^= ss[0];                                   \
-       k[6*(i)+ 8] = ss[2] ^= ss[1];                                   \
-       k[6*(i)+ 9] = ss[3] ^= ss[2];                                   \
-}
-
-#define ke8(k,i)                                                       \
-{                                                                      \
-       k[8*(i)+ 8] = ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i];           \
-       k[8*(i)+ 9] = ss[1] ^= ss[0];                                   \
-       k[8*(i)+10] = ss[2] ^= ss[1];                                   \
-       k[8*(i)+11] = ss[3] ^= ss[2];                                   \
-       k[8*(i)+12] = ss[4] ^= ls_box(ss[3],0);                         \
-       k[8*(i)+13] = ss[5] ^= ss[4];                                   \
-       k[8*(i)+14] = ss[6] ^= ss[5];                                   \
-       k[8*(i)+15] = ss[7] ^= ss[6];                                   \
-}
-
-#define kel8(k,i)                                                      \
-{                                                                      \
-       k[8*(i)+ 8] = ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i];           \
-       k[8*(i)+ 9] = ss[1] ^= ss[0];                                   \
-       k[8*(i)+10] = ss[2] ^= ss[1];                                   \
-       k[8*(i)+11] = ss[3] ^= ss[2];                                   \
-}
-
-#define kdf4(k,i)                                                      \
-{                                                                      \
-       ss[0] = ss[0] ^ ss[2] ^ ss[1] ^ ss[3];                          \
-       ss[1] = ss[1] ^ ss[3];                                          \
-       ss[2] = ss[2] ^ ss[3];                                          \
-       ss[3] = ss[3];                                                  \
-       ss[4] = ls_box(ss[(i+3) % 4], 3) ^ rcon_tab[i];                 \
-       ss[i % 4] ^= ss[4];                                             \
-       ss[4] ^= k[4*(i)];                                              \
-       k[4*(i)+4] = ff(ss[4]);                                         \
-       ss[4] ^= k[4*(i)+1];                                            \
-       k[4*(i)+5] = ff(ss[4]);                                         \
-       ss[4] ^= k[4*(i)+2];                                            \
-       k[4*(i)+6] = ff(ss[4]);                                         \
-       ss[4] ^= k[4*(i)+3];                                            \
-       k[4*(i)+7] = ff(ss[4]);                                         \
-}
-
-#define kd4(k,i)                                                       \
-{                                                                      \
-       ss[4] = ls_box(ss[(i+3) % 4], 3) ^ rcon_tab[i];                 \
-       ss[i % 4] ^= ss[4];                                             \
-       ss[4] = ff(ss[4]);                                              \
-       k[4*(i)+4] = ss[4] ^= k[4*(i)];                                 \
-       k[4*(i)+5] = ss[4] ^= k[4*(i)+1];                               \
-       k[4*(i)+6] = ss[4] ^= k[4*(i)+2];                               \
-       k[4*(i)+7] = ss[4] ^= k[4*(i)+3];                               \
-}
-
-#define kdl4(k,i)                                                      \
-{                                                                      \
-       ss[4] = ls_box(ss[(i+3) % 4], 3) ^ rcon_tab[i];                 \
-       ss[i % 4] ^= ss[4];                                             \
-       k[4*(i)+4] = (ss[0] ^= ss[1]) ^ ss[2] ^ ss[3];                  \
-       k[4*(i)+5] = ss[1] ^ ss[3];                                     \
-       k[4*(i)+6] = ss[0];                                             \
-       k[4*(i)+7] = ss[1];                                             \
-}
-
-#define kdf6(k,i)                                                      \
-{                                                                      \
-       ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i];                         \
-       k[6*(i)+ 6] = ff(ss[0]);                                        \
-       ss[1] ^= ss[0];                                                 \
-       k[6*(i)+ 7] = ff(ss[1]);                                        \
-       ss[2] ^= ss[1];                                                 \
-       k[6*(i)+ 8] = ff(ss[2]);                                        \
-       ss[3] ^= ss[2];                                                 \
-       k[6*(i)+ 9] = ff(ss[3]);                                        \
-       ss[4] ^= ss[3];                                                 \
-       k[6*(i)+10] = ff(ss[4]);                                        \
-       ss[5] ^= ss[4];                                                 \
-       k[6*(i)+11] = ff(ss[5]);                                        \
-}
-
-#define kd6(k,i)                                                       \
-{                                                                      \
-       ss[6] = ls_box(ss[5],3) ^ rcon_tab[i];                          \
-       ss[0] ^= ss[6]; ss[6] = ff(ss[6]);                              \
-       k[6*(i)+ 6] = ss[6] ^= k[6*(i)];                                \
-       ss[1] ^= ss[0];                                                 \
-       k[6*(i)+ 7] = ss[6] ^= k[6*(i)+ 1];                             \
-       ss[2] ^= ss[1];                                                 \
-       k[6*(i)+ 8] = ss[6] ^= k[6*(i)+ 2];                             \
-       ss[3] ^= ss[2];                                                 \
-       k[6*(i)+ 9] = ss[6] ^= k[6*(i)+ 3];                             \
-       ss[4] ^= ss[3];                                                 \
-       k[6*(i)+10] = ss[6] ^= k[6*(i)+ 4];                             \
-       ss[5] ^= ss[4];                                                 \
-       k[6*(i)+11] = ss[6] ^= k[6*(i)+ 5];                             \
-}
-
-#define kdl6(k,i)                                                      \
-{                                                                      \
-       ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i];                         \
-       k[6*(i)+ 6] = ss[0];                                            \
-       ss[1] ^= ss[0];                                                 \
-       k[6*(i)+ 7] = ss[1];                                            \
-       ss[2] ^= ss[1];                                                 \
-       k[6*(i)+ 8] = ss[2];                                            \
-       ss[3] ^= ss[2];                                                 \
-       k[6*(i)+ 9] = ss[3];                                            \
-}
-
-#define kdf8(k,i)                                                      \
-{                                                                      \
-       ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i];                         \
-       k[8*(i)+ 8] = ff(ss[0]);                                        \
-       ss[1] ^= ss[0];                                                 \
-       k[8*(i)+ 9] = ff(ss[1]);                                        \
-       ss[2] ^= ss[1];                                                 \
-       k[8*(i)+10] = ff(ss[2]);                                        \
-       ss[3] ^= ss[2];                                                 \
-       k[8*(i)+11] = ff(ss[3]);                                        \
-       ss[4] ^= ls_box(ss[3],0);                                       \
-       k[8*(i)+12] = ff(ss[4]);                                        \
-       ss[5] ^= ss[4];                                                 \
-       k[8*(i)+13] = ff(ss[5]);                                        \
-       ss[6] ^= ss[5];                                                 \
-       k[8*(i)+14] = ff(ss[6]);                                        \
-       ss[7] ^= ss[6];                                                 \
-       k[8*(i)+15] = ff(ss[7]);                                        \
-}
-
-#define kd8(k,i)                                                       \
-{                                                                      \
-       u32 __g = ls_box(ss[7],3) ^ rcon_tab[i];                        \
-       ss[0] ^= __g;                                                   \
-       __g = ff(__g);                                                  \
-       k[8*(i)+ 8] = __g ^= k[8*(i)];                                  \
-       ss[1] ^= ss[0];                                                 \
-       k[8*(i)+ 9] = __g ^= k[8*(i)+ 1];                               \
-       ss[2] ^= ss[1];                                                 \
-       k[8*(i)+10] = __g ^= k[8*(i)+ 2];                               \
-       ss[3] ^= ss[2];                                                 \
-       k[8*(i)+11] = __g ^= k[8*(i)+ 3];                               \
-       __g = ls_box(ss[3],0);                                          \
-       ss[4] ^= __g;                                                   \
-       __g = ff(__g);                                                  \
-       k[8*(i)+12] = __g ^= k[8*(i)+ 4];                               \
-       ss[5] ^= ss[4];                                                 \
-       k[8*(i)+13] = __g ^= k[8*(i)+ 5];                               \
-       ss[6] ^= ss[5];                                                 \
-       k[8*(i)+14] = __g ^= k[8*(i)+ 6];                               \
-       ss[7] ^= ss[6];                                                 \
-       k[8*(i)+15] = __g ^= k[8*(i)+ 7];                               \
-}
-
-#define kdl8(k,i)                                                      \
-{                                                                      \
-       ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i];                         \
-       k[8*(i)+ 8] = ss[0];                                            \
-       ss[1] ^= ss[0];                                                 \
-       k[8*(i)+ 9] = ss[1];                                            \
-       ss[2] ^= ss[1];                                                 \
-       k[8*(i)+10] = ss[2];                                            \
-       ss[3] ^= ss[2];                                                 \
-       k[8*(i)+11] = ss[3];                                            \
-}
-
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-                      unsigned int key_len)
-{
-       int i;
-       u32 ss[8];
-       struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
-       const __le32 *key = (const __le32 *)in_key;
-       u32 *flags = &tfm->crt_flags;
-
-       /* encryption schedule */
-       
-       ctx->ekey[0] = ss[0] = le32_to_cpu(key[0]);
-       ctx->ekey[1] = ss[1] = le32_to_cpu(key[1]);
-       ctx->ekey[2] = ss[2] = le32_to_cpu(key[2]);
-       ctx->ekey[3] = ss[3] = le32_to_cpu(key[3]);
-
-       switch(key_len) {
-       case 16:
-               for (i = 0; i < 9; i++)
-                       ke4(ctx->ekey, i);
-               kel4(ctx->ekey, 9);
-               ctx->rounds = 10;
-               break;
-               
-       case 24:
-               ctx->ekey[4] = ss[4] = le32_to_cpu(key[4]);
-               ctx->ekey[5] = ss[5] = le32_to_cpu(key[5]);
-               for (i = 0; i < 7; i++)
-                       ke6(ctx->ekey, i);
-               kel6(ctx->ekey, 7); 
-               ctx->rounds = 12;
-               break;
-
-       case 32:
-               ctx->ekey[4] = ss[4] = le32_to_cpu(key[4]);
-               ctx->ekey[5] = ss[5] = le32_to_cpu(key[5]);
-               ctx->ekey[6] = ss[6] = le32_to_cpu(key[6]);
-               ctx->ekey[7] = ss[7] = le32_to_cpu(key[7]);
-               for (i = 0; i < 6; i++)
-                       ke8(ctx->ekey, i);
-               kel8(ctx->ekey, 6);
-               ctx->rounds = 14;
-               break;
-
-       default:
-               *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-               return -EINVAL;
-       }
-       
-       /* decryption schedule */
-       
-       ctx->dkey[0] = ss[0] = le32_to_cpu(key[0]);
-       ctx->dkey[1] = ss[1] = le32_to_cpu(key[1]);
-       ctx->dkey[2] = ss[2] = le32_to_cpu(key[2]);
-       ctx->dkey[3] = ss[3] = le32_to_cpu(key[3]);
-
-       switch (key_len) {
-       case 16:
-               kdf4(ctx->dkey, 0);
-               for (i = 1; i < 9; i++)
-                       kd4(ctx->dkey, i);
-               kdl4(ctx->dkey, 9);
-               break;
-               
-       case 24:
-               ctx->dkey[4] = ff(ss[4] = le32_to_cpu(key[4]));
-               ctx->dkey[5] = ff(ss[5] = le32_to_cpu(key[5]));
-               kdf6(ctx->dkey, 0);
-               for (i = 1; i < 7; i++)
-                       kd6(ctx->dkey, i);
-               kdl6(ctx->dkey, 7);
-               break;
-
-       case 32:
-               ctx->dkey[4] = ff(ss[4] = le32_to_cpu(key[4]));
-               ctx->dkey[5] = ff(ss[5] = le32_to_cpu(key[5]));
-               ctx->dkey[6] = ff(ss[6] = le32_to_cpu(key[6]));
-               ctx->dkey[7] = ff(ss[7] = le32_to_cpu(key[7]));
-               kdf8(ctx->dkey, 0);
-               for (i = 1; i < 6; i++)
-                       kd8(ctx->dkey, i);
-               kdl8(ctx->dkey, 6);
-               break;
-       }
-       return 0;
-}
-
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-       aes_enc_blk(tfm, dst, src);
-}
-
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-       aes_dec_blk(tfm, dst, src);
-}
-
-static struct crypto_alg aes_alg = {
-       .cra_name               =       "aes",
-       .cra_driver_name        =       "aes-i586",
-       .cra_priority           =       200,
-       .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
-       .cra_blocksize          =       AES_BLOCK_SIZE,
-       .cra_ctxsize            =       sizeof(struct aes_ctx),
-       .cra_module             =       THIS_MODULE,
-       .cra_list               =       LIST_HEAD_INIT(aes_alg.cra_list),
-       .cra_u                  =       {
-               .cipher = {
-                       .cia_min_keysize        =       AES_MIN_KEY_SIZE,
-                       .cia_max_keysize        =       AES_MAX_KEY_SIZE,
-                       .cia_setkey             =       aes_set_key,
-                       .cia_encrypt            =       aes_encrypt,
-                       .cia_decrypt            =       aes_decrypt
-               }
-       }
-};
-
-static int __init aes_init(void)
-{
-       gen_tabs();
-       return crypto_register_alg(&aes_alg);
-}
-
-static void __exit aes_fini(void)
-{
-       crypto_unregister_alg(&aes_alg);
-}
-
-module_init(aes_init);
-module_exit(aes_fini);
-
-MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, i586 asm optimized");
-MODULE_LICENSE("Dual BSD/GPL");
-MODULE_AUTHOR("Fruhwirth Clemens, James Morris, Brian Gladman, Adam Richter");
-MODULE_ALIAS("aes");
diff --git a/arch/x86/crypto/aes_64.c b/arch/x86/crypto/aes_64.c
deleted file mode 100644 (file)
index 5cdb13e..0000000
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Cryptographic API.
- *
- * AES Cipher Algorithm.
- *
- * Based on Brian Gladman's code.
- *
- * Linux developers:
- *  Alexander Kjeldaas <astor@fast.no>
- *  Herbert Valerio Riedel <hvr@hvrlab.org>
- *  Kyle McMartin <kyle@debian.org>
- *  Adam J. Richter <adam@yggdrasil.com> (conversion to 2.5 API).
- *  Andreas Steinmetz <ast@domdv.de> (adapted to x86_64 assembler)
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * ---------------------------------------------------------------------------
- * Copyright (c) 2002, Dr Brian Gladman <brg@gladman.me.uk>, Worcester, UK.
- * All rights reserved.
- *
- * LICENSE TERMS
- *
- * The free distribution and use of this software in both source and binary
- * form is allowed (with or without changes) provided that:
- *
- *   1. distributions of this source code include the above copyright
- *      notice, this list of conditions and the following disclaimer;
- *
- *   2. distributions in binary form include the above copyright
- *      notice, this list of conditions and the following disclaimer
- *      in the documentation and/or other associated materials;
- *
- *   3. the copyright holder's name is not used to endorse products
- *      built using this software without specific written permission.
- *
- * ALTERNATIVELY, provided that this notice is retained in full, this product
- * may be distributed under the terms of the GNU General Public License (GPL),
- * in which case the provisions of the GPL apply INSTEAD OF those given above.
- *
- * DISCLAIMER
- *
- * This software is provided 'as is' with no explicit or implied warranties
- * in respect of its properties, including, but not limited to, correctness
- * and/or fitness for purpose.
- * ---------------------------------------------------------------------------
- */
-
-/* Some changes from the Gladman version:
-    s/RIJNDAEL(e_key)/E_KEY/g
-    s/RIJNDAEL(d_key)/D_KEY/g
-*/
-
-#include <asm/byteorder.h>
-#include <linux/bitops.h>
-#include <linux/crypto.h>
-#include <linux/errno.h>
-#include <linux/init.h>
-#include <linux/module.h>
-#include <linux/types.h>
-
-#define AES_MIN_KEY_SIZE       16
-#define AES_MAX_KEY_SIZE       32
-
-#define AES_BLOCK_SIZE         16
-
-/*
- * #define byte(x, nr) ((unsigned char)((x) >> (nr*8)))
- */
-static inline u8 byte(const u32 x, const unsigned n)
-{
-       return x >> (n << 3);
-}
-
-struct aes_ctx
-{
-       u32 key_length;
-       u32 buf[120];
-};
-
-#define E_KEY (&ctx->buf[0])
-#define D_KEY (&ctx->buf[60])
-
-static u8 pow_tab[256] __initdata;
-static u8 log_tab[256] __initdata;
-static u8 sbx_tab[256] __initdata;
-static u8 isb_tab[256] __initdata;
-static u32 rco_tab[10];
-u32 aes_ft_tab[4][256];
-u32 aes_it_tab[4][256];
-
-u32 aes_fl_tab[4][256];
-u32 aes_il_tab[4][256];
-
-static inline u8 f_mult(u8 a, u8 b)
-{
-       u8 aa = log_tab[a], cc = aa + log_tab[b];
-
-       return pow_tab[cc + (cc < aa ? 1 : 0)];
-}
-
-#define ff_mult(a, b) (a && b ? f_mult(a, b) : 0)
-
-#define ls_box(x)                              \
-       (aes_fl_tab[0][byte(x, 0)] ^            \
-        aes_fl_tab[1][byte(x, 1)] ^            \
-        aes_fl_tab[2][byte(x, 2)] ^            \
-        aes_fl_tab[3][byte(x, 3)])
-
-static void __init gen_tabs(void)
-{
-       u32 i, t;
-       u8 p, q;
-
-       /* log and power tables for GF(2**8) finite field with
-          0x011b as modular polynomial - the simplest primitive
-          root is 0x03, used here to generate the tables */
-
-       for (i = 0, p = 1; i < 256; ++i) {
-               pow_tab[i] = (u8)p;
-               log_tab[p] = (u8)i;
-
-               p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0);
-       }
-
-       log_tab[1] = 0;
-
-       for (i = 0, p = 1; i < 10; ++i) {
-               rco_tab[i] = p;
-
-               p = (p << 1) ^ (p & 0x80 ? 0x01b : 0);
-       }
-
-       for (i = 0; i < 256; ++i) {
-               p = (i ? pow_tab[255 - log_tab[i]] : 0);
-               q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2));
-               p ^= 0x63 ^ q ^ ((q >> 6) | (q << 2));
-               sbx_tab[i] = p;
-               isb_tab[p] = (u8)i;
-       }
-
-       for (i = 0; i < 256; ++i) {
-               p = sbx_tab[i];
-
-               t = p;
-               aes_fl_tab[0][i] = t;
-               aes_fl_tab[1][i] = rol32(t, 8);
-               aes_fl_tab[2][i] = rol32(t, 16);
-               aes_fl_tab[3][i] = rol32(t, 24);
-
-               t = ((u32)ff_mult(2, p)) |
-                   ((u32)p << 8) |
-                   ((u32)p << 16) | ((u32)ff_mult(3, p) << 24);
-
-               aes_ft_tab[0][i] = t;
-               aes_ft_tab[1][i] = rol32(t, 8);
-               aes_ft_tab[2][i] = rol32(t, 16);
-               aes_ft_tab[3][i] = rol32(t, 24);
-
-               p = isb_tab[i];
-
-               t = p;
-               aes_il_tab[0][i] = t;
-               aes_il_tab[1][i] = rol32(t, 8);
-               aes_il_tab[2][i] = rol32(t, 16);
-               aes_il_tab[3][i] = rol32(t, 24);
-
-               t = ((u32)ff_mult(14, p)) |
-                   ((u32)ff_mult(9, p) << 8) |
-                   ((u32)ff_mult(13, p) << 16) |
-                   ((u32)ff_mult(11, p) << 24);
-
-               aes_it_tab[0][i] = t;
-               aes_it_tab[1][i] = rol32(t, 8);
-               aes_it_tab[2][i] = rol32(t, 16);
-               aes_it_tab[3][i] = rol32(t, 24);
-       }
-}
-
-#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
-
-#define imix_col(y, x)                 \
-       u    = star_x(x);               \
-       v    = star_x(u);               \
-       w    = star_x(v);               \
-       t    = w ^ (x);                 \
-       (y)  = u ^ v ^ w;               \
-       (y) ^= ror32(u ^ t,  8) ^       \
-              ror32(v ^ t, 16) ^       \
-              ror32(t, 24)
-
-/* initialise the key schedule from the user supplied key */
-
-#define loop4(i)                                       \
-{                                                      \
-       t = ror32(t,  8); t = ls_box(t) ^ rco_tab[i];   \
-       t ^= E_KEY[4 * i];     E_KEY[4 * i + 4] = t;    \
-       t ^= E_KEY[4 * i + 1]; E_KEY[4 * i + 5] = t;    \
-       t ^= E_KEY[4 * i + 2]; E_KEY[4 * i + 6] = t;    \
-       t ^= E_KEY[4 * i + 3]; E_KEY[4 * i + 7] = t;    \
-}
-
-#define loop6(i)                                       \
-{                                                      \
-       t = ror32(t,  8); t = ls_box(t) ^ rco_tab[i];   \
-       t ^= E_KEY[6 * i];     E_KEY[6 * i + 6] = t;    \
-       t ^= E_KEY[6 * i + 1]; E_KEY[6 * i + 7] = t;    \
-       t ^= E_KEY[6 * i + 2]; E_KEY[6 * i + 8] = t;    \
-       t ^= E_KEY[6 * i + 3]; E_KEY[6 * i + 9] = t;    \
-       t ^= E_KEY[6 * i + 4]; E_KEY[6 * i + 10] = t;   \
-       t ^= E_KEY[6 * i + 5]; E_KEY[6 * i + 11] = t;   \
-}
-
-#define loop8(i)                                       \
-{                                                      \
-       t = ror32(t,  8); ; t = ls_box(t) ^ rco_tab[i]; \
-       t ^= E_KEY[8 * i];     E_KEY[8 * i + 8] = t;    \
-       t ^= E_KEY[8 * i + 1]; E_KEY[8 * i + 9] = t;    \
-       t ^= E_KEY[8 * i + 2]; E_KEY[8 * i + 10] = t;   \
-       t ^= E_KEY[8 * i + 3]; E_KEY[8 * i + 11] = t;   \
-       t  = E_KEY[8 * i + 4] ^ ls_box(t);              \
-       E_KEY[8 * i + 12] = t;                          \
-       t ^= E_KEY[8 * i + 5]; E_KEY[8 * i + 13] = t;   \
-       t ^= E_KEY[8 * i + 6]; E_KEY[8 * i + 14] = t;   \
-       t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t;   \
-}
-
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-                      unsigned int key_len)
-{
-       struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
-       const __le32 *key = (const __le32 *)in_key;
-       u32 *flags = &tfm->crt_flags;
-       u32 i, j, t, u, v, w;
-
-       if (key_len % 8) {
-               *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-               return -EINVAL;
-       }
-
-       ctx->key_length = key_len;
-
-       D_KEY[key_len + 24] = E_KEY[0] = le32_to_cpu(key[0]);
-       D_KEY[key_len + 25] = E_KEY[1] = le32_to_cpu(key[1]);
-       D_KEY[key_len + 26] = E_KEY[2] = le32_to_cpu(key[2]);
-       D_KEY[key_len + 27] = E_KEY[3] = le32_to_cpu(key[3]);
-
-       switch (key_len) {
-       case 16:
-               t = E_KEY[3];
-               for (i = 0; i < 10; ++i)
-                       loop4(i);
-               break;
-
-       case 24:
-               E_KEY[4] = le32_to_cpu(key[4]);
-               t = E_KEY[5] = le32_to_cpu(key[5]);
-               for (i = 0; i < 8; ++i)
-                       loop6 (i);
-               break;
-
-       case 32:
-               E_KEY[4] = le32_to_cpu(key[4]);
-               E_KEY[5] = le32_to_cpu(key[5]);
-               E_KEY[6] = le32_to_cpu(key[6]);
-               t = E_KEY[7] = le32_to_cpu(key[7]);
-               for (i = 0; i < 7; ++i)
-                       loop8(i);
-               break;
-       }
-
-       D_KEY[0] = E_KEY[key_len + 24];
-       D_KEY[1] = E_KEY[key_len + 25];
-       D_KEY[2] = E_KEY[key_len + 26];
-       D_KEY[3] = E_KEY[key_len + 27];
-
-       for (i = 4; i < key_len + 24; ++i) {
-               j = key_len + 24 - (i & ~3) + (i & 3);
-               imix_col(D_KEY[j], E_KEY[i]);
-       }
-
-       return 0;
-}
-
-asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
-asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
-
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-       aes_enc_blk(tfm, dst, src);
-}
-
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-       aes_dec_blk(tfm, dst, src);
-}
-
-static struct crypto_alg aes_alg = {
-       .cra_name               =       "aes",
-       .cra_driver_name        =       "aes-x86_64",
-       .cra_priority           =       200,
-       .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
-       .cra_blocksize          =       AES_BLOCK_SIZE,
-       .cra_ctxsize            =       sizeof(struct aes_ctx),
-       .cra_module             =       THIS_MODULE,
-       .cra_list               =       LIST_HEAD_INIT(aes_alg.cra_list),
-       .cra_u                  =       {
-               .cipher = {
-                       .cia_min_keysize        =       AES_MIN_KEY_SIZE,
-                       .cia_max_keysize        =       AES_MAX_KEY_SIZE,
-                       .cia_setkey             =       aes_set_key,
-                       .cia_encrypt            =       aes_encrypt,
-                       .cia_decrypt            =       aes_decrypt
-               }
-       }
-};
-
-static int __init aes_init(void)
-{
-       gen_tabs();
-       return crypto_register_alg(&aes_alg);
-}
-
-static void __exit aes_fini(void)
-{
-       crypto_unregister_alg(&aes_alg);
-}
-
-module_init(aes_init);
-module_exit(aes_fini);
-
-MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
-MODULE_LICENSE("GPL");
-MODULE_ALIAS("aes");
diff --git a/arch/x86/crypto/aes_glue.c b/arch/x86/crypto/aes_glue.c
new file mode 100644
index 0000000..71f4578
--- /dev/null
@@ -0,0 +1,57 @@
+/*
+ * Glue Code for the asm optimized version of the AES Cipher Algorithm
+ *
+ */
+
+#include <crypto/aes.h>
+
+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+       aes_enc_blk(tfm, dst, src);
+}
+
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+       aes_dec_blk(tfm, dst, src);
+}
+
+static struct crypto_alg aes_alg = {
+       .cra_name               = "aes",
+       .cra_driver_name        = "aes-asm",
+       .cra_priority           = 200,
+       .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
+       .cra_blocksize          = AES_BLOCK_SIZE,
+       .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
+       .cra_module             = THIS_MODULE,
+       .cra_list               = LIST_HEAD_INIT(aes_alg.cra_list),
+       .cra_u  = {
+               .cipher = {
+                       .cia_min_keysize        = AES_MIN_KEY_SIZE,
+                       .cia_max_keysize        = AES_MAX_KEY_SIZE,
+                       .cia_setkey             = crypto_aes_set_key,
+                       .cia_encrypt            = aes_encrypt,
+                       .cia_decrypt            = aes_decrypt
+               }
+       }
+};
+
+static int __init aes_init(void)
+{
+       return crypto_register_alg(&aes_alg);
+}
+
+static void __exit aes_fini(void)
+{
+       crypto_unregister_alg(&aes_alg);
+}
+
+module_init(aes_init);
+module_exit(aes_fini);
+
+MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, asm optimized");
+MODULE_LICENSE("GPL");
+MODULE_ALIAS("aes");
+MODULE_ALIAS("aes-asm");
diff --git a/arch/x86/crypto/salsa20-i586-asm_32.S b/arch/x86/crypto/salsa20-i586-asm_32.S
new file mode 100644
index 0000000..72eb306
--- /dev/null
@@ -0,0 +1,1114 @@
+# salsa20_pm.s version 20051229
+# D. J. Bernstein
+# Public domain.
+
+# enter ECRYPT_encrypt_bytes
+.text
+.p2align 5
+.globl ECRYPT_encrypt_bytes
+ECRYPT_encrypt_bytes:
+       mov     %esp,%eax
+       and     $31,%eax
+       add     $256,%eax
+       sub     %eax,%esp
+       # eax_stack = eax
+       movl    %eax,80(%esp)
+       # ebx_stack = ebx
+       movl    %ebx,84(%esp)
+       # esi_stack = esi
+       movl    %esi,88(%esp)
+       # edi_stack = edi
+       movl    %edi,92(%esp)
+       # ebp_stack = ebp
+       movl    %ebp,96(%esp)
+       # x = arg1
+       movl    4(%esp,%eax),%edx
+       # m = arg2
+       movl    8(%esp,%eax),%esi
+       # out = arg3
+       movl    12(%esp,%eax),%edi
+       # bytes = arg4
+       movl    16(%esp,%eax),%ebx
+       # bytes -= 0
+       sub     $0,%ebx
+       # goto done if unsigned<=
+       jbe     ._done
+._start:
+       # in0 = *(uint32 *) (x + 0)
+       movl    0(%edx),%eax
+       # in1 = *(uint32 *) (x + 4)
+       movl    4(%edx),%ecx
+       # in2 = *(uint32 *) (x + 8)
+       movl    8(%edx),%ebp
+       # j0 = in0
+       movl    %eax,164(%esp)
+       # in3 = *(uint32 *) (x + 12)
+       movl    12(%edx),%eax
+       # j1 = in1
+       movl    %ecx,168(%esp)
+       # in4 = *(uint32 *) (x + 16)
+       movl    16(%edx),%ecx
+       # j2 = in2
+       movl    %ebp,172(%esp)
+       # in5 = *(uint32 *) (x + 20)
+       movl    20(%edx),%ebp
+       # j3 = in3
+       movl    %eax,176(%esp)
+       # in6 = *(uint32 *) (x + 24)
+       movl    24(%edx),%eax
+       # j4 = in4
+       movl    %ecx,180(%esp)
+       # in7 = *(uint32 *) (x + 28)
+       movl    28(%edx),%ecx
+       # j5 = in5
+       movl    %ebp,184(%esp)
+       # in8 = *(uint32 *) (x + 32)
+       movl    32(%edx),%ebp
+       # j6 = in6
+       movl    %eax,188(%esp)
+       # in9 = *(uint32 *) (x + 36)
+       movl    36(%edx),%eax
+       # j7 = in7
+       movl    %ecx,192(%esp)
+       # in10 = *(uint32 *) (x + 40)
+       movl    40(%edx),%ecx
+       # j8 = in8
+       movl    %ebp,196(%esp)
+       # in11 = *(uint32 *) (x + 44)
+       movl    44(%edx),%ebp
+       # j9 = in9
+       movl    %eax,200(%esp)
+       # in12 = *(uint32 *) (x + 48)
+       movl    48(%edx),%eax
+       # j10 = in10
+       movl    %ecx,204(%esp)
+       # in13 = *(uint32 *) (x + 52)
+       movl    52(%edx),%ecx
+       # j11 = in11
+       movl    %ebp,208(%esp)
+       # in14 = *(uint32 *) (x + 56)
+       movl    56(%edx),%ebp
+       # j12 = in12
+       movl    %eax,212(%esp)
+       # in15 = *(uint32 *) (x + 60)
+       movl    60(%edx),%eax
+       # j13 = in13
+       movl    %ecx,216(%esp)
+       # j14 = in14
+       movl    %ebp,220(%esp)
+       # j15 = in15
+       movl    %eax,224(%esp)
+       # x_backup = x
+       movl    %edx,64(%esp)
+._bytesatleast1:
+       #   bytes - 64
+       cmp     $64,%ebx
+       #   goto nocopy if unsigned>=
+       jae     ._nocopy
+       #     ctarget = out
+       movl    %edi,228(%esp)
+       #     out = &tmp
+       leal    0(%esp),%edi
+       #     i = bytes
+       mov     %ebx,%ecx
+       #     while (i) { *out++ = *m++; --i }
+       rep     movsb
+       #     out = &tmp
+       leal    0(%esp),%edi
+       #     m = &tmp
+       leal    0(%esp),%esi
+._nocopy:
+       #   out_backup = out
+       movl    %edi,72(%esp)
+       #   m_backup = m
+       movl    %esi,68(%esp)
+       #   bytes_backup = bytes
+       movl    %ebx,76(%esp)
+       #   in0 = j0
+       movl    164(%esp),%eax
+       #   in1 = j1
+       movl    168(%esp),%ecx
+       #   in2 = j2
+       movl    172(%esp),%edx
+       #   in3 = j3
+       movl    176(%esp),%ebx
+       #   x0 = in0
+       movl    %eax,100(%esp)
+       #   x1 = in1
+       movl    %ecx,104(%esp)
+       #   x2 = in2
+       movl    %edx,108(%esp)
+       #   x3 = in3
+       movl    %ebx,112(%esp)
+       #   in4 = j4
+       movl    180(%esp),%eax
+       #   in5 = j5
+       movl    184(%esp),%ecx
+       #   in6 = j6
+       movl    188(%esp),%edx
+       #   in7 = j7
+       movl    192(%esp),%ebx
+       #   x4 = in4
+       movl    %eax,116(%esp)
+       #   x5 = in5
+       movl    %ecx,120(%esp)
+       #   x6 = in6
+       movl    %edx,124(%esp)
+       #   x7 = in7
+       movl    %ebx,128(%esp)
+       #   in8 = j8
+       movl    196(%esp),%eax
+       #   in9 = j9
+       movl    200(%esp),%ecx
+       #   in10 = j10
+       movl    204(%esp),%edx
+       #   in11 = j11
+       movl    208(%esp),%ebx
+       #   x8 = in8
+       movl    %eax,132(%esp)
+       #   x9 = in9
+       movl    %ecx,136(%esp)
+       #   x10 = in10
+       movl    %edx,140(%esp)
+       #   x11 = in11
+       movl    %ebx,144(%esp)
+       #   in12 = j12
+       movl    212(%esp),%eax
+       #   in13 = j13
+       movl    216(%esp),%ecx
+       #   in14 = j14
+       movl    220(%esp),%edx
+       #   in15 = j15
+       movl    224(%esp),%ebx
+       #   x12 = in12
+       movl    %eax,148(%esp)
+       #   x13 = in13
+       movl    %ecx,152(%esp)
+       #   x14 = in14
+       movl    %edx,156(%esp)
+       #   x15 = in15
+       movl    %ebx,160(%esp)
+       #   i = 20
+       mov     $20,%ebp
+       # p = x0
+       movl    100(%esp),%eax
+       # s = x5
+       movl    120(%esp),%ecx
+       # t = x10
+       movl    140(%esp),%edx
+       # w = x15
+       movl    160(%esp),%ebx
+._mainloop:
+       # x0 = p
+       movl    %eax,100(%esp)
+       #                               x10 = t
+       movl    %edx,140(%esp)
+       # p += x12
+       addl    148(%esp),%eax
+       #               x5 = s
+       movl    %ecx,120(%esp)
+       #                               t += x6
+       addl    124(%esp),%edx
+       #                                               x15 = w
+       movl    %ebx,160(%esp)
+       #               r = x1
+       movl    104(%esp),%esi
+       #               r += s
+       add     %ecx,%esi
+       #                                               v = x11
+       movl    144(%esp),%edi
+       #                                               v += w
+       add     %ebx,%edi
+       # p <<<= 7
+       rol     $7,%eax
+       # p ^= x4
+       xorl    116(%esp),%eax
+       #                               t <<<= 7
+       rol     $7,%edx
+       #                               t ^= x14
+       xorl    156(%esp),%edx
+       #               r <<<= 7
+       rol     $7,%esi
+       #               r ^= x9
+       xorl    136(%esp),%esi
+       #                                               v <<<= 7
+       rol     $7,%edi
+       #                                               v ^= x3
+       xorl    112(%esp),%edi
+       # x4 = p
+       movl    %eax,116(%esp)
+       #                               x14 = t
+       movl    %edx,156(%esp)
+       # p += x0
+       addl    100(%esp),%eax
+       #               x9 = r
+       movl    %esi,136(%esp)
+       #                               t += x10
+       addl    140(%esp),%edx
+       #                                               x3 = v
+       movl    %edi,112(%esp)
+       # p <<<= 9
+       rol     $9,%eax
+       # p ^= x8
+       xorl    132(%esp),%eax
+       #                               t <<<= 9
+       rol     $9,%edx
+       #                               t ^= x2
+       xorl    108(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 9
+       rol     $9,%ecx
+       #               s ^= x13
+       xorl    152(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 9
+       rol     $9,%ebx
+       #                                               w ^= x7
+       xorl    128(%esp),%ebx
+       # x8 = p
+       movl    %eax,132(%esp)
+       #                               x2 = t
+       movl    %edx,108(%esp)
+       # p += x4
+       addl    116(%esp),%eax
+       #               x13 = s
+       movl    %ecx,152(%esp)
+       #                               t += x14
+       addl    156(%esp),%edx
+       #                                               x7 = w
+       movl    %ebx,128(%esp)
+       # p <<<= 13
+       rol     $13,%eax
+       # p ^= x12
+       xorl    148(%esp),%eax
+       #                               t <<<= 13
+       rol     $13,%edx
+       #                               t ^= x6
+       xorl    124(%esp),%edx
+       #               r += s
+       add     %ecx,%esi
+       #               r <<<= 13
+       rol     $13,%esi
+       #               r ^= x1
+       xorl    104(%esp),%esi
+       #                                               v += w
+       add     %ebx,%edi
+       #                                               v <<<= 13
+       rol     $13,%edi
+       #                                               v ^= x11
+       xorl    144(%esp),%edi
+       # x12 = p
+       movl    %eax,148(%esp)
+       #                               x6 = t
+       movl    %edx,124(%esp)
+       # p += x8
+       addl    132(%esp),%eax
+       #               x1 = r
+       movl    %esi,104(%esp)
+       #                               t += x2
+       addl    108(%esp),%edx
+       #                                               x11 = v
+       movl    %edi,144(%esp)
+       # p <<<= 18
+       rol     $18,%eax
+       # p ^= x0
+       xorl    100(%esp),%eax
+       #                               t <<<= 18
+       rol     $18,%edx
+       #                               t ^= x10
+       xorl    140(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 18
+       rol     $18,%ecx
+       #               s ^= x5
+       xorl    120(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 18
+       rol     $18,%ebx
+       #                                               w ^= x15
+       xorl    160(%esp),%ebx
+       # x0 = p
+       movl    %eax,100(%esp)
+       #                               x10 = t
+       movl    %edx,140(%esp)
+       # p += x3
+       addl    112(%esp),%eax
+       # p <<<= 7
+       rol     $7,%eax
+       #               x5 = s
+       movl    %ecx,120(%esp)
+       #                               t += x9
+       addl    136(%esp),%edx
+       #                                               x15 = w
+       movl    %ebx,160(%esp)
+       #               r = x4
+       movl    116(%esp),%esi
+       #               r += s
+       add     %ecx,%esi
+       #                                               v = x14
+       movl    156(%esp),%edi
+       #                                               v += w
+       add     %ebx,%edi
+       # p ^= x1
+       xorl    104(%esp),%eax
+       #                               t <<<= 7
+       rol     $7,%edx
+       #                               t ^= x11
+       xorl    144(%esp),%edx
+       #               r <<<= 7
+       rol     $7,%esi
+       #               r ^= x6
+       xorl    124(%esp),%esi
+       #                                               v <<<= 7
+       rol     $7,%edi
+       #                                               v ^= x12
+       xorl    148(%esp),%edi
+       # x1 = p
+       movl    %eax,104(%esp)
+       #                               x11 = t
+       movl    %edx,144(%esp)
+       # p += x0
+       addl    100(%esp),%eax
+       #               x6 = r
+       movl    %esi,124(%esp)
+       #                               t += x10
+       addl    140(%esp),%edx
+       #                                               x12 = v
+       movl    %edi,148(%esp)
+       # p <<<= 9
+       rol     $9,%eax
+       # p ^= x2
+       xorl    108(%esp),%eax
+       #                               t <<<= 9
+       rol     $9,%edx
+       #                               t ^= x8
+       xorl    132(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 9
+       rol     $9,%ecx
+       #               s ^= x7
+       xorl    128(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 9
+       rol     $9,%ebx
+       #                                               w ^= x13
+       xorl    152(%esp),%ebx
+       # x2 = p
+       movl    %eax,108(%esp)
+       #                               x8 = t
+       movl    %edx,132(%esp)
+       # p += x1
+       addl    104(%esp),%eax
+       #               x7 = s
+       movl    %ecx,128(%esp)
+       #                               t += x11
+       addl    144(%esp),%edx
+       #                                               x13 = w
+       movl    %ebx,152(%esp)
+       # p <<<= 13
+       rol     $13,%eax
+       # p ^= x3
+       xorl    112(%esp),%eax
+       #                               t <<<= 13
+       rol     $13,%edx
+       #                               t ^= x9
+       xorl    136(%esp),%edx
+       #               r += s
+       add     %ecx,%esi
+       #               r <<<= 13
+       rol     $13,%esi
+       #               r ^= x4
+       xorl    116(%esp),%esi
+       #                                               v += w
+       add     %ebx,%edi
+       #                                               v <<<= 13
+       rol     $13,%edi
+       #                                               v ^= x14
+       xorl    156(%esp),%edi
+       # x3 = p
+       movl    %eax,112(%esp)
+       #                               x9 = t
+       movl    %edx,136(%esp)
+       # p += x2
+       addl    108(%esp),%eax
+       #               x4 = r
+       movl    %esi,116(%esp)
+       #                               t += x8
+       addl    132(%esp),%edx
+       #                                               x14 = v
+       movl    %edi,156(%esp)
+       # p <<<= 18
+       rol     $18,%eax
+       # p ^= x0
+       xorl    100(%esp),%eax
+       #                               t <<<= 18
+       rol     $18,%edx
+       #                               t ^= x10
+       xorl    140(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 18
+       rol     $18,%ecx
+       #               s ^= x5
+       xorl    120(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 18
+       rol     $18,%ebx
+       #                                               w ^= x15
+       xorl    160(%esp),%ebx
+       # x0 = p
+       movl    %eax,100(%esp)
+       #                               x10 = t
+       movl    %edx,140(%esp)
+       # p += x12
+       addl    148(%esp),%eax
+       #               x5 = s
+       movl    %ecx,120(%esp)
+       #                               t += x6
+       addl    124(%esp),%edx
+       #                                               x15 = w
+       movl    %ebx,160(%esp)
+       #               r = x1
+       movl    104(%esp),%esi
+       #               r += s
+       add     %ecx,%esi
+       #                                               v = x11
+       movl    144(%esp),%edi
+       #                                               v += w
+       add     %ebx,%edi
+       # p <<<= 7
+       rol     $7,%eax
+       # p ^= x4
+       xorl    116(%esp),%eax
+       #                               t <<<= 7
+       rol     $7,%edx
+       #                               t ^= x14
+       xorl    156(%esp),%edx
+       #               r <<<= 7
+       rol     $7,%esi
+       #               r ^= x9
+       xorl    136(%esp),%esi
+       #                                               v <<<= 7
+       rol     $7,%edi
+       #                                               v ^= x3
+       xorl    112(%esp),%edi
+       # x4 = p
+       movl    %eax,116(%esp)
+       #                               x14 = t
+       movl    %edx,156(%esp)
+       # p += x0
+       addl    100(%esp),%eax
+       #               x9 = r
+       movl    %esi,136(%esp)
+       #                               t += x10
+       addl    140(%esp),%edx
+       #                                               x3 = v
+       movl    %edi,112(%esp)
+       # p <<<= 9
+       rol     $9,%eax
+       # p ^= x8
+       xorl    132(%esp),%eax
+       #                               t <<<= 9
+       rol     $9,%edx
+       #                               t ^= x2
+       xorl    108(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 9
+       rol     $9,%ecx
+       #               s ^= x13
+       xorl    152(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 9
+       rol     $9,%ebx
+       #                                               w ^= x7
+       xorl    128(%esp),%ebx
+       # x8 = p
+       movl    %eax,132(%esp)
+       #                               x2 = t
+       movl    %edx,108(%esp)
+       # p += x4
+       addl    116(%esp),%eax
+       #               x13 = s
+       movl    %ecx,152(%esp)
+       #                               t += x14
+       addl    156(%esp),%edx
+       #                                               x7 = w
+       movl    %ebx,128(%esp)
+       # p <<<= 13
+       rol     $13,%eax
+       # p ^= x12
+       xorl    148(%esp),%eax
+       #                               t <<<= 13
+       rol     $13,%edx
+       #                               t ^= x6
+       xorl    124(%esp),%edx
+       #               r += s
+       add     %ecx,%esi
+       #               r <<<= 13
+       rol     $13,%esi
+       #               r ^= x1
+       xorl    104(%esp),%esi
+       #                                               v += w
+       add     %ebx,%edi
+       #                                               v <<<= 13
+       rol     $13,%edi
+       #                                               v ^= x11
+       xorl    144(%esp),%edi
+       # x12 = p
+       movl    %eax,148(%esp)
+       #                               x6 = t
+       movl    %edx,124(%esp)
+       # p += x8
+       addl    132(%esp),%eax
+       #               x1 = r
+       movl    %esi,104(%esp)
+       #                               t += x2
+       addl    108(%esp),%edx
+       #                                               x11 = v
+       movl    %edi,144(%esp)
+       # p <<<= 18
+       rol     $18,%eax
+       # p ^= x0
+       xorl    100(%esp),%eax
+       #                               t <<<= 18
+       rol     $18,%edx
+       #                               t ^= x10
+       xorl    140(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 18
+       rol     $18,%ecx
+       #               s ^= x5
+       xorl    120(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 18
+       rol     $18,%ebx
+       #                                               w ^= x15
+       xorl    160(%esp),%ebx
+       # x0 = p
+       movl    %eax,100(%esp)
+       #                               x10 = t
+       movl    %edx,140(%esp)
+       # p += x3
+       addl    112(%esp),%eax
+       # p <<<= 7
+       rol     $7,%eax
+       #               x5 = s
+       movl    %ecx,120(%esp)
+       #                               t += x9
+       addl    136(%esp),%edx
+       #                                               x15 = w
+       movl    %ebx,160(%esp)
+       #               r = x4
+       movl    116(%esp),%esi
+       #               r += s
+       add     %ecx,%esi
+       #                                               v = x14
+       movl    156(%esp),%edi
+       #                                               v += w
+       add     %ebx,%edi
+       # p ^= x1
+       xorl    104(%esp),%eax
+       #                               t <<<= 7
+       rol     $7,%edx
+       #                               t ^= x11
+       xorl    144(%esp),%edx
+       #               r <<<= 7
+       rol     $7,%esi
+       #               r ^= x6
+       xorl    124(%esp),%esi
+       #                                               v <<<= 7
+       rol     $7,%edi
+       #                                               v ^= x12
+       xorl    148(%esp),%edi
+       # x1 = p
+       movl    %eax,104(%esp)
+       #                               x11 = t
+       movl    %edx,144(%esp)
+       # p += x0
+       addl    100(%esp),%eax
+       #               x6 = r
+       movl    %esi,124(%esp)
+       #                               t += x10
+       addl    140(%esp),%edx
+       #                                               x12 = v
+       movl    %edi,148(%esp)
+       # p <<<= 9
+       rol     $9,%eax
+       # p ^= x2
+       xorl    108(%esp),%eax
+       #                               t <<<= 9
+       rol     $9,%edx
+       #                               t ^= x8
+       xorl    132(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 9
+       rol     $9,%ecx
+       #               s ^= x7
+       xorl    128(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 9
+       rol     $9,%ebx
+       #                                               w ^= x13
+       xorl    152(%esp),%ebx
+       # x2 = p
+       movl    %eax,108(%esp)
+       #                               x8 = t
+       movl    %edx,132(%esp)
+       # p += x1
+       addl    104(%esp),%eax
+       #               x7 = s
+       movl    %ecx,128(%esp)
+       #                               t += x11
+       addl    144(%esp),%edx
+       #                                               x13 = w
+       movl    %ebx,152(%esp)
+       # p <<<= 13
+       rol     $13,%eax
+       # p ^= x3
+       xorl    112(%esp),%eax
+       #                               t <<<= 13
+       rol     $13,%edx
+       #                               t ^= x9
+       xorl    136(%esp),%edx
+       #               r += s
+       add     %ecx,%esi
+       #               r <<<= 13
+       rol     $13,%esi
+       #               r ^= x4
+       xorl    116(%esp),%esi
+       #                                               v += w
+       add     %ebx,%edi
+       #                                               v <<<= 13
+       rol     $13,%edi
+       #                                               v ^= x14
+       xorl    156(%esp),%edi
+       # x3 = p
+       movl    %eax,112(%esp)
+       #                               x9 = t
+       movl    %edx,136(%esp)
+       # p += x2
+       addl    108(%esp),%eax
+       #               x4 = r
+       movl    %esi,116(%esp)
+       #                               t += x8
+       addl    132(%esp),%edx
+       #                                               x14 = v
+       movl    %edi,156(%esp)
+       # p <<<= 18
+       rol     $18,%eax
+       # p ^= x0
+       xorl    100(%esp),%eax
+       #                               t <<<= 18
+       rol     $18,%edx
+       #                               t ^= x10
+       xorl    140(%esp),%edx
+       #               s += r
+       add     %esi,%ecx
+       #               s <<<= 18
+       rol     $18,%ecx
+       #               s ^= x5
+       xorl    120(%esp),%ecx
+       #                                               w += v
+       add     %edi,%ebx
+       #                                               w <<<= 18
+       rol     $18,%ebx
+       #                                               w ^= x15
+       xorl    160(%esp),%ebx
+       # i -= 4
+       sub     $4,%ebp
+       # goto mainloop if unsigned >
+       ja      ._mainloop
+       # x0 = p
+       movl    %eax,100(%esp)
+       # x5 = s
+       movl    %ecx,120(%esp)
+       # x10 = t
+       movl    %edx,140(%esp)
+       # x15 = w
+       movl    %ebx,160(%esp)
+       #   out = out_backup
+       movl    72(%esp),%edi
+       #   m = m_backup
+       movl    68(%esp),%esi
+       #   in0 = x0
+       movl    100(%esp),%eax
+       #   in1 = x1
+       movl    104(%esp),%ecx
+       #   in0 += j0
+       addl    164(%esp),%eax
+       #   in1 += j1
+       addl    168(%esp),%ecx
+       #   in0 ^= *(uint32 *) (m + 0)
+       xorl    0(%esi),%eax
+       #   in1 ^= *(uint32 *) (m + 4)
+       xorl    4(%esi),%ecx
+       #   *(uint32 *) (out + 0) = in0
+       movl    %eax,0(%edi)
+       #   *(uint32 *) (out + 4) = in1
+       movl    %ecx,4(%edi)
+       #   in2 = x2
+       movl    108(%esp),%eax
+       #   in3 = x3
+       movl    112(%esp),%ecx
+       #   in2 += j2
+       addl    172(%esp),%eax
+       #   in3 += j3
+       addl    176(%esp),%ecx
+       #   in2 ^= *(uint32 *) (m + 8)
+       xorl    8(%esi),%eax
+       #   in3 ^= *(uint32 *) (m + 12)
+       xorl    12(%esi),%ecx
+       #   *(uint32 *) (out + 8) = in2
+       movl    %eax,8(%edi)
+       #   *(uint32 *) (out + 12) = in3
+       movl    %ecx,12(%edi)
+       #   in4 = x4
+       movl    116(%esp),%eax
+       #   in5 = x5
+       movl    120(%esp),%ecx
+       #   in4 += j4
+       addl    180(%esp),%eax
+       #   in5 += j5
+       addl    184(%esp),%ecx
+       #   in4 ^= *(uint32 *) (m + 16)
+       xorl    16(%esi),%eax
+       #   in5 ^= *(uint32 *) (m + 20)
+       xorl    20(%esi),%ecx
+       #   *(uint32 *) (out + 16) = in4
+       movl    %eax,16(%edi)
+       #   *(uint32 *) (out + 20) = in5
+       movl    %ecx,20(%edi)
+       #   in6 = x6
+       movl    124(%esp),%eax
+       #   in7 = x7
+       movl    128(%esp),%ecx
+       #   in6 += j6
+       addl    188(%esp),%eax
+       #   in7 += j7
+       addl    192(%esp),%ecx
+       #   in6 ^= *(uint32 *) (m + 24)
+       xorl    24(%esi),%eax
+       #   in7 ^= *(uint32 *) (m + 28)
+       xorl    28(%esi),%ecx
+       #   *(uint32 *) (out + 24) = in6
+       movl    %eax,24(%edi)
+       #   *(uint32 *) (out + 28) = in7
+       movl    %ecx,28(%edi)
+       #   in8 = x8
+       movl    132(%esp),%eax
+       #   in9 = x9
+       movl    136(%esp),%ecx
+       #   in8 += j8
+       addl    196(%esp),%eax
+       #   in9 += j9
+       addl    200(%esp),%ecx
+       #   in8 ^= *(uint32 *) (m + 32)
+       xorl    32(%esi),%eax
+       #   in9 ^= *(uint32 *) (m + 36)
+       xorl    36(%esi),%ecx
+       #   *(uint32 *) (out + 32) = in8
+       movl    %eax,32(%edi)
+       #   *(uint32 *) (out + 36) = in9
+       movl    %ecx,36(%edi)
+       #   in10 = x10
+       movl    140(%esp),%eax
+       #   in11 = x11
+       movl    144(%esp),%ecx
+       #   in10 += j10
+       addl    204(%esp),%eax
+       #   in11 += j11
+       addl    208(%esp),%ecx
+       #   in10 ^= *(uint32 *) (m + 40)
+       xorl    40(%esi),%eax
+       #   in11 ^= *(uint32 *) (m + 44)
+       xorl    44(%esi),%ecx
+       #   *(uint32 *) (out + 40) = in10
+       movl    %eax,40(%edi)
+       #   *(uint32 *) (out + 44) = in11
+       movl    %ecx,44(%edi)
+       #   in12 = x12
+       movl    148(%esp),%eax
+       #   in13 = x13
+       movl    152(%esp),%ecx
+       #   in12 += j12
+       addl    212(%esp),%eax
+       #   in13 += j13
+       addl    216(%esp),%ecx
+       #   in12 ^= *(uint32 *) (m + 48)
+       xorl    48(%esi),%eax
+       #   in13 ^= *(uint32 *) (m + 52)
+       xorl    52(%esi),%ecx
+       #   *(uint32 *) (out + 48) = in12
+       movl    %eax,48(%edi)
+       #   *(uint32 *) (out + 52) = in13
+       movl    %ecx,52(%edi)
+       #   in14 = x14
+       movl    156(%esp),%eax
+       #   in15 = x15
+       movl    160(%esp),%ecx
+       #   in14 += j14
+       addl    220(%esp),%eax
+       #   in15 += j15
+       addl    224(%esp),%ecx
+       #   in14 ^= *(uint32 *) (m + 56)
+       xorl    56(%esi),%eax
+       #   in15 ^= *(uint32 *) (m + 60)
+       xorl    60(%esi),%ecx
+       #   *(uint32 *) (out + 56) = in14
+       movl    %eax,56(%edi)
+       #   *(uint32 *) (out + 60) = in15
+       movl    %ecx,60(%edi)
+       #   bytes = bytes_backup
+       movl    76(%esp),%ebx
+       #   in8 = j8
+       movl    196(%esp),%eax
+       #   in9 = j9
+       movl    200(%esp),%ecx
+       #   in8 += 1
+       add     $1,%eax
+       #   in9 += 0 + carry
+       adc     $0,%ecx
+       #   j8 = in8
+       movl    %eax,196(%esp)
+       #   j9 = in9
+       movl    %ecx,200(%esp)
+       #   bytes - 64
+       cmp     $64,%ebx
+       #   goto bytesatleast65 if unsigned>
+       ja      ._bytesatleast65
+       #     goto bytesatleast64 if unsigned>=
+       jae     ._bytesatleast64
+       #       m = out
+       mov     %edi,%esi
+       #       out = ctarget
+       movl    228(%esp),%edi
+       #       i = bytes
+       mov     %ebx,%ecx
+       #       while (i) { *out++ = *m++; --i }
+       rep     movsb
+._bytesatleast64:
+       #     x = x_backup
+       movl    64(%esp),%eax
+       #     in8 = j8
+       movl    196(%esp),%ecx
+       #     in9 = j9
+       movl    200(%esp),%edx
+       #     *(uint32 *) (x + 32) = in8
+       movl    %ecx,32(%eax)
+       #     *(uint32 *) (x + 36) = in9
+       movl    %edx,36(%eax)
+._done:
+       #     eax = eax_stack
+       movl    80(%esp),%eax
+       #     ebx = ebx_stack
+       movl    84(%esp),%ebx
+       #     esi = esi_stack
+       movl    88(%esp),%esi
+       #     edi = edi_stack
+       movl    92(%esp),%edi
+       #     ebp = ebp_stack
+       movl    96(%esp),%ebp
+       #     leave
+       add     %eax,%esp
+       ret
+._bytesatleast65:
+       #   bytes -= 64
+       sub     $64,%ebx
+       #   out += 64
+       add     $64,%edi
+       #   m += 64
+       add     $64,%esi
+       # goto bytesatleast1
+       jmp     ._bytesatleast1
+# enter ECRYPT_keysetup
+.text
+.p2align 5
+.globl ECRYPT_keysetup
+ECRYPT_keysetup:
+       mov     %esp,%eax
+       and     $31,%eax
+       add     $256,%eax
+       sub     %eax,%esp
+       #   eax_stack = eax
+       movl    %eax,64(%esp)
+       #   ebx_stack = ebx
+       movl    %ebx,68(%esp)
+       #   esi_stack = esi
+       movl    %esi,72(%esp)
+       #   edi_stack = edi
+       movl    %edi,76(%esp)
+       #   ebp_stack = ebp
+       movl    %ebp,80(%esp)
+       #   k = arg2
+       movl    8(%esp,%eax),%ecx
+       #   kbits = arg3
+       movl    12(%esp,%eax),%edx
+       #   x = arg1
+       movl    4(%esp,%eax),%eax
+       #   in1 = *(uint32 *) (k + 0)
+       movl    0(%ecx),%ebx
+       #   in2 = *(uint32 *) (k + 4)
+       movl    4(%ecx),%esi
+       #   in3 = *(uint32 *) (k + 8)
+       movl    8(%ecx),%edi
+       #   in4 = *(uint32 *) (k + 12)
+       movl    12(%ecx),%ebp
+       #   *(uint32 *) (x + 4) = in1
+       movl    %ebx,4(%eax)
+       #   *(uint32 *) (x + 8) = in2
+       movl    %esi,8(%eax)
+       #   *(uint32 *) (x + 12) = in3
+       movl    %edi,12(%eax)
+       #   *(uint32 *) (x + 16) = in4
+       movl    %ebp,16(%eax)
+       #   kbits - 256
+       cmp     $256,%edx
+       #   goto kbits128 if unsigned<
+       jb      ._kbits128
+._kbits256:
+       #     in11 = *(uint32 *) (k + 16)
+       movl    16(%ecx),%edx
+       #     in12 = *(uint32 *) (k + 20)
+       movl    20(%ecx),%ebx
+       #     in13 = *(uint32 *) (k + 24)
+       movl    24(%ecx),%esi
+       #     in14 = *(uint32 *) (k + 28)
+       movl    28(%ecx),%ecx
+       #     *(uint32 *) (x + 44) = in11
+       movl    %edx,44(%eax)
+       #     *(uint32 *) (x + 48) = in12
+       movl    %ebx,48(%eax)
+       #     *(uint32 *) (x + 52) = in13
+       movl    %esi,52(%eax)
+       #     *(uint32 *) (x + 56) = in14
+       movl    %ecx,56(%eax)
+       #     in0 = 1634760805
+       mov     $1634760805,%ecx
+       #     in5 = 857760878
+       mov     $857760878,%edx
+       #     in10 = 2036477234
+       mov     $2036477234,%ebx
+       #     in15 = 1797285236
+       mov     $1797285236,%esi
+       #     *(uint32 *) (x + 0) = in0
+       movl    %ecx,0(%eax)
+       #     *(uint32 *) (x + 20) = in5
+       movl    %edx,20(%eax)
+       #     *(uint32 *) (x + 40) = in10
+       movl    %ebx,40(%eax)
+       #     *(uint32 *) (x + 60) = in15
+       movl    %esi,60(%eax)
+       #   goto keysetupdone
+       jmp     ._keysetupdone
+._kbits128:
+       #     in11 = *(uint32 *) (k + 0)
+       movl    0(%ecx),%edx
+       #     in12 = *(uint32 *) (k + 4)
+       movl    4(%ecx),%ebx
+       #     in13 = *(uint32 *) (k + 8)
+       movl    8(%ecx),%esi
+       #     in14 = *(uint32 *) (k + 12)
+       movl    12(%ecx),%ecx
+       #     *(uint32 *) (x + 44) = in11
+       movl    %edx,44(%eax)
+       #     *(uint32 *) (x + 48) = in12
+       movl    %ebx,48(%eax)
+       #     *(uint32 *) (x + 52) = in13
+       movl    %esi,52(%eax)
+       #     *(uint32 *) (x + 56) = in14
+       movl    %ecx,56(%eax)
+       #     in0 = 1634760805
+       mov     $1634760805,%ecx
+       #     in5 = 824206446
+       mov     $824206446,%edx
+       #     in10 = 2036477238
+       mov     $2036477238,%ebx
+       #     in15 = 1797285236
+       mov     $1797285236,%esi
+       #     *(uint32 *) (x + 0) = in0
+       movl    %ecx,0(%eax)
+       #     *(uint32 *) (x + 20) = in5
+       movl    %edx,20(%eax)
+       #     *(uint32 *) (x + 40) = in10
+       movl    %ebx,40(%eax)
+       #     *(uint32 *) (x + 60) = in15
+       movl    %esi,60(%eax)
+._keysetupdone:
+       #   eax = eax_stack
+       movl    64(%esp),%eax
+       #   ebx = ebx_stack
+       movl    68(%esp),%ebx
+       #   esi = esi_stack
+       movl    72(%esp),%esi
+       #   edi = edi_stack
+       movl    76(%esp),%edi
+       #   ebp = ebp_stack
+       movl    80(%esp),%ebp
+       # leave
+       add     %eax,%esp
+       ret
+# enter ECRYPT_ivsetup
+.text
+.p2align 5
+.globl ECRYPT_ivsetup
+ECRYPT_ivsetup:
+       mov     %esp,%eax
+       and     $31,%eax
+       add     $256,%eax
+       sub     %eax,%esp
+       #   eax_stack = eax
+       movl    %eax,64(%esp)
+       #   ebx_stack = ebx
+       movl    %ebx,68(%esp)
+       #   esi_stack = esi
+       movl    %esi,72(%esp)
+       #   edi_stack = edi
+       movl    %edi,76(%esp)
+       #   ebp_stack = ebp
+       movl    %ebp,80(%esp)
+       #   iv = arg2
+       movl    8(%esp,%eax),%ecx
+       #   x = arg1
+       movl    4(%esp,%eax),%eax
+       #   in6 = *(uint32 *) (iv + 0)
+       movl    0(%ecx),%edx
+       #   in7 = *(uint32 *) (iv + 4)
+       movl    4(%ecx),%ecx
+       #   in8 = 0
+       mov     $0,%ebx
+       #   in9 = 0
+       mov     $0,%esi
+       #   *(uint32 *) (x + 24) = in6
+       movl    %edx,24(%eax)
+       #   *(uint32 *) (x + 28) = in7
+       movl    %ecx,28(%eax)
+       #   *(uint32 *) (x + 32) = in8
+       movl    %ebx,32(%eax)
+       #   *(uint32 *) (x + 36) = in9
+       movl    %esi,36(%eax)
+       #   eax = eax_stack
+       movl    64(%esp),%eax
+       #   ebx = ebx_stack
+       movl    68(%esp),%ebx
+       #   esi = esi_stack
+       movl    72(%esp),%esi
+       #   edi = edi_stack
+       movl    76(%esp),%edi
+       #   ebp = ebp_stack
+       movl    80(%esp),%ebp
+       # leave
+       add     %eax,%esp
+       ret
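
The ECRYPT_keysetup paths above load four magic immediates into state words 0, 5, 10 and 15; they are simply the little-endian words of the Salsa20 constants "expand 32-byte k" (256-bit keys) and "expand 16-byte k" (128-bit keys). A quick check in plain C, assuming a little-endian host:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	/* Immediates from ._kbits256 / ._kbits128 above (state words 0, 5, 10, 15). */
	uint32_t sigma[4] = { 1634760805, 857760878, 2036477234, 1797285236 };
	uint32_t tau[4]   = { 1634760805, 824206446, 2036477238, 1797285236 };
	char buf[17] = { 0 };

	memcpy(buf, sigma, sizeof(sigma));	/* little-endian host assumed */
	printf("256-bit constant: \"%s\"\n", buf);	/* expand 32-byte k */
	memcpy(buf, tau, sizeof(tau));
	printf("128-bit constant: \"%s\"\n", buf);	/* expand 16-byte k */
	return 0;
}
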
diff --git a/arch/x86/crypto/salsa20-x86_64-asm_64.S b/arch/x86/crypto/salsa20-x86_64-asm_64.S
new file mode 100644
index 0000000..6214a9b
--- /dev/null
@@ -0,0 +1,920 @@
+# enter ECRYPT_encrypt_bytes
+.text
+.p2align 5
+.globl ECRYPT_encrypt_bytes
+ECRYPT_encrypt_bytes:
+       mov     %rsp,%r11
+       and     $31,%r11
+       add     $256,%r11
+       sub     %r11,%rsp
+       # x = arg1
+       mov     %rdi,%r8
+       # m = arg2
+       mov     %rsi,%rsi
+       # out = arg3
+       mov     %rdx,%rdi
+       # bytes = arg4
+       mov     %rcx,%rdx
+       #               unsigned>? bytes - 0
+       cmp     $0,%rdx
+       # comment:fp stack unchanged by jump
+       # goto done if !unsigned>
+       jbe     ._done
+       # comment:fp stack unchanged by fallthrough
+# start:
+._start:
+       # r11_stack = r11
+       movq    %r11,0(%rsp)
+       # r12_stack = r12
+       movq    %r12,8(%rsp)
+       # r13_stack = r13
+       movq    %r13,16(%rsp)
+       # r14_stack = r14
+       movq    %r14,24(%rsp)
+       # r15_stack = r15
+       movq    %r15,32(%rsp)
+       # rbx_stack = rbx
+       movq    %rbx,40(%rsp)
+       # rbp_stack = rbp
+       movq    %rbp,48(%rsp)
+       # in0 = *(uint64 *) (x + 0)
+       movq    0(%r8),%rcx
+       # in2 = *(uint64 *) (x + 8)
+       movq    8(%r8),%r9
+       # in4 = *(uint64 *) (x + 16)
+       movq    16(%r8),%rax
+       # in6 = *(uint64 *) (x + 24)
+       movq    24(%r8),%r10
+       # in8 = *(uint64 *) (x + 32)
+       movq    32(%r8),%r11
+       # in10 = *(uint64 *) (x + 40)
+       movq    40(%r8),%r12
+       # in12 = *(uint64 *) (x + 48)
+       movq    48(%r8),%r13
+       # in14 = *(uint64 *) (x + 56)
+       movq    56(%r8),%r14
+       # j0 = in0
+       movq    %rcx,56(%rsp)
+       # j2 = in2
+       movq    %r9,64(%rsp)
+       # j4 = in4
+       movq    %rax,72(%rsp)
+       # j6 = in6
+       movq    %r10,80(%rsp)
+       # j8 = in8
+       movq    %r11,88(%rsp)
+       # j10 = in10
+       movq    %r12,96(%rsp)
+       # j12 = in12
+       movq    %r13,104(%rsp)
+       # j14 = in14
+       movq    %r14,112(%rsp)
+       # x_backup = x
+       movq    %r8,120(%rsp)
+# bytesatleast1:
+._bytesatleast1:
+       #                   unsigned<? bytes - 64
+       cmp     $64,%rdx
+       # comment:fp stack unchanged by jump
+       #   goto nocopy if !unsigned<
+       jae     ._nocopy
+       #     ctarget = out
+       movq    %rdi,128(%rsp)
+       #     out = &tmp
+       leaq    192(%rsp),%rdi
+       #     i = bytes
+       mov     %rdx,%rcx
+       #     while (i) { *out++ = *m++; --i }
+       rep     movsb
+       #     out = &tmp
+       leaq    192(%rsp),%rdi
+       #     m = &tmp
+       leaq    192(%rsp),%rsi
+       # comment:fp stack unchanged by fallthrough
+#   nocopy:
+._nocopy:
+       #   out_backup = out
+       movq    %rdi,136(%rsp)
+       #   m_backup = m
+       movq    %rsi,144(%rsp)
+       #   bytes_backup = bytes
+       movq    %rdx,152(%rsp)
+       #   x1 = j0
+       movq    56(%rsp),%rdi
+       #   x0 = x1
+       mov     %rdi,%rdx
+       #   (uint64) x1 >>= 32
+       shr     $32,%rdi
+       #               x3 = j2
+       movq    64(%rsp),%rsi
+       #               x2 = x3
+       mov     %rsi,%rcx
+       #               (uint64) x3 >>= 32
+       shr     $32,%rsi
+       #   x5 = j4
+       movq    72(%rsp),%r8
+       #   x4 = x5
+       mov     %r8,%r9
+       #   (uint64) x5 >>= 32
+       shr     $32,%r8
+       #   x5_stack = x5
+       movq    %r8,160(%rsp)
+       #               x7 = j6
+       movq    80(%rsp),%r8
+       #               x6 = x7
+       mov     %r8,%rax
+       #               (uint64) x7 >>= 32
+       shr     $32,%r8
+       #   x9 = j8
+       movq    88(%rsp),%r10
+       #   x8 = x9
+       mov     %r10,%r11
+       #   (uint64) x9 >>= 32
+       shr     $32,%r10
+       #               x11 = j10
+       movq    96(%rsp),%r12
+       #               x10 = x11
+       mov     %r12,%r13
+       #               x10_stack = x10
+       movq    %r13,168(%rsp)
+       #               (uint64) x11 >>= 32
+       shr     $32,%r12
+       #   x13 = j12
+       movq    104(%rsp),%r13
+       #   x12 = x13
+       mov     %r13,%r14
+       #   (uint64) x13 >>= 32
+       shr     $32,%r13
+       #               x15 = j14
+       movq    112(%rsp),%r15
+       #               x14 = x15
+       mov     %r15,%rbx
+       #               (uint64) x15 >>= 32
+       shr     $32,%r15
+       #               x15_stack = x15
+       movq    %r15,176(%rsp)
+       #   i = 20
+       mov     $20,%r15
+#   mainloop:
+._mainloop:
+       #   i_backup = i
+       movq    %r15,184(%rsp)
+       #               x5 = x5_stack
+       movq    160(%rsp),%r15
+       # a = x12 + x0
+       lea     (%r14,%rdx),%rbp
+       # (uint32) a <<<= 7
+       rol     $7,%ebp
+       # x4 ^= a
+       xor     %rbp,%r9
+       #               b = x1 + x5
+       lea     (%rdi,%r15),%rbp
+       #               (uint32) b <<<= 7
+       rol     $7,%ebp
+       #               x9 ^= b
+       xor     %rbp,%r10
+       # a = x0 + x4
+       lea     (%rdx,%r9),%rbp
+       # (uint32) a <<<= 9
+       rol     $9,%ebp
+       # x8 ^= a
+       xor     %rbp,%r11
+       #               b = x5 + x9
+       lea     (%r15,%r10),%rbp
+       #               (uint32) b <<<= 9
+       rol     $9,%ebp
+       #               x13 ^= b
+       xor     %rbp,%r13
+       # a = x4 + x8
+       lea     (%r9,%r11),%rbp
+       # (uint32) a <<<= 13
+       rol     $13,%ebp
+       # x12 ^= a
+       xor     %rbp,%r14
+       #               b = x9 + x13
+       lea     (%r10,%r13),%rbp
+       #               (uint32) b <<<= 13
+       rol     $13,%ebp
+       #               x1 ^= b
+       xor     %rbp,%rdi
+       # a = x8 + x12
+       lea     (%r11,%r14),%rbp
+       # (uint32) a <<<= 18
+       rol     $18,%ebp
+       # x0 ^= a
+       xor     %rbp,%rdx
+       #               b = x13 + x1
+       lea     (%r13,%rdi),%rbp
+       #               (uint32) b <<<= 18
+       rol     $18,%ebp
+       #               x5 ^= b
+       xor     %rbp,%r15
+       #                               x10 = x10_stack
+       movq    168(%rsp),%rbp
+       #               x5_stack = x5
+       movq    %r15,160(%rsp)
+       #                               c = x6 + x10
+       lea     (%rax,%rbp),%r15
+       #                               (uint32) c <<<= 7
+       rol     $7,%r15d
+       #                               x14 ^= c
+       xor     %r15,%rbx
+       #                               c = x10 + x14
+       lea     (%rbp,%rbx),%r15
+       #                               (uint32) c <<<= 9
+       rol     $9,%r15d
+       #                               x2 ^= c
+       xor     %r15,%rcx
+       #                               c = x14 + x2
+       lea     (%rbx,%rcx),%r15
+       #                               (uint32) c <<<= 13
+       rol     $13,%r15d
+       #                               x6 ^= c
+       xor     %r15,%rax
+       #                               c = x2 + x6
+       lea     (%rcx,%rax),%r15
+       #                               (uint32) c <<<= 18
+       rol     $18,%r15d
+       #                               x10 ^= c
+       xor     %r15,%rbp
+       #                                               x15 = x15_stack
+       movq    176(%rsp),%r15
+       #                               x10_stack = x10
+       movq    %rbp,168(%rsp)
+       #                                               d = x11 + x15
+       lea     (%r12,%r15),%rbp
+       #                                               (uint32) d <<<= 7
+       rol     $7,%ebp
+       #                                               x3 ^= d
+       xor     %rbp,%rsi
+       #                                               d = x15 + x3
+       lea     (%r15,%rsi),%rbp
+       #                                               (uint32) d <<<= 9
+       rol     $9,%ebp
+       #                                               x7 ^= d
+       xor     %rbp,%r8
+       #                                               d = x3 + x7
+       lea     (%rsi,%r8),%rbp
+       #                                               (uint32) d <<<= 13
+       rol     $13,%ebp
+       #                                               x11 ^= d
+       xor     %rbp,%r12
+       #                                               d = x7 + x11
+       lea     (%r8,%r12),%rbp
+       #                                               (uint32) d <<<= 18
+       rol     $18,%ebp
+       #                                               x15 ^= d
+       xor     %rbp,%r15
+       #                                               x15_stack = x15
+       movq    %r15,176(%rsp)
+       #               x5 = x5_stack
+       movq    160(%rsp),%r15
+       # a = x3 + x0
+       lea     (%rsi,%rdx),%rbp
+       # (uint32) a <<<= 7
+       rol     $7,%ebp
+       # x1 ^= a
+       xor     %rbp,%rdi
+       #               b = x4 + x5
+       lea     (%r9,%r15),%rbp
+       #               (uint32) b <<<= 7
+       rol     $7,%ebp
+       #               x6 ^= b
+       xor     %rbp,%rax
+       # a = x0 + x1
+       lea     (%rdx,%rdi),%rbp
+       # (uint32) a <<<= 9
+       rol     $9,%ebp
+       # x2 ^= a
+       xor     %rbp,%rcx
+       #               b = x5 + x6
+       lea     (%r15,%rax),%rbp
+       #               (uint32) b <<<= 9
+       rol     $9,%ebp
+       #               x7 ^= b
+       xor     %rbp,%r8
+       # a = x1 + x2
+       lea     (%rdi,%rcx),%rbp
+       # (uint32) a <<<= 13
+       rol     $13,%ebp
+       # x3 ^= a
+       xor     %rbp,%rsi
+       #               b = x6 + x7
+       lea     (%rax,%r8),%rbp
+       #               (uint32) b <<<= 13
+       rol     $13,%ebp
+       #               x4 ^= b
+       xor     %rbp,%r9
+       # a = x2 + x3
+       lea     (%rcx,%rsi),%rbp
+       # (uint32) a <<<= 18
+       rol     $18,%ebp
+       # x0 ^= a
+       xor     %rbp,%rdx
+       #               b = x7 + x4
+       lea     (%r8,%r9),%rbp
+       #               (uint32) b <<<= 18
+       rol     $18,%ebp
+       #               x5 ^= b
+       xor     %rbp,%r15
+       #                               x10 = x10_stack
+       movq    168(%rsp),%rbp
+       #               x5_stack = x5
+       movq    %r15,160(%rsp)
+       #                               c = x9 + x10
+       lea     (%r10,%rbp),%r15
+       #                               (uint32) c <<<= 7
+       rol     $7,%r15d
+       #                               x11 ^= c
+       xor     %r15,%r12
+       #                               c = x10 + x11
+       lea     (%rbp,%r12),%r15
+       #                               (uint32) c <<<= 9
+       rol     $9,%r15d
+       #                               x8 ^= c
+       xor     %r15,%r11
+       #                               c = x11 + x8
+       lea     (%r12,%r11),%r15
+       #                               (uint32) c <<<= 13
+       rol     $13,%r15d
+       #                               x9 ^= c
+       xor     %r15,%r10
+       #                               c = x8 + x9
+       lea     (%r11,%r10),%r15
+       #                               (uint32) c <<<= 18
+       rol     $18,%r15d
+       #                               x10 ^= c
+       xor     %r15,%rbp
+       #                                               x15 = x15_stack
+       movq    176(%rsp),%r15
+       #                               x10_stack = x10
+       movq    %rbp,168(%rsp)
+       #                                               d = x14 + x15
+       lea     (%rbx,%r15),%rbp
+       #                                               (uint32) d <<<= 7
+       rol     $7,%ebp
+       #                                               x12 ^= d
+       xor     %rbp,%r14
+       #                                               d = x15 + x12
+       lea     (%r15,%r14),%rbp
+       #                                               (uint32) d <<<= 9
+       rol     $9,%ebp
+       #                                               x13 ^= d
+       xor     %rbp,%r13
+       #                                               d = x12 + x13
+       lea     (%r14,%r13),%rbp
+       #                                               (uint32) d <<<= 13
+       rol     $13,%ebp
+       #                                               x14 ^= d
+       xor     %rbp,%rbx
+       #                                               d = x13 + x14
+       lea     (%r13,%rbx),%rbp
+       #                                               (uint32) d <<<= 18
+       rol     $18,%ebp
+       #                                               x15 ^= d
+       xor     %rbp,%r15
+       #                                               x15_stack = x15
+       movq    %r15,176(%rsp)
+       #               x5 = x5_stack
+       movq    160(%rsp),%r15
+       # a = x12 + x0
+       lea     (%r14,%rdx),%rbp
+       # (uint32) a <<<= 7
+       rol     $7,%ebp
+       # x4 ^= a
+       xor     %rbp,%r9
+       #               b = x1 + x5
+       lea     (%rdi,%r15),%rbp
+       #               (uint32) b <<<= 7
+       rol     $7,%ebp
+       #               x9 ^= b
+       xor     %rbp,%r10
+       # a = x0 + x4
+       lea     (%rdx,%r9),%rbp
+       # (uint32) a <<<= 9
+       rol     $9,%ebp
+       # x8 ^= a
+       xor     %rbp,%r11
+       #               b = x5 + x9
+       lea     (%r15,%r10),%rbp
+       #               (uint32) b <<<= 9
+       rol     $9,%ebp
+       #               x13 ^= b
+       xor     %rbp,%r13
+       # a = x4 + x8
+       lea     (%r9,%r11),%rbp
+       # (uint32) a <<<= 13
+       rol     $13,%ebp
+       # x12 ^= a
+       xor     %rbp,%r14
+       #               b = x9 + x13
+       lea     (%r10,%r13),%rbp
+       #               (uint32) b <<<= 13
+       rol     $13,%ebp
+       #               x1 ^= b
+       xor     %rbp,%rdi
+       # a = x8 + x12
+       lea     (%r11,%r14),%rbp
+       # (uint32) a <<<= 18
+       rol     $18,%ebp
+       # x0 ^= a
+       xor     %rbp,%rdx
+       #               b = x13 + x1
+       lea     (%r13,%rdi),%rbp
+       #               (uint32) b <<<= 18
+       rol     $18,%ebp
+       #               x5 ^= b
+       xor     %rbp,%r15
+       #                               x10 = x10_stack
+       movq    168(%rsp),%rbp
+       #               x5_stack = x5
+       movq    %r15,160(%rsp)
+       #                               c = x6 + x10
+       lea     (%rax,%rbp),%r15
+       #                               (uint32) c <<<= 7
+       rol     $7,%r15d
+       #                               x14 ^= c
+       xor     %r15,%rbx
+       #                               c = x10 + x14
+       lea     (%rbp,%rbx),%r15
+       #                               (uint32) c <<<= 9
+       rol     $9,%r15d
+       #                               x2 ^= c
+       xor     %r15,%rcx
+       #                               c = x14 + x2
+       lea     (%rbx,%rcx),%r15
+       #                               (uint32) c <<<= 13
+       rol     $13,%r15d
+       #                               x6 ^= c
+       xor     %r15,%rax
+       #                               c = x2 + x6
+       lea     (%rcx,%rax),%r15
+       #                               (uint32) c <<<= 18
+       rol     $18,%r15d
+       #                               x10 ^= c
+       xor     %r15,%rbp
+       #                                               x15 = x15_stack
+       movq    176(%rsp),%r15
+       #                               x10_stack = x10
+       movq    %rbp,168(%rsp)
+       #                                               d = x11 + x15
+       lea     (%r12,%r15),%rbp
+       #                                               (uint32) d <<<= 7
+       rol     $7,%ebp
+       #                                               x3 ^= d
+       xor     %rbp,%rsi
+       #                                               d = x15 + x3
+       lea     (%r15,%rsi),%rbp
+       #                                               (uint32) d <<<= 9
+       rol     $9,%ebp
+       #                                               x7 ^= d
+       xor     %rbp,%r8
+       #                                               d = x3 + x7
+       lea     (%rsi,%r8),%rbp
+       #                                               (uint32) d <<<= 13
+       rol     $13,%ebp
+       #                                               x11 ^= d
+       xor     %rbp,%r12
+       #                                               d = x7 + x11
+       lea     (%r8,%r12),%rbp
+       #                                               (uint32) d <<<= 18
+       rol     $18,%ebp
+       #                                               x15 ^= d
+       xor     %rbp,%r15
+       #                                               x15_stack = x15
+       movq    %r15,176(%rsp)
+       #               x5 = x5_stack
+       movq    160(%rsp),%r15
+       # a = x3 + x0
+       lea     (%rsi,%rdx),%rbp
+       # (uint32) a <<<= 7
+       rol     $7,%ebp
+       # x1 ^= a
+       xor     %rbp,%rdi
+       #               b = x4 + x5
+       lea     (%r9,%r15),%rbp
+       #               (uint32) b <<<= 7
+       rol     $7,%ebp
+       #               x6 ^= b
+       xor     %rbp,%rax
+       # a = x0 + x1
+       lea     (%rdx,%rdi),%rbp
+       # (uint32) a <<<= 9
+       rol     $9,%ebp
+       # x2 ^= a
+       xor     %rbp,%rcx
+       #               b = x5 + x6
+       lea     (%r15,%rax),%rbp
+       #               (uint32) b <<<= 9
+       rol     $9,%ebp
+       #               x7 ^= b
+       xor     %rbp,%r8
+       # a = x1 + x2
+       lea     (%rdi,%rcx),%rbp
+       # (uint32) a <<<= 13
+       rol     $13,%ebp
+       # x3 ^= a
+       xor     %rbp,%rsi
+       #               b = x6 + x7
+       lea     (%rax,%r8),%rbp
+       #               (uint32) b <<<= 13
+       rol     $13,%ebp
+       #               x4 ^= b
+       xor     %rbp,%r9
+       # a = x2 + x3
+       lea     (%rcx,%rsi),%rbp
+       # (uint32) a <<<= 18
+       rol     $18,%ebp
+       # x0 ^= a
+       xor     %rbp,%rdx
+       #               b = x7 + x4
+       lea     (%r8,%r9),%rbp
+       #               (uint32) b <<<= 18
+       rol     $18,%ebp
+       #               x5 ^= b
+       xor     %rbp,%r15
+       #                               x10 = x10_stack
+       movq    168(%rsp),%rbp
+       #               x5_stack = x5
+       movq    %r15,160(%rsp)
+       #                               c = x9 + x10
+       lea     (%r10,%rbp),%r15
+       #                               (uint32) c <<<= 7
+       rol     $7,%r15d
+       #                               x11 ^= c
+       xor     %r15,%r12
+       #                               c = x10 + x11
+       lea     (%rbp,%r12),%r15
+       #                               (uint32) c <<<= 9
+       rol     $9,%r15d
+       #                               x8 ^= c
+       xor     %r15,%r11
+       #                               c = x11 + x8
+       lea     (%r12,%r11),%r15
+       #                               (uint32) c <<<= 13
+       rol     $13,%r15d
+       #                               x9 ^= c
+       xor     %r15,%r10
+       #                               c = x8 + x9
+       lea     (%r11,%r10),%r15
+       #                               (uint32) c <<<= 18
+       rol     $18,%r15d
+       #                               x10 ^= c
+       xor     %r15,%rbp
+       #                                               x15 = x15_stack
+       movq    176(%rsp),%r15
+       #                               x10_stack = x10
+       movq    %rbp,168(%rsp)
+       #                                               d = x14 + x15
+       lea     (%rbx,%r15),%rbp
+       #                                               (uint32) d <<<= 7
+       rol     $7,%ebp
+       #                                               x12 ^= d
+       xor     %rbp,%r14
+       #                                               d = x15 + x12
+       lea     (%r15,%r14),%rbp
+       #                                               (uint32) d <<<= 9
+       rol     $9,%ebp
+       #                                               x13 ^= d
+       xor     %rbp,%r13
+       #                                               d = x12 + x13
+       lea     (%r14,%r13),%rbp
+       #                                               (uint32) d <<<= 13
+       rol     $13,%ebp
+       #                                               x14 ^= d
+       xor     %rbp,%rbx
+       #                                               d = x13 + x14
+       lea     (%r13,%rbx),%rbp
+       #                                               (uint32) d <<<= 18
+       rol     $18,%ebp
+       #                                               x15 ^= d
+       xor     %rbp,%r15
+       #                                               x15_stack = x15
+       movq    %r15,176(%rsp)
+       #   i = i_backup
+       movq    184(%rsp),%r15
+       #                  unsigned>? i -= 4
+       sub     $4,%r15
+       # comment:fp stack unchanged by jump
+       # goto mainloop if unsigned>
+       ja      ._mainloop
+       #   (uint32) x2 += j2
+       addl    64(%rsp),%ecx
+       #   x3 <<= 32
+       shl     $32,%rsi
+       #   x3 += j2
+       addq    64(%rsp),%rsi
+       #   (uint64) x3 >>= 32
+       shr     $32,%rsi
+       #   x3 <<= 32
+       shl     $32,%rsi
+       #   x2 += x3
+       add     %rsi,%rcx
+       #   (uint32) x6 += j6
+       addl    80(%rsp),%eax
+       #   x7 <<= 32
+       shl     $32,%r8
+       #   x7 += j6
+       addq    80(%rsp),%r8
+       #   (uint64) x7 >>= 32
+       shr     $32,%r8
+       #   x7 <<= 32
+       shl     $32,%r8
+       #   x6 += x7
+       add     %r8,%rax
+       #   (uint32) x8 += j8
+       addl    88(%rsp),%r11d
+       #   x9 <<= 32
+       shl     $32,%r10
+       #   x9 += j8
+       addq    88(%rsp),%r10
+       #   (uint64) x9 >>= 32
+       shr     $32,%r10
+       #   x9 <<= 32
+       shl     $32,%r10
+       #   x8 += x9
+       add     %r10,%r11
+       #   (uint32) x12 += j12
+       addl    104(%rsp),%r14d
+       #   x13 <<= 32
+       shl     $32,%r13
+       #   x13 += j12
+       addq    104(%rsp),%r13
+       #   (uint64) x13 >>= 32
+       shr     $32,%r13
+       #   x13 <<= 32
+       shl     $32,%r13
+       #   x12 += x13
+       add     %r13,%r14
+       #   (uint32) x0 += j0
+       addl    56(%rsp),%edx
+       #   x1 <<= 32
+       shl     $32,%rdi
+       #   x1 += j0
+       addq    56(%rsp),%rdi
+       #   (uint64) x1 >>= 32
+       shr     $32,%rdi
+       #   x1 <<= 32
+       shl     $32,%rdi
+       #   x0 += x1
+       add     %rdi,%rdx
+       #   x5 = x5_stack
+       movq    160(%rsp),%rdi
+       #   (uint32) x4 += j4
+       addl    72(%rsp),%r9d
+       #   x5 <<= 32
+       shl     $32,%rdi
+       #   x5 += j4
+       addq    72(%rsp),%rdi
+       #   (uint64) x5 >>= 32
+       shr     $32,%rdi
+       #   x5 <<= 32
+       shl     $32,%rdi
+       #   x4 += x5
+       add     %rdi,%r9
+       #   x10 = x10_stack
+       movq    168(%rsp),%r8
+       #   (uint32) x10 += j10
+       addl    96(%rsp),%r8d
+       #   x11 <<= 32
+       shl     $32,%r12
+       #   x11 += j10
+       addq    96(%rsp),%r12
+       #   (uint64) x11 >>= 32
+       shr     $32,%r12
+       #   x11 <<= 32
+       shl     $32,%r12
+       #   x10 += x11
+       add     %r12,%r8
+       #   x15 = x15_stack
+       movq    176(%rsp),%rdi
+       #   (uint32) x14 += j14
+       addl    112(%rsp),%ebx
+       #   x15 <<= 32
+       shl     $32,%rdi
+       #   x15 += j14
+       addq    112(%rsp),%rdi
+       #   (uint64) x15 >>= 32
+       shr     $32,%rdi
+       #   x15 <<= 32
+       shl     $32,%rdi
+       #   x14 += x15
+       add     %rdi,%rbx
+       #   out = out_backup
+       movq    136(%rsp),%rdi
+       #   m = m_backup
+       movq    144(%rsp),%rsi
+       #   x0 ^= *(uint64 *) (m + 0)
+       xorq    0(%rsi),%rdx
+       #   *(uint64 *) (out + 0) = x0
+       movq    %rdx,0(%rdi)
+       #   x2 ^= *(uint64 *) (m + 8)
+       xorq    8(%rsi),%rcx
+       #   *(uint64 *) (out + 8) = x2
+       movq    %rcx,8(%rdi)
+       #   x4 ^= *(uint64 *) (m + 16)
+       xorq    16(%rsi),%r9
+       #   *(uint64 *) (out + 16) = x4
+       movq    %r9,16(%rdi)
+       #   x6 ^= *(uint64 *) (m + 24)
+       xorq    24(%rsi),%rax
+       #   *(uint64 *) (out + 24) = x6
+       movq    %rax,24(%rdi)
+       #   x8 ^= *(uint64 *) (m + 32)
+       xorq    32(%rsi),%r11
+       #   *(uint64 *) (out + 32) = x8
+       movq    %r11,32(%rdi)
+       #   x10 ^= *(uint64 *) (m + 40)
+       xorq    40(%rsi),%r8
+       #   *(uint64 *) (out + 40) = x10
+       movq    %r8,40(%rdi)
+       #   x12 ^= *(uint64 *) (m + 48)
+       xorq    48(%rsi),%r14
+       #   *(uint64 *) (out + 48) = x12
+       movq    %r14,48(%rdi)
+       #   x14 ^= *(uint64 *) (m + 56)
+       xorq    56(%rsi),%rbx
+       #   *(uint64 *) (out + 56) = x14
+       movq    %rbx,56(%rdi)
+       #   bytes = bytes_backup
+       movq    152(%rsp),%rdx
+       #   in8 = j8
+       movq    88(%rsp),%rcx
+       #   in8 += 1
+       add     $1,%rcx
+       #   j8 = in8
+       movq    %rcx,88(%rsp)
+       #                          unsigned>? unsigned<? bytes - 64
+       cmp     $64,%rdx
+       # comment:fp stack unchanged by jump
+       #   goto bytesatleast65 if unsigned>
+       ja      ._bytesatleast65
+       # comment:fp stack unchanged by jump
+       #     goto bytesatleast64 if !unsigned<
+       jae     ._bytesatleast64
+       #       m = out
+       mov     %rdi,%rsi
+       #       out = ctarget
+       movq    128(%rsp),%rdi
+       #       i = bytes
+       mov     %rdx,%rcx
+       #       while (i) { *out++ = *m++; --i }
+       rep     movsb
+       # comment:fp stack unchanged by fallthrough
+#     bytesatleast64:
+._bytesatleast64:
+       #     x = x_backup
+       movq    120(%rsp),%rdi
+       #     in8 = j8
+       movq    88(%rsp),%rsi
+       #     *(uint64 *) (x + 32) = in8
+       movq    %rsi,32(%rdi)
+       #     r11 = r11_stack
+       movq    0(%rsp),%r11
+       #     r12 = r12_stack
+       movq    8(%rsp),%r12
+       #     r13 = r13_stack
+       movq    16(%rsp),%r13
+       #     r14 = r14_stack
+       movq    24(%rsp),%r14
+       #     r15 = r15_stack
+       movq    32(%rsp),%r15
+       #     rbx = rbx_stack
+       movq    40(%rsp),%rbx
+       #     rbp = rbp_stack
+       movq    48(%rsp),%rbp
+       # comment:fp stack unchanged by fallthrough
+#     done:
+._done:
+       #     leave
+       add     %r11,%rsp
+       mov     %rdi,%rax
+       mov     %rsi,%rdx
+       ret
+#   bytesatleast65:
+._bytesatleast65:
+       #   bytes -= 64
+       sub     $64,%rdx
+       #   out += 64
+       add     $64,%rdi
+       #   m += 64
+       add     $64,%rsi
+       # comment:fp stack unchanged by jump
+       # goto bytesatleast1
+       jmp     ._bytesatleast1
+# enter ECRYPT_keysetup
+.text
+.p2align 5
+.globl ECRYPT_keysetup
+ECRYPT_keysetup:
+       mov     %rsp,%r11
+       and     $31,%r11
+       add     $256,%r11
+       sub     %r11,%rsp
+       #   k = arg2
+       mov     %rsi,%rsi
+       #   kbits = arg3
+       mov     %rdx,%rdx
+       #   x = arg1
+       mov     %rdi,%rdi
+       #   in0 = *(uint64 *) (k + 0)
+       movq    0(%rsi),%r8
+       #   in2 = *(uint64 *) (k + 8)
+       movq    8(%rsi),%r9
+       #   *(uint64 *) (x + 4) = in0
+       movq    %r8,4(%rdi)
+       #   *(uint64 *) (x + 12) = in2
+       movq    %r9,12(%rdi)
+       #                    unsigned<? kbits - 256
+       cmp     $256,%rdx
+       # comment:fp stack unchanged by jump
+       #   goto kbits128 if unsigned<
+       jb      ._kbits128
+#   kbits256:
+._kbits256:
+       #     in10 = *(uint64 *) (k + 16)
+       movq    16(%rsi),%rdx
+       #     in12 = *(uint64 *) (k + 24)
+       movq    24(%rsi),%rsi
+       #     *(uint64 *) (x + 44) = in10
+       movq    %rdx,44(%rdi)
+       #     *(uint64 *) (x + 52) = in12
+       movq    %rsi,52(%rdi)
+       #     in0 = 1634760805
+       mov     $1634760805,%rsi
+       #     in4 = 857760878
+       mov     $857760878,%rdx
+       #     in10 = 2036477234
+       mov     $2036477234,%rcx
+       #     in14 = 1797285236
+       mov     $1797285236,%r8
+       #     *(uint32 *) (x + 0) = in0
+       movl    %esi,0(%rdi)
+       #     *(uint32 *) (x + 20) = in4
+       movl    %edx,20(%rdi)
+       #     *(uint32 *) (x + 40) = in10
+       movl    %ecx,40(%rdi)
+       #     *(uint32 *) (x + 60) = in14
+       movl    %r8d,60(%rdi)
+       # comment:fp stack unchanged by jump
+       #   goto keysetupdone
+       jmp     ._keysetupdone
+#   kbits128:
+._kbits128:
+       #     in10 = *(uint64 *) (k + 0)
+       movq    0(%rsi),%rdx
+       #     in12 = *(uint64 *) (k + 8)
+       movq    8(%rsi),%rsi
+       #     *(uint64 *) (x + 44) = in10
+       movq    %rdx,44(%rdi)
+       #     *(uint64 *) (x + 52) = in12
+       movq    %rsi,52(%rdi)
+       #     in0 = 1634760805
+       mov     $1634760805,%rsi
+       #     in4 = 824206446
+       mov     $824206446,%rdx
+       #     in10 = 2036477238
+       mov     $2036477238,%rcx
+       #     in14 = 1797285236
+       mov     $1797285236,%r8
+       #     *(uint32 *) (x + 0) = in0
+       movl    %esi,0(%rdi)
+       #     *(uint32 *) (x + 20) = in4
+       movl    %edx,20(%rdi)
+       #     *(uint32 *) (x + 40) = in10
+       movl    %ecx,40(%rdi)
+       #     *(uint32 *) (x + 60) = in14
+       movl    %r8d,60(%rdi)
+#   keysetupdone:
+._keysetupdone:
+       # leave
+       add     %r11,%rsp
+       mov     %rdi,%rax
+       mov     %rsi,%rdx
+       ret
+# enter ECRYPT_ivsetup
+.text
+.p2align 5
+.globl ECRYPT_ivsetup
+ECRYPT_ivsetup:
+       mov     %rsp,%r11
+       and     $31,%r11
+       add     $256,%r11
+       sub     %r11,%rsp
+       #   iv = arg2
+       mov     %rsi,%rsi
+       #   x = arg1
+       mov     %rdi,%rdi
+       #   in6 = *(uint64 *) (iv + 0)
+       movq    0(%rsi),%rsi
+       #   in8 = 0
+       mov     $0,%r8
+       #   *(uint64 *) (x + 24) = in6
+       movq    %rsi,24(%rdi)
+       #   *(uint64 *) (x + 32) = in8
+       movq    %r8,32(%rdi)
+       # leave
+       add     %r11,%rsp
+       mov     %rdi,%rax
+       mov     %rsi,%rdx
+       ret
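
For readers tracing the byte offsets above: ECRYPT_keysetup and ECRYPT_ivsetup only populate the 16-word Salsa20 state (struct salsa20_ctx in the glue code below). The constants stored at offsets 0, 20, 40 and 60 are the little-endian words of "expand 32-byte k" / "expand 16-byte k". Below is a minimal C sketch of the equivalent setup, with word indices derived from the byte offsets in the assembly; it is an illustration, not part of the patch.

/*
 * Reference sketch of the state written by the assembly above
 * (word indices derived from the byte offsets; little-endian, as on x86).
 * Illustration only, not part of this patch.
 */
#include <stdint.h>
#include <string.h>

static void salsa20_keysetup_ref(uint32_t x[16], const uint8_t *k,
				 unsigned int kbits)
{
	/* x[1..4]: first 16 key bytes (asm: x + 4 .. x + 19) */
	memcpy(&x[1], k, 16);
	/* x[11..14]: second 16 key bytes for 256-bit keys, or the same
	 * 16 bytes again for 128-bit keys (asm: x + 44 .. x + 59) */
	memcpy(&x[11], kbits == 256 ? k + 16 : k, 16);

	/* "expand 32-byte k" / "expand 16-byte k" constants
	 * (asm: x + 0, x + 20, x + 40, x + 60) */
	x[0]  = 0x61707865;                             /* "expa"        */
	x[5]  = kbits == 256 ? 0x3320646e : 0x3120646e; /* "nd 3"/"nd 1" */
	x[10] = kbits == 256 ? 0x79622d32 : 0x79622d36; /* "2-by"/"6-by" */
	x[15] = 0x6b206574;                             /* "te k"        */
}

static void salsa20_ivsetup_ref(uint32_t x[16], const uint8_t *iv)
{
	memcpy(&x[6], iv, 8);   /* x[6..7]: 8-byte IV (asm: x + 24)      */
	x[8] = 0;               /* x[8..9]: block counter, reset to zero */
	x[9] = 0;               /*          (asm: x + 32)                */
}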
diff --git a/arch/x86/crypto/salsa20_glue.c b/arch/x86/crypto/salsa20_glue.c
new file mode 100644 (file)
index 0000000..bccb76d
--- /dev/null
@@ -0,0 +1,129 @@
+/*
+ * Glue code for the optimized assembly version of Salsa20.
+ *
+ * Copyright (c) 2007 Tan Swee Heng <thesweeheng@gmail.com>
+ *
+ * The assembly code is public-domain code written by Daniel J.
+ * Bernstein <djb@cr.yp.to>.  It has been modified to add indentation
+ * and to remove extraneous comments and functions that are not needed.
+ * - i586 version, renamed as salsa20-i586-asm_32.S
+ *   available from <http://cr.yp.to/snuffle/salsa20/x86-pm/salsa20.s>
+ * - x86-64 version, renamed as salsa20-x86_64-asm_64.S
+ *   available from <http://cr.yp.to/snuffle/salsa20/amd64-3/salsa20.s>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/algapi.h>
+#include <linux/module.h>
+#include <linux/crypto.h>
+
+#define SALSA20_IV_SIZE        8U
+#define SALSA20_MIN_KEY_SIZE  16U
+#define SALSA20_MAX_KEY_SIZE  32U
+
+/* use the ECRYPT_* function names */
+#define salsa20_keysetup        ECRYPT_keysetup
+#define salsa20_ivsetup         ECRYPT_ivsetup
+#define salsa20_encrypt_bytes   ECRYPT_encrypt_bytes
+
+struct salsa20_ctx
+{
+       u32 input[16];
+};
+
+asmlinkage void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k,
+                                u32 keysize, u32 ivsize);
+asmlinkage void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv);
+asmlinkage void salsa20_encrypt_bytes(struct salsa20_ctx *ctx,
+                                     const u8 *src, u8 *dst, u32 bytes);
+
+static int setkey(struct crypto_tfm *tfm, const u8 *key,
+                 unsigned int keysize)
+{
+       struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm);
+       salsa20_keysetup(ctx, key, keysize*8, SALSA20_IV_SIZE*8);
+       return 0;
+}
+
+static int encrypt(struct blkcipher_desc *desc,
+                  struct scatterlist *dst, struct scatterlist *src,
+                  unsigned int nbytes)
+{
+       struct blkcipher_walk walk;
+       struct crypto_blkcipher *tfm = desc->tfm;
+       struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm);
+       int err;
+
+       blkcipher_walk_init(&walk, dst, src, nbytes);
+       err = blkcipher_walk_virt_block(desc, &walk, 64);
+
+       salsa20_ivsetup(ctx, walk.iv);
+
+       if (likely(walk.nbytes == nbytes))
+       {
+               salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
+                                     walk.dst.virt.addr, nbytes);
+               return blkcipher_walk_done(desc, &walk, 0);
+       }
+
+       while (walk.nbytes >= 64) {
+               salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
+                                     walk.dst.virt.addr,
+                                     walk.nbytes - (walk.nbytes % 64));
+               err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64);
+       }
+
+       if (walk.nbytes) {
+               salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
+                                     walk.dst.virt.addr, walk.nbytes);
+               err = blkcipher_walk_done(desc, &walk, 0);
+       }
+
+       return err;
+}
+
+static struct crypto_alg alg = {
+       .cra_name           =   "salsa20",
+       .cra_driver_name    =   "salsa20-asm",
+       .cra_priority       =   200,
+       .cra_flags          =   CRYPTO_ALG_TYPE_BLKCIPHER,
+       .cra_type           =   &crypto_blkcipher_type,
+       .cra_blocksize      =   1,
+       .cra_ctxsize        =   sizeof(struct salsa20_ctx),
+       .cra_alignmask      =   3,
+       .cra_module         =   THIS_MODULE,
+       .cra_list           =   LIST_HEAD_INIT(alg.cra_list),
+       .cra_u              =   {
+               .blkcipher = {
+                       .setkey         =   setkey,
+                       .encrypt        =   encrypt,
+                       .decrypt        =   encrypt,
+                       .min_keysize    =   SALSA20_MIN_KEY_SIZE,
+                       .max_keysize    =   SALSA20_MAX_KEY_SIZE,
+                       .ivsize         =   SALSA20_IV_SIZE,
+               }
+       }
+};
+
+static int __init init(void)
+{
+       return crypto_register_alg(&alg);
+}
+
+static void __exit fini(void)
+{
+       crypto_unregister_alg(&alg);
+}
+
+module_init(init);
+module_exit(fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION ("Salsa20 stream cipher algorithm (optimized assembly version)");
+MODULE_ALIAS("salsa20");
+MODULE_ALIAS("salsa20-asm");
diff --git a/arch/x86/crypto/twofish_64.c b/arch/x86/crypto/twofish_64.c
deleted file mode 100644 (file)
index 182d91d..0000000
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Glue Code for optimized x86_64 assembler version of TWOFISH
- *
- * Originally Twofish for GPG
- * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
- * 256-bit key length added March 20, 1999
- * Some modifications to reduce the text size by Werner Koch, April, 1998
- * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com>
- * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net>
- *
- * The original author has disclaimed all copyright interest in this
- * code and thus put it in the public domain. The subsequent authors
- * have put this under the GNU General Public License.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
- * USA
- *
- * This code is a "clean room" implementation, written from the paper
- * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey,
- * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available
- * through http://www.counterpane.com/twofish.html
- *
- * For background information on multiplication in finite fields, used for
- * the matrix operations in the key schedule, see the book _Contemporary
- * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the
- * Third Edition.
- */
-
-#include <crypto/twofish.h>
-#include <linux/crypto.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/types.h>
-
-asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-
-static void twofish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-       twofish_enc_blk(tfm, dst, src);
-}
-
-static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
-       twofish_dec_blk(tfm, dst, src);
-}
-
-static struct crypto_alg alg = {
-       .cra_name               =       "twofish",
-       .cra_driver_name        =       "twofish-x86_64",
-       .cra_priority           =       200,
-       .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
-       .cra_blocksize          =       TF_BLOCK_SIZE,
-       .cra_ctxsize            =       sizeof(struct twofish_ctx),
-       .cra_alignmask          =       3,
-       .cra_module             =       THIS_MODULE,
-       .cra_list               =       LIST_HEAD_INIT(alg.cra_list),
-       .cra_u                  =       {
-               .cipher = {
-                       .cia_min_keysize        =       TF_MIN_KEY_SIZE,
-                       .cia_max_keysize        =       TF_MAX_KEY_SIZE,
-                       .cia_setkey             =       twofish_setkey,
-                       .cia_encrypt            =       twofish_encrypt,
-                       .cia_decrypt            =       twofish_decrypt
-               }
-       }
-};
-
-static int __init init(void)
-{
-       return crypto_register_alg(&alg);
-}
-
-static void __exit fini(void)
-{
-       crypto_unregister_alg(&alg);
-}
-
-module_init(init);
-module_exit(fini);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION ("Twofish Cipher Algorithm, x86_64 asm optimized");
-MODULE_ALIAS("twofish");
similarity index 94%
rename from arch/x86/crypto/twofish_32.c
rename to arch/x86/crypto/twofish_glue.c
index e3004df..cefaf8b 100644 (file)
@@ -1,5 +1,5 @@
 /*
- *  Glue Code for optimized 586 assembler version of TWOFISH
+ * Glue Code for assembler optimized version of TWOFISH
  *
  * Originally Twofish for GPG
  * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
@@ -44,7 +44,6 @@
 #include <linux/module.h>
 #include <linux/types.h>
 
-
 asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
 asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
 
@@ -60,7 +59,7 @@ static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 
 static struct crypto_alg alg = {
        .cra_name               =       "twofish",
-       .cra_driver_name        =       "twofish-i586",
+       .cra_driver_name        =       "twofish-asm",
        .cra_priority           =       200,
        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          =       TF_BLOCK_SIZE,
@@ -93,5 +92,6 @@ module_init(init);
 module_exit(fini);
 
 MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION ("Twofish Cipher Algorithm, i586 asm optimized");
+MODULE_DESCRIPTION ("Twofish Cipher Algorithm, asm optimized");
 MODULE_ALIAS("twofish");
+MODULE_ALIAS("twofish-asm");
index 083d2e1..c3166a1 100644 (file)
@@ -24,10 +24,6 @@ config CRYPTO_ALGAPI
        help
          This option provides the API for cryptographic algorithms.
 
-config CRYPTO_ABLKCIPHER
-       tristate
-       select CRYPTO_BLKCIPHER
-
 config CRYPTO_AEAD
        tristate
        select CRYPTO_ALGAPI
@@ -36,6 +32,15 @@ config CRYPTO_BLKCIPHER
        tristate
        select CRYPTO_ALGAPI
 
+config CRYPTO_SEQIV
+       tristate "Sequence Number IV Generator"
+       select CRYPTO_AEAD
+       select CRYPTO_BLKCIPHER
+       help
+         This IV generator derives an IV by XORing a sequence number with
+         a salt.  It is mainly useful for CTR and similar modes (a short
+         sketch of the idea follows this Kconfig section).
+
 config CRYPTO_HASH
        tristate
        select CRYPTO_ALGAPI
@@ -91,7 +96,7 @@ config CRYPTO_SHA1
          SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
 
 config CRYPTO_SHA256
-       tristate "SHA256 digest algorithm"
+       tristate "SHA224 and SHA256 digest algorithm"
        select CRYPTO_ALGAPI
        help
          SHA256 secure hash standard (DFIPS 180-2).
@@ -99,6 +104,9 @@ config CRYPTO_SHA256
          This version of SHA implements a 256 bit hash with 128 bits of
          security against collision attacks.
 
+          This code also includes SHA-224, a 224 bit hash with 112 bits
+          of security against collision attacks.
+
 config CRYPTO_SHA512
        tristate "SHA384 and SHA512 digest algorithms"
        select CRYPTO_ALGAPI
@@ -195,9 +203,34 @@ config CRYPTO_XTS
          key size 256, 384 or 512 bits. This implementation currently
          can't handle a sectorsize which is not a multiple of 16 bytes.
 
+config CRYPTO_CTR
+       tristate "CTR support"
+       select CRYPTO_BLKCIPHER
+       select CRYPTO_SEQIV
+       select CRYPTO_MANAGER
+       help
+         CTR: Counter mode
+         This block cipher mode is required for IPsec.
+
+config CRYPTO_GCM
+       tristate "GCM/GMAC support"
+       select CRYPTO_CTR
+       select CRYPTO_AEAD
+       select CRYPTO_GF128MUL
+       help
+         Support for Galois/Counter Mode (GCM) and Galois Message
+         Authentication Code (GMAC). Required for IPsec.
+
+config CRYPTO_CCM
+       tristate "CCM support"
+       select CRYPTO_CTR
+       select CRYPTO_AEAD
+       help
+         Support for Counter with CBC MAC. Required for IPsec.
+
 config CRYPTO_CRYPTD
        tristate "Software async crypto daemon"
-       select CRYPTO_ABLKCIPHER
+       select CRYPTO_BLKCIPHER
        select CRYPTO_MANAGER
        help
          This is a generic software asynchronous crypto daemon that
@@ -320,6 +353,7 @@ config CRYPTO_AES_586
        tristate "AES cipher algorithms (i586)"
        depends on (X86 || UML_X86) && !64BIT
        select CRYPTO_ALGAPI
+       select CRYPTO_AES
        help
          AES cipher algorithms (FIPS-197). AES uses the Rijndael 
          algorithm.
@@ -341,6 +375,7 @@ config CRYPTO_AES_X86_64
        tristate "AES cipher algorithms (x86_64)"
        depends on (X86 || UML_X86) && 64BIT
        select CRYPTO_ALGAPI
+       select CRYPTO_AES
        help
          AES cipher algorithms (FIPS-197). AES uses the Rijndael 
          algorithm.
@@ -441,6 +476,46 @@ config CRYPTO_SEED
          See also:
          <http://www.kisa.or.kr/kisa/seed/jsp/seed_eng.jsp>
 
+config CRYPTO_SALSA20
+       tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)"
+       depends on EXPERIMENTAL
+       select CRYPTO_BLKCIPHER
+       help
+         Salsa20 stream cipher algorithm.
+
+         Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT
+         Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/>
+
+         The Salsa20 stream cipher algorithm is designed by Daniel J.
+         Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
+
+config CRYPTO_SALSA20_586
+       tristate "Salsa20 stream cipher algorithm (i586) (EXPERIMENTAL)"
+       depends on (X86 || UML_X86) && !64BIT
+       depends on EXPERIMENTAL
+       select CRYPTO_BLKCIPHER
+       help
+         Salsa20 stream cipher algorithm.
+
+         Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT
+         Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/>
+
+         The Salsa20 stream cipher algorithm is designed by Daniel J.
+         Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
+
+config CRYPTO_SALSA20_X86_64
+       tristate "Salsa20 stream cipher algorithm (x86_64) (EXPERIMENTAL)"
+       depends on (X86 || UML_X86) && 64BIT
+       depends on EXPERIMENTAL
+       select CRYPTO_BLKCIPHER
+       help
+         Salsa20 stream cipher algorithm.
+
+         Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT
+         Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/>
+
+         The Salsa20 stream cipher algorithm is designed by Daniel J.
+         Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
 
 config CRYPTO_DEFLATE
        tristate "Deflate compression algorithm"
@@ -491,6 +566,7 @@ config CRYPTO_TEST
        tristate "Testing module"
        depends on m
        select CRYPTO_ALGAPI
+       select CRYPTO_AEAD
        help
          Quick & dirty crypto test module.
 
@@ -498,10 +574,19 @@ config CRYPTO_AUTHENC
        tristate "Authenc support"
        select CRYPTO_AEAD
        select CRYPTO_MANAGER
+       select CRYPTO_HASH
        help
          Authenc: Combined mode wrapper for IPsec.
          This is required for IPSec.
 
+config CRYPTO_LZO
+       tristate "LZO compression algorithm"
+       select CRYPTO_ALGAPI
+       select LZO_COMPRESS
+       select LZO_DECOMPRESS
+       help
+         This is the LZO algorithm.
+
 source "drivers/crypto/Kconfig"
 
 endif  # if CRYPTO
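
The CRYPTO_SEQIV entry above points here for a sketch of the idea: each IV is the per-transform salt XORed with a per-request sequence number, so IVs do not repeat under a given key until the sequence number wraps. The following is deliberately simplified and is not the crypto/seqiv.c implementation.

#include <linux/string.h>
#include <linux/types.h>

/* Conceptual sketch of sequence-number IV generation: IV = salt XOR seqno.
 * The real seqiv template operates on crypto requests; illustration only. */
static void seqiv_sketch(u8 *iv, unsigned int ivsize,
			 const u8 *salt, u64 seq)
{
	unsigned int i;

	memcpy(iv, salt, ivsize);	/* start from the salt */

	/* XOR the sequence number into the low-order end of the IV. */
	for (i = 0; i < sizeof(seq) && i < ivsize; i++)
		iv[ivsize - 1 - i] ^= (u8)(seq >> (8 * i));
}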
index 43c2a0d..48c7583 100644 (file)
@@ -8,9 +8,14 @@ crypto_algapi-$(CONFIG_PROC_FS) += proc.o
 crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y)
 obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o
 
-obj-$(CONFIG_CRYPTO_ABLKCIPHER) += ablkcipher.o
 obj-$(CONFIG_CRYPTO_AEAD) += aead.o
-obj-$(CONFIG_CRYPTO_BLKCIPHER) += blkcipher.o
+
+crypto_blkcipher-objs := ablkcipher.o
+crypto_blkcipher-objs += blkcipher.o
+obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o
+obj-$(CONFIG_CRYPTO_BLKCIPHER) += chainiv.o
+obj-$(CONFIG_CRYPTO_BLKCIPHER) += eseqiv.o
+obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
 crypto_hash-objs := hash.o
 obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
@@ -32,6 +37,9 @@ obj-$(CONFIG_CRYPTO_CBC) += cbc.o
 obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
 obj-$(CONFIG_CRYPTO_LRW) += lrw.o
 obj-$(CONFIG_CRYPTO_XTS) += xts.o
+obj-$(CONFIG_CRYPTO_CTR) += ctr.o
+obj-$(CONFIG_CRYPTO_GCM) += gcm.o
+obj-$(CONFIG_CRYPTO_CCM) += ccm.o
 obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
 obj-$(CONFIG_CRYPTO_DES) += des_generic.o
 obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o
@@ -48,10 +56,12 @@ obj-$(CONFIG_CRYPTO_TEA) += tea.o
 obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
 obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
 obj-$(CONFIG_CRYPTO_SEED) += seed.o
+obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
 obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
 obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
 obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
 obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
+obj-$(CONFIG_CRYPTO_LZO) += lzo.o
 
 obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
 
index 2731acb..3bcb099 100644 (file)
  *
  */
 
-#include <crypto/algapi.h>
-#include <linux/errno.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
+#include <linux/rtnetlink.h>
+#include <linux/sched.h>
 #include <linux/slab.h>
 #include <linux/seq_file.h>
 
+#include "internal.h"
+
 static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
                            unsigned int keylen)
 {
@@ -66,6 +70,16 @@ static unsigned int crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type,
        return alg->cra_ctxsize;
 }
 
+int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req)
+{
+       return crypto_ablkcipher_encrypt(&req->creq);
+}
+
+int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req)
+{
+       return crypto_ablkcipher_decrypt(&req->creq);
+}
+
 static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
                                      u32 mask)
 {
@@ -78,6 +92,11 @@ static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
        crt->setkey = setkey;
        crt->encrypt = alg->encrypt;
        crt->decrypt = alg->decrypt;
+       if (!alg->ivsize) {
+               crt->givencrypt = skcipher_null_givencrypt;
+               crt->givdecrypt = skcipher_null_givdecrypt;
+       }
+       crt->base = __crypto_ablkcipher_cast(tfm);
        crt->ivsize = alg->ivsize;
 
        return 0;
@@ -90,10 +109,13 @@ static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
        struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;
 
        seq_printf(m, "type         : ablkcipher\n");
+       seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
+                                            "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "min keysize  : %u\n", ablkcipher->min_keysize);
        seq_printf(m, "max keysize  : %u\n", ablkcipher->max_keysize);
        seq_printf(m, "ivsize       : %u\n", ablkcipher->ivsize);
+       seq_printf(m, "geniv        : %s\n", ablkcipher->geniv ?: "<default>");
 }
 
 const struct crypto_type crypto_ablkcipher_type = {
@@ -105,5 +127,220 @@ const struct crypto_type crypto_ablkcipher_type = {
 };
 EXPORT_SYMBOL_GPL(crypto_ablkcipher_type);
 
+static int no_givdecrypt(struct skcipher_givcrypt_request *req)
+{
+       return -ENOSYS;
+}
+
+static int crypto_init_givcipher_ops(struct crypto_tfm *tfm, u32 type,
+                                     u32 mask)
+{
+       struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
+       struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;
+
+       if (alg->ivsize > PAGE_SIZE / 8)
+               return -EINVAL;
+
+       crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
+                     alg->setkey : setkey;
+       crt->encrypt = alg->encrypt;
+       crt->decrypt = alg->decrypt;
+       crt->givencrypt = alg->givencrypt;
+       crt->givdecrypt = alg->givdecrypt ?: no_givdecrypt;
+       crt->base = __crypto_ablkcipher_cast(tfm);
+       crt->ivsize = alg->ivsize;
+
+       return 0;
+}
+
+static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
+       __attribute__ ((unused));
+static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
+{
+       struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;
+
+       seq_printf(m, "type         : givcipher\n");
+       seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
+                                            "yes" : "no");
+       seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+       seq_printf(m, "min keysize  : %u\n", ablkcipher->min_keysize);
+       seq_printf(m, "max keysize  : %u\n", ablkcipher->max_keysize);
+       seq_printf(m, "ivsize       : %u\n", ablkcipher->ivsize);
+       seq_printf(m, "geniv        : %s\n", ablkcipher->geniv ?: "<built-in>");
+}
+
+const struct crypto_type crypto_givcipher_type = {
+       .ctxsize = crypto_ablkcipher_ctxsize,
+       .init = crypto_init_givcipher_ops,
+#ifdef CONFIG_PROC_FS
+       .show = crypto_givcipher_show,
+#endif
+};
+EXPORT_SYMBOL_GPL(crypto_givcipher_type);
+
+const char *crypto_default_geniv(const struct crypto_alg *alg)
+{
+       return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
+}
+
+static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
+{
+       struct rtattr *tb[3];
+       struct {
+               struct rtattr attr;
+               struct crypto_attr_type data;
+       } ptype;
+       struct {
+               struct rtattr attr;
+               struct crypto_attr_alg data;
+       } palg;
+       struct crypto_template *tmpl;
+       struct crypto_instance *inst;
+       struct crypto_alg *larval;
+       const char *geniv;
+       int err;
+
+       larval = crypto_larval_lookup(alg->cra_driver_name,
+                                     CRYPTO_ALG_TYPE_GIVCIPHER,
+                                     CRYPTO_ALG_TYPE_MASK);
+       err = PTR_ERR(larval);
+       if (IS_ERR(larval))
+               goto out;
+
+       err = -EAGAIN;
+       if (!crypto_is_larval(larval))
+               goto drop_larval;
+
+       ptype.attr.rta_len = sizeof(ptype);
+       ptype.attr.rta_type = CRYPTOA_TYPE;
+       ptype.data.type = type | CRYPTO_ALG_GENIV;
+       /* GENIV tells the template that we're making a default geniv. */
+       ptype.data.mask = mask | CRYPTO_ALG_GENIV;
+       tb[0] = &ptype.attr;
+
+       palg.attr.rta_len = sizeof(palg);
+       palg.attr.rta_type = CRYPTOA_ALG;
+       /* Must use the exact name to locate ourselves. */
+       memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
+       tb[1] = &palg.attr;
+
+       tb[2] = NULL;
+
+       if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+           CRYPTO_ALG_TYPE_BLKCIPHER)
+               geniv = alg->cra_blkcipher.geniv;
+       else
+               geniv = alg->cra_ablkcipher.geniv;
+
+       if (!geniv)
+               geniv = crypto_default_geniv(alg);
+
+       tmpl = crypto_lookup_template(geniv);
+       err = -ENOENT;
+       if (!tmpl)
+               goto kill_larval;
+
+       inst = tmpl->alloc(tb);
+       err = PTR_ERR(inst);
+       if (IS_ERR(inst))
+               goto put_tmpl;
+
+       if ((err = crypto_register_instance(tmpl, inst))) {
+               tmpl->free(inst);
+               goto put_tmpl;
+       }
+
+       /* Redo the lookup to use the instance we just registered. */
+       err = -EAGAIN;
+
+put_tmpl:
+       crypto_tmpl_put(tmpl);
+kill_larval:
+       crypto_larval_kill(larval);
+drop_larval:
+       crypto_mod_put(larval);
+out:
+       crypto_mod_put(alg);
+       return err;
+}
+
+static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
+                                                u32 mask)
+{
+       struct crypto_alg *alg;
+
+       alg = crypto_alg_mod_lookup(name, type, mask);
+       if (IS_ERR(alg))
+               return alg;
+
+       if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+           CRYPTO_ALG_TYPE_GIVCIPHER)
+               return alg;
+
+       if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+             CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+                                         alg->cra_ablkcipher.ivsize))
+               return alg;
+
+       return ERR_PTR(crypto_givcipher_default(alg, type, mask));
+}
+
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
+                        u32 type, u32 mask)
+{
+       struct crypto_alg *alg;
+       int err;
+
+       type = crypto_skcipher_type(type);
+       mask = crypto_skcipher_mask(mask);
+
+       alg = crypto_lookup_skcipher(name, type, mask);
+       if (IS_ERR(alg))
+               return PTR_ERR(alg);
+
+       err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
+       crypto_mod_put(alg);
+       return err;
+}
+EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
+
+struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
+                                                 u32 type, u32 mask)
+{
+       struct crypto_tfm *tfm;
+       int err;
+
+       type = crypto_skcipher_type(type);
+       mask = crypto_skcipher_mask(mask);
+
+       for (;;) {
+               struct crypto_alg *alg;
+
+               alg = crypto_lookup_skcipher(alg_name, type, mask);
+               if (IS_ERR(alg)) {
+                       err = PTR_ERR(alg);
+                       goto err;
+               }
+
+               tfm = __crypto_alloc_tfm(alg, type, mask);
+               if (!IS_ERR(tfm))
+                       return __crypto_ablkcipher_cast(tfm);
+
+               crypto_mod_put(alg);
+               err = PTR_ERR(tfm);
+
+err:
+               if (err != -EAGAIN)
+                       break;
+               if (signal_pending(current)) {
+                       err = -EINTR;
+                       break;
+               }
+       }
+
+       return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Asynchronous block chaining cipher type");
index 84a3501..3a6f3f5 100644 (file)
  *
  */
 
-#include <crypto/algapi.h>
-#include <linux/errno.h>
+#include <crypto/internal/aead.h>
+#include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
+#include <linux/rtnetlink.h>
 #include <linux/slab.h>
 #include <linux/seq_file.h>
 
+#include "internal.h"
+
 static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
                            unsigned int keylen)
 {
@@ -53,25 +56,54 @@ static int setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen)
        return aead->setkey(tfm, key, keylen);
 }
 
+int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
+{
+       struct aead_tfm *crt = crypto_aead_crt(tfm);
+       int err;
+
+       if (authsize > crypto_aead_alg(tfm)->maxauthsize)
+               return -EINVAL;
+
+       if (crypto_aead_alg(tfm)->setauthsize) {
+               err = crypto_aead_alg(tfm)->setauthsize(crt->base, authsize);
+               if (err)
+                       return err;
+       }
+
+       crypto_aead_crt(crt->base)->authsize = authsize;
+       crt->authsize = authsize;
+       return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);
+
 static unsigned int crypto_aead_ctxsize(struct crypto_alg *alg, u32 type,
                                        u32 mask)
 {
        return alg->cra_ctxsize;
 }
 
+static int no_givcrypt(struct aead_givcrypt_request *req)
+{
+       return -ENOSYS;
+}
+
 static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
 {
        struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
        struct aead_tfm *crt = &tfm->crt_aead;
 
-       if (max(alg->authsize, alg->ivsize) > PAGE_SIZE / 8)
+       if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
                return -EINVAL;
 
-       crt->setkey = setkey;
+       crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
+                     alg->setkey : setkey;
        crt->encrypt = alg->encrypt;
        crt->decrypt = alg->decrypt;
+       crt->givencrypt = alg->givencrypt ?: no_givcrypt;
+       crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
+       crt->base = __crypto_aead_cast(tfm);
        crt->ivsize = alg->ivsize;
-       crt->authsize = alg->authsize;
+       crt->authsize = alg->maxauthsize;
 
        return 0;
 }
@@ -83,9 +115,12 @@ static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
        struct aead_alg *aead = &alg->cra_aead;
 
        seq_printf(m, "type         : aead\n");
+       seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
+                                            "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "ivsize       : %u\n", aead->ivsize);
-       seq_printf(m, "authsize     : %u\n", aead->authsize);
+       seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
+       seq_printf(m, "geniv        : %s\n", aead->geniv ?: "<built-in>");
 }
 
 const struct crypto_type crypto_aead_type = {
@@ -97,5 +132,358 @@ const struct crypto_type crypto_aead_type = {
 };
 EXPORT_SYMBOL_GPL(crypto_aead_type);
 
+static int aead_null_givencrypt(struct aead_givcrypt_request *req)
+{
+       return crypto_aead_encrypt(&req->areq);
+}
+
+static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
+{
+       return crypto_aead_decrypt(&req->areq);
+}
+
+static int crypto_init_nivaead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+{
+       struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
+       struct aead_tfm *crt = &tfm->crt_aead;
+
+       if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
+               return -EINVAL;
+
+       crt->setkey = setkey;
+       crt->encrypt = alg->encrypt;
+       crt->decrypt = alg->decrypt;
+       if (!alg->ivsize) {
+               crt->givencrypt = aead_null_givencrypt;
+               crt->givdecrypt = aead_null_givdecrypt;
+       }
+       crt->base = __crypto_aead_cast(tfm);
+       crt->ivsize = alg->ivsize;
+       crt->authsize = alg->maxauthsize;
+
+       return 0;
+}
+
+static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
+       __attribute__ ((unused));
+static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
+{
+       struct aead_alg *aead = &alg->cra_aead;
+
+       seq_printf(m, "type         : nivaead\n");
+       seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
+                                            "yes" : "no");
+       seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+       seq_printf(m, "ivsize       : %u\n", aead->ivsize);
+       seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
+       seq_printf(m, "geniv        : %s\n", aead->geniv);
+}
+
+const struct crypto_type crypto_nivaead_type = {
+       .ctxsize = crypto_aead_ctxsize,
+       .init = crypto_init_nivaead_ops,
+#ifdef CONFIG_PROC_FS
+       .show = crypto_nivaead_show,
+#endif
+};
+EXPORT_SYMBOL_GPL(crypto_nivaead_type);
+
+static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
+                              const char *name, u32 type, u32 mask)
+{
+       struct crypto_alg *alg;
+       int err;
+
+       type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       type |= CRYPTO_ALG_TYPE_AEAD;
+       mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV;
+
+       alg = crypto_alg_mod_lookup(name, type, mask);
+       if (IS_ERR(alg))
+               return PTR_ERR(alg);
+
+       err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
+       crypto_mod_put(alg);
+       return err;
+}
+
+struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+                                        struct rtattr **tb, u32 type,
+                                        u32 mask)
+{
+       const char *name;
+       struct crypto_aead_spawn *spawn;
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       struct crypto_alg *alg;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
+           algt->mask)
+               return ERR_PTR(-EINVAL);
+
+       name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(name);
+       if (IS_ERR(name))
+               return ERR_PTR(err);
+
+       inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+       if (!inst)
+               return ERR_PTR(-ENOMEM);
+
+       spawn = crypto_instance_ctx(inst);
+
+       /* Ignore async algorithms if necessary. */
+       mask |= crypto_requires_sync(algt->type, algt->mask);
+
+       crypto_set_aead_spawn(spawn, inst);
+       err = crypto_grab_nivaead(spawn, name, type, mask);
+       if (err)
+               goto err_free_inst;
+
+       alg = crypto_aead_spawn_alg(spawn);
+
+       err = -EINVAL;
+       if (!alg->cra_aead.ivsize)
+               goto err_drop_alg;
+
+       /*
+        * This is only true if we're constructing an algorithm with its
+        * default IV generator.  For the default generator we elide the
+        * template name and double-check the IV generator.
+        */
+       if (algt->mask & CRYPTO_ALG_GENIV) {
+               if (strcmp(tmpl->name, alg->cra_aead.geniv))
+                       goto err_drop_alg;
+
+               memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
+               memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
+                      CRYPTO_MAX_ALG_NAME);
+       } else {
+               err = -ENAMETOOLONG;
+               if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+                            "%s(%s)", tmpl->name, alg->cra_name) >=
+                   CRYPTO_MAX_ALG_NAME)
+                       goto err_drop_alg;
+               if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                            "%s(%s)", tmpl->name, alg->cra_driver_name) >=
+                   CRYPTO_MAX_ALG_NAME)
+                       goto err_drop_alg;
+       }
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV;
+       inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
+       inst->alg.cra_priority = alg->cra_priority;
+       inst->alg.cra_blocksize = alg->cra_blocksize;
+       inst->alg.cra_alignmask = alg->cra_alignmask;
+       inst->alg.cra_type = &crypto_aead_type;
+
+       inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
+       inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
+       inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
+
+       inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
+       inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
+       inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt;
+       inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt;
+
+out:
+       return inst;
+
+err_drop_alg:
+       crypto_drop_aead(spawn);
+err_free_inst:
+       kfree(inst);
+       inst = ERR_PTR(err);
+       goto out;
+}
+EXPORT_SYMBOL_GPL(aead_geniv_alloc);
+
+void aead_geniv_free(struct crypto_instance *inst)
+{
+       crypto_drop_aead(crypto_instance_ctx(inst));
+       kfree(inst);
+}
+EXPORT_SYMBOL_GPL(aead_geniv_free);
+
+int aead_geniv_init(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct crypto_aead *aead;
+
+       aead = crypto_spawn_aead(crypto_instance_ctx(inst));
+       if (IS_ERR(aead))
+               return PTR_ERR(aead);
+
+       tfm->crt_aead.base = aead;
+       tfm->crt_aead.reqsize += crypto_aead_reqsize(aead);
+
+       return 0;
+}
+EXPORT_SYMBOL_GPL(aead_geniv_init);
+
+void aead_geniv_exit(struct crypto_tfm *tfm)
+{
+       crypto_free_aead(tfm->crt_aead.base);
+}
+EXPORT_SYMBOL_GPL(aead_geniv_exit);
+
+static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
+{
+       struct rtattr *tb[3];
+       struct {
+               struct rtattr attr;
+               struct crypto_attr_type data;
+       } ptype;
+       struct {
+               struct rtattr attr;
+               struct crypto_attr_alg data;
+       } palg;
+       struct crypto_template *tmpl;
+       struct crypto_instance *inst;
+       struct crypto_alg *larval;
+       const char *geniv;
+       int err;
+
+       larval = crypto_larval_lookup(alg->cra_driver_name,
+                                     CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
+                                     CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       err = PTR_ERR(larval);
+       if (IS_ERR(larval))
+               goto out;
+
+       err = -EAGAIN;
+       if (!crypto_is_larval(larval))
+               goto drop_larval;
+
+       ptype.attr.rta_len = sizeof(ptype);
+       ptype.attr.rta_type = CRYPTOA_TYPE;
+       ptype.data.type = type | CRYPTO_ALG_GENIV;
+       /* GENIV tells the template that we're making a default geniv. */
+       ptype.data.mask = mask | CRYPTO_ALG_GENIV;
+       tb[0] = &ptype.attr;
+
+       palg.attr.rta_len = sizeof(palg);
+       palg.attr.rta_type = CRYPTOA_ALG;
+       /* Must use the exact name to locate ourselves. */
+       memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
+       tb[1] = &palg.attr;
+
+       tb[2] = NULL;
+
+       geniv = alg->cra_aead.geniv;
+
+       tmpl = crypto_lookup_template(geniv);
+       err = -ENOENT;
+       if (!tmpl)
+               goto kill_larval;
+
+       inst = tmpl->alloc(tb);
+       err = PTR_ERR(inst);
+       if (IS_ERR(inst))
+               goto put_tmpl;
+
+       if ((err = crypto_register_instance(tmpl, inst))) {
+               tmpl->free(inst);
+               goto put_tmpl;
+       }
+
+       /* Redo the lookup to use the instance we just registered. */
+       err = -EAGAIN;
+
+put_tmpl:
+       crypto_tmpl_put(tmpl);
+kill_larval:
+       crypto_larval_kill(larval);
+drop_larval:
+       crypto_mod_put(larval);
+out:
+       crypto_mod_put(alg);
+       return err;
+}
+
+static struct crypto_alg *crypto_lookup_aead(const char *name, u32 type,
+                                            u32 mask)
+{
+       struct crypto_alg *alg;
+
+       alg = crypto_alg_mod_lookup(name, type, mask);
+       if (IS_ERR(alg))
+               return alg;
+
+       if (alg->cra_type == &crypto_aead_type)
+               return alg;
+
+       if (!alg->cra_aead.ivsize)
+               return alg;
+
+       return ERR_PTR(crypto_nivaead_default(alg, type, mask));
+}
+
+int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
+                    u32 type, u32 mask)
+{
+       struct crypto_alg *alg;
+       int err;
+
+       type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       type |= CRYPTO_ALG_TYPE_AEAD;
+       mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       mask |= CRYPTO_ALG_TYPE_MASK;
+
+       alg = crypto_lookup_aead(name, type, mask);
+       if (IS_ERR(alg))
+               return PTR_ERR(alg);
+
+       err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
+       crypto_mod_put(alg);
+       return err;
+}
+EXPORT_SYMBOL_GPL(crypto_grab_aead);
+
+struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
+{
+       struct crypto_tfm *tfm;
+       int err;
+
+       type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       type |= CRYPTO_ALG_TYPE_AEAD;
+       mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       mask |= CRYPTO_ALG_TYPE_MASK;
+
+       for (;;) {
+               struct crypto_alg *alg;
+
+               alg = crypto_lookup_aead(alg_name, type, mask);
+               if (IS_ERR(alg)) {
+                       err = PTR_ERR(alg);
+                       goto err;
+               }
+
+               tfm = __crypto_alloc_tfm(alg, type, mask);
+               if (!IS_ERR(tfm))
+                       return __crypto_aead_cast(tfm);
+
+               crypto_mod_put(alg);
+               err = PTR_ERR(tfm);
+
+err:
+               if (err != -EAGAIN)
+                       break;
+               if (signal_pending(current)) {
+                       err = -EINTR;
+                       break;
+               }
+       }
+
+       return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_aead);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");
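/*
 * Editor's illustrative sketch, not part of the patch: how a caller would
 * obtain one of the AEAD transforms exposed above by name.  The algorithm
 * string "authenc(hmac(sha1),cbc(aes))" and the function example_get_aead()
 * are examples only, with error handling reduced to the minimum.
 */
static int example_get_aead(void)
{
	struct crypto_aead *tfm;

	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* crypto_aead_setkey()/crypto_aead_encrypt() would be used here. */

	crypto_free_aead(tfm);
	return 0;
}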
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index 9401dca..cf30af7 100644
  * ---------------------------------------------------------------------------
  */
 
-/* Some changes from the Gladman version:
-    s/RIJNDAEL(e_key)/E_KEY/g
-    s/RIJNDAEL(d_key)/D_KEY/g
-*/
-
+#include <crypto/aes.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include <linux/types.h>
 #include <linux/crypto.h>
 #include <asm/byteorder.h>
 
-#define AES_MIN_KEY_SIZE       16
-#define AES_MAX_KEY_SIZE       32
-
-#define AES_BLOCK_SIZE         16
-
-/*
- * #define byte(x, nr) ((unsigned char)((x) >> (nr*8))) 
- */
-static inline u8
-byte(const u32 x, const unsigned n)
+static inline u8 byte(const u32 x, const unsigned n)
 {
        return x >> (n << 3);
 }
 
-struct aes_ctx {
-       int key_length;
-       u32 buf[120];
-};
-
-#define E_KEY (&ctx->buf[0])
-#define D_KEY (&ctx->buf[60])
-
 static u8 pow_tab[256] __initdata;
 static u8 log_tab[256] __initdata;
 static u8 sbx_tab[256] __initdata;
 static u8 isb_tab[256] __initdata;
 static u32 rco_tab[10];
-static u32 ft_tab[4][256];
-static u32 it_tab[4][256];
 
-static u32 fl_tab[4][256];
-static u32 il_tab[4][256];
+u32 crypto_ft_tab[4][256];
+u32 crypto_fl_tab[4][256];
+u32 crypto_it_tab[4][256];
+u32 crypto_il_tab[4][256];
 
-static inline u8 __init
-f_mult (u8 a, u8 b)
+EXPORT_SYMBOL_GPL(crypto_ft_tab);
+EXPORT_SYMBOL_GPL(crypto_fl_tab);
+EXPORT_SYMBOL_GPL(crypto_it_tab);
+EXPORT_SYMBOL_GPL(crypto_il_tab);
+
+static inline u8 __init f_mult(u8 a, u8 b)
 {
        u8 aa = log_tab[a], cc = aa + log_tab[b];
 
        return pow_tab[cc + (cc < aa ? 1 : 0)];
 }
 
-#define ff_mult(a,b)    (a && b ? f_mult(a, b) : 0)
-
-#define f_rn(bo, bi, n, k)                                     \
-    bo[n] =  ft_tab[0][byte(bi[n],0)] ^                                \
-             ft_tab[1][byte(bi[(n + 1) & 3],1)] ^              \
-             ft_tab[2][byte(bi[(n + 2) & 3],2)] ^              \
-             ft_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n)
-
-#define i_rn(bo, bi, n, k)                                     \
-    bo[n] =  it_tab[0][byte(bi[n],0)] ^                                \
-             it_tab[1][byte(bi[(n + 3) & 3],1)] ^              \
-             it_tab[2][byte(bi[(n + 2) & 3],2)] ^              \
-             it_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n)
-
-#define ls_box(x)                              \
-    ( fl_tab[0][byte(x, 0)] ^                  \
-      fl_tab[1][byte(x, 1)] ^                  \
-      fl_tab[2][byte(x, 2)] ^                  \
-      fl_tab[3][byte(x, 3)] )
-
-#define f_rl(bo, bi, n, k)                                     \
-    bo[n] =  fl_tab[0][byte(bi[n],0)] ^                                \
-             fl_tab[1][byte(bi[(n + 1) & 3],1)] ^              \
-             fl_tab[2][byte(bi[(n + 2) & 3],2)] ^              \
-             fl_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n)
-
-#define i_rl(bo, bi, n, k)                                     \
-    bo[n] =  il_tab[0][byte(bi[n],0)] ^                                \
-             il_tab[1][byte(bi[(n + 3) & 3],1)] ^              \
-             il_tab[2][byte(bi[(n + 2) & 3],2)] ^              \
-             il_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n)
-
-static void __init
-gen_tabs (void)
+#define ff_mult(a, b)  (a && b ? f_mult(a, b) : 0)
+
+static void __init gen_tabs(void)
 {
        u32 i, t;
        u8 p, q;
 
-       /* log and power tables for GF(2**8) finite field with
-          0x011b as modular polynomial - the simplest primitive
-          root is 0x03, used here to generate the tables */
+       /*
+        * log and power tables for GF(2**8) finite field with
+        * 0x011b as modular polynomial - the simplest primitive
+        * root is 0x03, used here to generate the tables
+        */
 
        for (i = 0, p = 1; i < 256; ++i) {
                pow_tab[i] = (u8) p;
@@ -169,92 +123,119 @@ gen_tabs (void)
                p = sbx_tab[i];
 
                t = p;
-               fl_tab[0][i] = t;
-               fl_tab[1][i] = rol32(t, 8);
-               fl_tab[2][i] = rol32(t, 16);
-               fl_tab[3][i] = rol32(t, 24);
+               crypto_fl_tab[0][i] = t;
+               crypto_fl_tab[1][i] = rol32(t, 8);
+               crypto_fl_tab[2][i] = rol32(t, 16);
+               crypto_fl_tab[3][i] = rol32(t, 24);
 
-               t = ((u32) ff_mult (2, p)) |
+               t = ((u32) ff_mult(2, p)) |
                    ((u32) p << 8) |
-                   ((u32) p << 16) | ((u32) ff_mult (3, p) << 24);
+                   ((u32) p << 16) | ((u32) ff_mult(3, p) << 24);
 
-               ft_tab[0][i] = t;
-               ft_tab[1][i] = rol32(t, 8);
-               ft_tab[2][i] = rol32(t, 16);
-               ft_tab[3][i] = rol32(t, 24);
+               crypto_ft_tab[0][i] = t;
+               crypto_ft_tab[1][i] = rol32(t, 8);
+               crypto_ft_tab[2][i] = rol32(t, 16);
+               crypto_ft_tab[3][i] = rol32(t, 24);
 
                p = isb_tab[i];
 
                t = p;
-               il_tab[0][i] = t;
-               il_tab[1][i] = rol32(t, 8);
-               il_tab[2][i] = rol32(t, 16);
-               il_tab[3][i] = rol32(t, 24);
-
-               t = ((u32) ff_mult (14, p)) |
-                   ((u32) ff_mult (9, p) << 8) |
-                   ((u32) ff_mult (13, p) << 16) |
-                   ((u32) ff_mult (11, p) << 24);
-
-               it_tab[0][i] = t;
-               it_tab[1][i] = rol32(t, 8);
-               it_tab[2][i] = rol32(t, 16);
-               it_tab[3][i] = rol32(t, 24);
+               crypto_il_tab[0][i] = t;
+               crypto_il_tab[1][i] = rol32(t, 8);
+               crypto_il_tab[2][i] = rol32(t, 16);
+               crypto_il_tab[3][i] = rol32(t, 24);
+
+               t = ((u32) ff_mult(14, p)) |
+                   ((u32) ff_mult(9, p) << 8) |
+                   ((u32) ff_mult(13, p) << 16) |
+                   ((u32) ff_mult(11, p) << 24);
+
+               crypto_it_tab[0][i] = t;
+               crypto_it_tab[1][i] = rol32(t, 8);
+               crypto_it_tab[2][i] = rol32(t, 16);
+               crypto_it_tab[3][i] = rol32(t, 24);
        }
 }
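/*
 * Editor's illustrative sketch, not part of the patch: the tables filled in
 * above implement multiplication in GF(2**8) modulo 0x11b through
 * log/antilog lookups.  The same product can be computed bit by bit, which
 * is handy for sanity-checking f_mult(); gf256_mul_slow() is a hypothetical
 * helper, not a kernel symbol.
 */
static u8 gf256_mul_slow(u8 a, u8 b)
{
	u8 p = 0;

	while (b) {
		if (b & 1)
			p ^= a;
		/* multiply a by x and reduce modulo x^8 + x^4 + x^3 + x + 1 */
		a = (a << 1) ^ ((a & 0x80) ? 0x1b : 0);
		b >>= 1;
	}
	return p;
}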
 
-#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
-
-#define imix_col(y,x)       \
-    u   = star_x(x);        \
-    v   = star_x(u);        \
-    w   = star_x(v);        \
-    t   = w ^ (x);          \
-   (y)  = u ^ v ^ w;        \
-   (y) ^= ror32(u ^ t,  8) ^ \
-          ror32(v ^ t, 16) ^ \
-          ror32(t,24)
-
 /* initialise the key schedule from the user supplied key */
 
-#define loop4(i)                                    \
-{   t = ror32(t,  8); t = ls_box(t) ^ rco_tab[i];    \
-    t ^= E_KEY[4 * i];     E_KEY[4 * i + 4] = t;    \
-    t ^= E_KEY[4 * i + 1]; E_KEY[4 * i + 5] = t;    \
-    t ^= E_KEY[4 * i + 2]; E_KEY[4 * i + 6] = t;    \
-    t ^= E_KEY[4 * i + 3]; E_KEY[4 * i + 7] = t;    \
-}
-
-#define loop6(i)                                    \
-{   t = ror32(t,  8); t = ls_box(t) ^ rco_tab[i];    \
-    t ^= E_KEY[6 * i];     E_KEY[6 * i + 6] = t;    \
-    t ^= E_KEY[6 * i + 1]; E_KEY[6 * i + 7] = t;    \
-    t ^= E_KEY[6 * i + 2]; E_KEY[6 * i + 8] = t;    \
-    t ^= E_KEY[6 * i + 3]; E_KEY[6 * i + 9] = t;    \
-    t ^= E_KEY[6 * i + 4]; E_KEY[6 * i + 10] = t;   \
-    t ^= E_KEY[6 * i + 5]; E_KEY[6 * i + 11] = t;   \
-}
-
-#define loop8(i)                                    \
-{   t = ror32(t,  8); ; t = ls_box(t) ^ rco_tab[i];  \
-    t ^= E_KEY[8 * i];     E_KEY[8 * i + 8] = t;    \
-    t ^= E_KEY[8 * i + 1]; E_KEY[8 * i + 9] = t;    \
-    t ^= E_KEY[8 * i + 2]; E_KEY[8 * i + 10] = t;   \
-    t ^= E_KEY[8 * i + 3]; E_KEY[8 * i + 11] = t;   \
-    t  = E_KEY[8 * i + 4] ^ ls_box(t);    \
-    E_KEY[8 * i + 12] = t;                \
-    t ^= E_KEY[8 * i + 5]; E_KEY[8 * i + 13] = t;   \
-    t ^= E_KEY[8 * i + 6]; E_KEY[8 * i + 14] = t;   \
-    t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t;   \
-}
+#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
 
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-                      unsigned int key_len)
+#define imix_col(y,x)  do {            \
+       u       = star_x(x);            \
+       v       = star_x(u);            \
+       w       = star_x(v);            \
+       t       = w ^ (x);              \
+       (y)     = u ^ v ^ w;            \
+       (y)     ^= ror32(u ^ t, 8) ^    \
+               ror32(v ^ t, 16) ^      \
+               ror32(t, 24);           \
+} while (0)
+
+#define ls_box(x)              \
+       crypto_fl_tab[0][byte(x, 0)] ^  \
+       crypto_fl_tab[1][byte(x, 1)] ^  \
+       crypto_fl_tab[2][byte(x, 2)] ^  \
+       crypto_fl_tab[3][byte(x, 3)]
+
+#define loop4(i)       do {            \
+       t = ror32(t, 8);                \
+       t = ls_box(t) ^ rco_tab[i];     \
+       t ^= ctx->key_enc[4 * i];               \
+       ctx->key_enc[4 * i + 4] = t;            \
+       t ^= ctx->key_enc[4 * i + 1];           \
+       ctx->key_enc[4 * i + 5] = t;            \
+       t ^= ctx->key_enc[4 * i + 2];           \
+       ctx->key_enc[4 * i + 6] = t;            \
+       t ^= ctx->key_enc[4 * i + 3];           \
+       ctx->key_enc[4 * i + 7] = t;            \
+} while (0)
+
+#define loop6(i)       do {            \
+       t = ror32(t, 8);                \
+       t = ls_box(t) ^ rco_tab[i];     \
+       t ^= ctx->key_enc[6 * i];               \
+       ctx->key_enc[6 * i + 6] = t;            \
+       t ^= ctx->key_enc[6 * i + 1];           \
+       ctx->key_enc[6 * i + 7] = t;            \
+       t ^= ctx->key_enc[6 * i + 2];           \
+       ctx->key_enc[6 * i + 8] = t;            \
+       t ^= ctx->key_enc[6 * i + 3];           \
+       ctx->key_enc[6 * i + 9] = t;            \
+       t ^= ctx->key_enc[6 * i + 4];           \
+       ctx->key_enc[6 * i + 10] = t;           \
+       t ^= ctx->key_enc[6 * i + 5];           \
+       ctx->key_enc[6 * i + 11] = t;           \
+} while (0)
+
+#define loop8(i)       do {                    \
+       t = ror32(t, 8);                        \
+       t = ls_box(t) ^ rco_tab[i];             \
+       t ^= ctx->key_enc[8 * i];                       \
+       ctx->key_enc[8 * i + 8] = t;                    \
+       t ^= ctx->key_enc[8 * i + 1];                   \
+       ctx->key_enc[8 * i + 9] = t;                    \
+       t ^= ctx->key_enc[8 * i + 2];                   \
+       ctx->key_enc[8 * i + 10] = t;                   \
+       t ^= ctx->key_enc[8 * i + 3];                   \
+       ctx->key_enc[8 * i + 11] = t;                   \
+       t  = ctx->key_enc[8 * i + 4] ^ ls_box(t);       \
+       ctx->key_enc[8 * i + 12] = t;                   \
+       t ^= ctx->key_enc[8 * i + 5];                   \
+       ctx->key_enc[8 * i + 13] = t;                   \
+       t ^= ctx->key_enc[8 * i + 6];                   \
+       ctx->key_enc[8 * i + 14] = t;                   \
+       t ^= ctx->key_enc[8 * i + 7];                   \
+       ctx->key_enc[8 * i + 15] = t;                   \
+} while (0)
+
+int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+               unsigned int key_len)
 {
-       struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *key = (const __le32 *)in_key;
        u32 *flags = &tfm->crt_flags;
-       u32 i, t, u, v, w;
+       u32 i, t, u, v, w, j;
 
        if (key_len % 8) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
@@ -263,95 +244,113 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 
        ctx->key_length = key_len;
 
-       E_KEY[0] = le32_to_cpu(key[0]);
-       E_KEY[1] = le32_to_cpu(key[1]);
-       E_KEY[2] = le32_to_cpu(key[2]);
-       E_KEY[3] = le32_to_cpu(key[3]);
+       ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]);
+       ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]);
+       ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]);
+       ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]);
 
        switch (key_len) {
        case 16:
-               t = E_KEY[3];
+               t = ctx->key_enc[3];
                for (i = 0; i < 10; ++i)
-                       loop4 (i);
+                       loop4(i);
                break;
 
        case 24:
-               E_KEY[4] = le32_to_cpu(key[4]);
-               t = E_KEY[5] = le32_to_cpu(key[5]);
+               ctx->key_enc[4] = le32_to_cpu(key[4]);
+               t = ctx->key_enc[5] = le32_to_cpu(key[5]);
                for (i = 0; i < 8; ++i)
-                       loop6 (i);
+                       loop6(i);
                break;
 
        case 32:
-               E_KEY[4] = le32_to_cpu(key[4]);
-               E_KEY[5] = le32_to_cpu(key[5]);
-               E_KEY[6] = le32_to_cpu(key[6]);
-               t = E_KEY[7] = le32_to_cpu(key[7]);
+               ctx->key_enc[4] = le32_to_cpu(key[4]);
+               ctx->key_enc[5] = le32_to_cpu(key[5]);
+               ctx->key_enc[6] = le32_to_cpu(key[6]);
+               t = ctx->key_enc[7] = le32_to_cpu(key[7]);
                for (i = 0; i < 7; ++i)
-                       loop8 (i);
+                       loop8(i);
                break;
        }
 
-       D_KEY[0] = E_KEY[0];
-       D_KEY[1] = E_KEY[1];
-       D_KEY[2] = E_KEY[2];
-       D_KEY[3] = E_KEY[3];
+       ctx->key_dec[0] = ctx->key_enc[key_len + 24];
+       ctx->key_dec[1] = ctx->key_enc[key_len + 25];
+       ctx->key_dec[2] = ctx->key_enc[key_len + 26];
+       ctx->key_dec[3] = ctx->key_enc[key_len + 27];
 
        for (i = 4; i < key_len + 24; ++i) {
-               imix_col (D_KEY[i], E_KEY[i]);
+               j = key_len + 24 - (i & ~3) + (i & 3);
+               imix_col(ctx->key_dec[j], ctx->key_enc[i]);
        }
-
        return 0;
 }
+EXPORT_SYMBOL_GPL(crypto_aes_set_key);
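/*
 * Editor's note, not part of the patch: crypto_aes_set_key() stores the
 * decryption round keys in forward order.  For a 128-bit key
 * (key_len = 16, so key_len + 24 = 40) the mapping
 *
 *	j = key_len + 24 - (i & ~3) + (i & 3)
 *
 * sends the four-word group i = 4..7 to j = 36..39, i = 8..11 to
 * j = 32..35, ..., and i = 36..39 to j = 4..7: round-key groups are
 * reversed while word order inside each group is preserved, which is why
 * the decryption rounds below advance their key pointer with k += 4.
 */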
 
 /* encrypt a block of text */
 
-#define f_nround(bo, bi, k) \
-    f_rn(bo, bi, 0, k);     \
-    f_rn(bo, bi, 1, k);     \
-    f_rn(bo, bi, 2, k);     \
-    f_rn(bo, bi, 3, k);     \
-    k += 4
-
-#define f_lround(bo, bi, k) \
-    f_rl(bo, bi, 0, k);     \
-    f_rl(bo, bi, 1, k);     \
-    f_rl(bo, bi, 2, k);     \
-    f_rl(bo, bi, 3, k)
+#define f_rn(bo, bi, n, k)     do {                            \
+       bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^                      \
+               crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^            \
+               crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^            \
+               crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n);  \
+} while (0)
+
+#define f_nround(bo, bi, k)    do {\
+       f_rn(bo, bi, 0, k);     \
+       f_rn(bo, bi, 1, k);     \
+       f_rn(bo, bi, 2, k);     \
+       f_rn(bo, bi, 3, k);     \
+       k += 4;                 \
+} while (0)
+
+#define f_rl(bo, bi, n, k)     do {                            \
+       bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^                      \
+               crypto_fl_tab[1][byte(bi[(n + 1) & 3], 1)] ^            \
+               crypto_fl_tab[2][byte(bi[(n + 2) & 3], 2)] ^            \
+               crypto_fl_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n);  \
+} while (0)
+
+#define f_lround(bo, bi, k)    do {\
+       f_rl(bo, bi, 0, k);     \
+       f_rl(bo, bi, 1, k);     \
+       f_rl(bo, bi, 2, k);     \
+       f_rl(bo, bi, 3, k);     \
+} while (0)
 
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-       const struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *src = (const __le32 *)in;
        __le32 *dst = (__le32 *)out;
        u32 b0[4], b1[4];
-       const u32 *kp = E_KEY + 4;
+       const u32 *kp = ctx->key_enc + 4;
+       const int key_len = ctx->key_length;
 
-       b0[0] = le32_to_cpu(src[0]) ^ E_KEY[0];
-       b0[1] = le32_to_cpu(src[1]) ^ E_KEY[1];
-       b0[2] = le32_to_cpu(src[2]) ^ E_KEY[2];
-       b0[3] = le32_to_cpu(src[3]) ^ E_KEY[3];
+       b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0];
+       b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1];
+       b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2];
+       b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3];
 
-       if (ctx->key_length > 24) {
-               f_nround (b1, b0, kp);
-               f_nround (b0, b1, kp);
+       if (key_len > 24) {
+               f_nround(b1, b0, kp);
+               f_nround(b0, b1, kp);
        }
 
-       if (ctx->key_length > 16) {
-               f_nround (b1, b0, kp);
-               f_nround (b0, b1, kp);
+       if (key_len > 16) {
+               f_nround(b1, b0, kp);
+               f_nround(b0, b1, kp);
        }
 
-       f_nround (b1, b0, kp);
-       f_nround (b0, b1, kp);
-       f_nround (b1, b0, kp);
-       f_nround (b0, b1, kp);
-       f_nround (b1, b0, kp);
-       f_nround (b0, b1, kp);
-       f_nround (b1, b0, kp);
-       f_nround (b0, b1, kp);
-       f_nround (b1, b0, kp);
-       f_lround (b0, b1, kp);
+       f_nround(b1, b0, kp);
+       f_nround(b0, b1, kp);
+       f_nround(b1, b0, kp);
+       f_nround(b0, b1, kp);
+       f_nround(b1, b0, kp);
+       f_nround(b0, b1, kp);
+       f_nround(b1, b0, kp);
+       f_nround(b0, b1, kp);
+       f_nround(b1, b0, kp);
+       f_lround(b0, b1, kp);
 
        dst[0] = cpu_to_le32(b0[0]);
        dst[1] = cpu_to_le32(b0[1]);
@@ -361,53 +360,69 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 
 /* decrypt a block of text */
 
-#define i_nround(bo, bi, k) \
-    i_rn(bo, bi, 0, k);     \
-    i_rn(bo, bi, 1, k);     \
-    i_rn(bo, bi, 2, k);     \
-    i_rn(bo, bi, 3, k);     \
-    k -= 4
-
-#define i_lround(bo, bi, k) \
-    i_rl(bo, bi, 0, k);     \
-    i_rl(bo, bi, 1, k);     \
-    i_rl(bo, bi, 2, k);     \
-    i_rl(bo, bi, 3, k)
+#define i_rn(bo, bi, n, k)     do {                            \
+       bo[n] = crypto_it_tab[0][byte(bi[n], 0)] ^                      \
+               crypto_it_tab[1][byte(bi[(n + 3) & 3], 1)] ^            \
+               crypto_it_tab[2][byte(bi[(n + 2) & 3], 2)] ^            \
+               crypto_it_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n);  \
+} while (0)
+
+#define i_nround(bo, bi, k)    do {\
+       i_rn(bo, bi, 0, k);     \
+       i_rn(bo, bi, 1, k);     \
+       i_rn(bo, bi, 2, k);     \
+       i_rn(bo, bi, 3, k);     \
+       k += 4;                 \
+} while (0)
+
+#define i_rl(bo, bi, n, k)     do {                    \
+       bo[n] = crypto_il_tab[0][byte(bi[n], 0)] ^              \
+       crypto_il_tab[1][byte(bi[(n + 3) & 3], 1)] ^            \
+       crypto_il_tab[2][byte(bi[(n + 2) & 3], 2)] ^            \
+       crypto_il_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n);  \
+} while (0)
+
+#define i_lround(bo, bi, k)    do {\
+       i_rl(bo, bi, 0, k);     \
+       i_rl(bo, bi, 1, k);     \
+       i_rl(bo, bi, 2, k);     \
+       i_rl(bo, bi, 3, k);     \
+} while (0)
 
 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-       const struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *src = (const __le32 *)in;
        __le32 *dst = (__le32 *)out;
        u32 b0[4], b1[4];
        const int key_len = ctx->key_length;
-       const u32 *kp = D_KEY + key_len + 20;
+       const u32 *kp = ctx->key_dec + 4;
 
-       b0[0] = le32_to_cpu(src[0]) ^ E_KEY[key_len + 24];
-       b0[1] = le32_to_cpu(src[1]) ^ E_KEY[key_len + 25];
-       b0[2] = le32_to_cpu(src[2]) ^ E_KEY[key_len + 26];
-       b0[3] = le32_to_cpu(src[3]) ^ E_KEY[key_len + 27];
+       b0[0] = le32_to_cpu(src[0]) ^  ctx->key_dec[0];
+       b0[1] = le32_to_cpu(src[1]) ^  ctx->key_dec[1];
+       b0[2] = le32_to_cpu(src[2]) ^  ctx->key_dec[2];
+       b0[3] = le32_to_cpu(src[3]) ^  ctx->key_dec[3];
 
        if (key_len > 24) {
-               i_nround (b1, b0, kp);
-               i_nround (b0, b1, kp);
+               i_nround(b1, b0, kp);
+               i_nround(b0, b1, kp);
        }
 
        if (key_len > 16) {
-               i_nround (b1, b0, kp);
-               i_nround (b0, b1, kp);
+               i_nround(b1, b0, kp);
+               i_nround(b0, b1, kp);
        }
 
-       i_nround (b1, b0, kp);
-       i_nround (b0, b1, kp);
-       i_nround (b1, b0, kp);
-       i_nround (b0, b1, kp);
-       i_nround (b1, b0, kp);
-       i_nround (b0, b1, kp);
-       i_nround (b1, b0, kp);
-       i_nround (b0, b1, kp);
-       i_nround (b1, b0, kp);
-       i_lround (b0, b1, kp);
+       i_nround(b1, b0, kp);
+       i_nround(b0, b1, kp);
+       i_nround(b1, b0, kp);
+       i_nround(b0, b1, kp);
+       i_nround(b1, b0, kp);
+       i_nround(b0, b1, kp);
+       i_nround(b1, b0, kp);
+       i_nround(b0, b1, kp);
+       i_nround(b1, b0, kp);
+       i_lround(b0, b1, kp);
 
        dst[0] = cpu_to_le32(b0[0]);
        dst[1] = cpu_to_le32(b0[1]);
@@ -415,14 +430,13 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
        dst[3] = cpu_to_le32(b0[3]);
 }
 
-
 static struct crypto_alg aes_alg = {
        .cra_name               =       "aes",
        .cra_driver_name        =       "aes-generic",
        .cra_priority           =       100,
        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          =       AES_BLOCK_SIZE,
-       .cra_ctxsize            =       sizeof(struct aes_ctx),
+       .cra_ctxsize            =       sizeof(struct crypto_aes_ctx),
        .cra_alignmask          =       3,
        .cra_module             =       THIS_MODULE,
        .cra_list               =       LIST_HEAD_INIT(aes_alg.cra_list),
@@ -430,9 +444,9 @@ static struct crypto_alg aes_alg = {
                .cipher = {
                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
                        .cia_max_keysize        =       AES_MAX_KEY_SIZE,
-                       .cia_setkey             =       aes_set_key,
-                       .cia_encrypt            =       aes_encrypt,
-                       .cia_decrypt            =       aes_decrypt
+                       .cia_setkey             =       crypto_aes_set_key,
+                       .cia_encrypt            =       aes_encrypt,
+                       .cia_decrypt            =       aes_decrypt
                }
        }
 };
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 8383282..e65cb50 100644
@@ -472,7 +472,7 @@ int crypto_check_attr_type(struct rtattr **tb, u32 type)
 }
 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
 
-struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+const char *crypto_attr_alg_name(struct rtattr *rta)
 {
        struct crypto_attr_alg *alga;
 
@@ -486,7 +486,21 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
        alga = RTA_DATA(rta);
        alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
 
-       return crypto_alg_mod_lookup(alga->name, type, mask);
+       return alga->name;
+}
+EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
+
+struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+{
+       const char *name;
+       int err;
+
+       name = crypto_attr_alg_name(rta);
+       err = PTR_ERR(name);
+       if (IS_ERR(name))
+               return ERR_PTR(err);
+
+       return crypto_alg_mod_lookup(name, type, mask);
 }
 EXPORT_SYMBOL_GPL(crypto_attr_alg);
 
@@ -605,6 +619,53 @@ int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
 }
 EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
 
+static inline void crypto_inc_byte(u8 *a, unsigned int size)
+{
+       u8 *b = (a + size);
+       u8 c;
+
+       for (; size; size--) {
+               c = *--b + 1;
+               *b = c;
+               if (c)
+                       break;
+       }
+}
+
+void crypto_inc(u8 *a, unsigned int size)
+{
+       __be32 *b = (__be32 *)(a + size);
+       u32 c;
+
+       for (; size >= 4; size -= 4) {
+               c = be32_to_cpu(*--b) + 1;
+               *b = cpu_to_be32(c);
+               if (c)
+                       return;
+       }
+
+       crypto_inc_byte(a, size);
+}
+EXPORT_SYMBOL_GPL(crypto_inc);
+
+static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
+{
+       for (; size; size--)
+               *a++ ^= *b++;
+}
+
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
+{
+       u32 *a = (u32 *)dst;
+       u32 *b = (u32 *)src;
+
+       for (; size >= 4; size -= 4)
+               *a++ ^= *b++;
+
+       crypto_xor_byte((u8 *)a, (u8 *)b, size);
+}
+EXPORT_SYMBOL_GPL(crypto_xor);
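/*
 * Editor's usage sketch, not part of the patch: crypto_inc() treats its
 * buffer as one big-endian counter (as CTR mode expects) and crypto_xor()
 * combines buffers a 32-bit word at a time, so the example keeps both
 * arrays word-aligned.  example_inc_xor() is a hypothetical caller.
 */
static void example_inc_xor(void)
{
	u32 ctr[4] = { 0, 0, 0, 0 };
	u32 pad[4] = { 0, 0, 0, 0 };

	((u8 *)ctr)[15] = 0xff;

	crypto_inc((u8 *)ctr, sizeof(ctr));	/* ...00 00 00 ff -> ...00 00 01 00 */
	crypto_xor((u8 *)pad, (u8 *)ctr, sizeof(pad));	/* pad ^= ctr */
}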
+
 static int __init crypto_algapi_init(void)
 {
        crypto_init_proc();
diff --git a/crypto/api.c b/crypto/api.c
index 1f5c724..a2496d1 100644
@@ -137,7 +137,7 @@ static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
        return alg;
 }
 
-static void crypto_larval_kill(struct crypto_alg *alg)
+void crypto_larval_kill(struct crypto_alg *alg)
 {
        struct crypto_larval *larval = (void *)alg;
 
@@ -147,6 +147,7 @@ static void crypto_larval_kill(struct crypto_alg *alg)
        complete_all(&larval->completion);
        crypto_alg_put(alg);
 }
+EXPORT_SYMBOL_GPL(crypto_larval_kill);
 
 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
 {
@@ -176,11 +177,9 @@ static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
        return alg;
 }
 
-struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
+struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
 {
        struct crypto_alg *alg;
-       struct crypto_alg *larval;
-       int ok;
 
        if (!name)
                return ERR_PTR(-ENOENT);
@@ -193,7 +192,17 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;
 
-       larval = crypto_larval_alloc(name, type, mask);
+       return crypto_larval_alloc(name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_larval_lookup);
+
+struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
+{
+       struct crypto_alg *alg;
+       struct crypto_alg *larval;
+       int ok;
+
+       larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;
 
diff --git a/crypto/authenc.c b/crypto/authenc.c
index 126a529..ed8ac5a 100644
  *
  */
 
-#include <crypto/algapi.h>
+#include <crypto/aead.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/authenc.h>
+#include <crypto/scatterwalk.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
+#include <linux/rtnetlink.h>
 #include <linux/slab.h>
 #include <linux/spinlock.h>
 
-#include "scatterwalk.h"
-
 struct authenc_instance_ctx {
        struct crypto_spawn auth;
-       struct crypto_spawn enc;
-
-       unsigned int authsize;
-       unsigned int enckeylen;
+       struct crypto_skcipher_spawn enc;
 };
 
 struct crypto_authenc_ctx {
@@ -37,19 +36,31 @@ struct crypto_authenc_ctx {
 static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
                                 unsigned int keylen)
 {
-       struct authenc_instance_ctx *ictx =
-               crypto_instance_ctx(crypto_aead_alg_instance(authenc));
-       unsigned int enckeylen = ictx->enckeylen;
        unsigned int authkeylen;
+       unsigned int enckeylen;
        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
        struct crypto_hash *auth = ctx->auth;
        struct crypto_ablkcipher *enc = ctx->enc;
+       struct rtattr *rta = (void *)key;
+       struct crypto_authenc_key_param *param;
        int err = -EINVAL;
 
-       if (keylen < enckeylen) {
-               crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
-               goto out;
-       }
+       if (!RTA_OK(rta, keylen))
+               goto badkey;
+       if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
+               goto badkey;
+       if (RTA_PAYLOAD(rta) < sizeof(*param))
+               goto badkey;
+
+       param = RTA_DATA(rta);
+       enckeylen = be32_to_cpu(param->enckeylen);
+
+       key += RTA_ALIGN(rta->rta_len);
+       keylen -= RTA_ALIGN(rta->rta_len);
+
+       if (keylen < enckeylen)
+               goto badkey;
+
        authkeylen = keylen - enckeylen;
 
        crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
@@ -71,21 +82,38 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
 
 out:
        return err;
+
+badkey:
+       crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
+       goto out;
 }
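/*
 * Editor's illustrative sketch, not part of the patch: with the setkey
 * format above the caller hands authenc a blob of the form
 * rtattr(CRYPTO_AUTHENC_KEYA_PARAM){ enckeylen } || authkey || enckey.
 * example_pack_authenc_key() is a hypothetical helper showing how such a
 * blob could be assembled; it relies only on <linux/rtnetlink.h> and
 * <crypto/authenc.h>, both included by this file.
 */
static int example_pack_authenc_key(u8 *buf, unsigned int buflen,
				    const u8 *authkey, unsigned int authkeylen,
				    const u8 *enckey, unsigned int enckeylen)
{
	struct rtattr *rta = (void *)buf;
	struct crypto_authenc_key_param *param;
	u8 *p;

	if (buflen < RTA_SPACE(sizeof(*param)) + authkeylen + enckeylen)
		return -EINVAL;

	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
	rta->rta_len = RTA_LENGTH(sizeof(*param));
	param = RTA_DATA(rta);
	param->enckeylen = cpu_to_be32(enckeylen);

	p = buf + RTA_SPACE(sizeof(*param));
	memcpy(p, authkey, authkeylen);
	memcpy(p + authkeylen, enckey, enckeylen);
	return 0;
}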
 
-static int crypto_authenc_hash(struct aead_request *req)
+static void authenc_chain(struct scatterlist *head, struct scatterlist *sg,
+                         int chain)
+{
+       if (chain) {
+               head->length += sg->length;
+               sg = scatterwalk_sg_next(sg);
+       }
+
+       if (sg)
+               scatterwalk_sg_chain(head, 2, sg);
+       else
+               sg_mark_end(head);
+}
+
+static u8 *crypto_authenc_hash(struct aead_request *req, unsigned int flags,
+                              struct scatterlist *cipher,
+                              unsigned int cryptlen)
 {
        struct crypto_aead *authenc = crypto_aead_reqtfm(req);
-       struct authenc_instance_ctx *ictx =
-               crypto_instance_ctx(crypto_aead_alg_instance(authenc));
        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
        struct crypto_hash *auth = ctx->auth;
        struct hash_desc desc = {
                .tfm = auth,
+               .flags = aead_request_flags(req) & flags,
        };
        u8 *hash = aead_request_ctx(req);
-       struct scatterlist *dst = req->dst;
-       unsigned int cryptlen = req->cryptlen;
        int err;
 
        hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth), 
@@ -100,7 +128,7 @@ static int crypto_authenc_hash(struct aead_request *req)
        if (err)
                goto auth_unlock;
 
-       err = crypto_hash_update(&desc, dst, cryptlen);
+       err = crypto_hash_update(&desc, cipher, cryptlen);
        if (err)
                goto auth_unlock;
 
@@ -109,17 +137,53 @@ auth_unlock:
        spin_unlock_bh(&ctx->auth_lock);
 
        if (err)
-               return err;
+               return ERR_PTR(err);
+
+       return hash;
+}
 
-       scatterwalk_map_and_copy(hash, dst, cryptlen, ictx->authsize, 1);
+static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
+                                unsigned int flags)
+{
+       struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+       struct scatterlist *dst = req->dst;
+       struct scatterlist cipher[2];
+       struct page *dstp;
+       unsigned int ivsize = crypto_aead_ivsize(authenc);
+       unsigned int cryptlen;
+       u8 *vdst;
+       u8 *hash;
+
+       dstp = sg_page(dst);
+       vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset;
+
+       sg_init_table(cipher, 2);
+       sg_set_buf(cipher, iv, ivsize);
+       authenc_chain(cipher, dst, vdst == iv + ivsize);
+
+       cryptlen = req->cryptlen + ivsize;
+       hash = crypto_authenc_hash(req, flags, cipher, cryptlen);
+       if (IS_ERR(hash))
+               return PTR_ERR(hash);
+
+       scatterwalk_map_and_copy(hash, cipher, cryptlen,
+                                crypto_aead_authsize(authenc), 1);
        return 0;
 }
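/*
 * Editor's note, not part of the patch: crypto_authenc_genicv() above and
 * crypto_authenc_iverify() below use authenc_chain() to splice the
 * (possibly generated) IV in front of the ciphertext, so the ICV now
 * covers assoc || IV || ciphertext instead of assoc || ciphertext alone.
 */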
 
 static void crypto_authenc_encrypt_done(struct crypto_async_request *req,
                                        int err)
 {
-       if (!err)
-               err = crypto_authenc_hash(req->data);
+       if (!err) {
+               struct aead_request *areq = req->data;
+               struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
+               struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+               struct ablkcipher_request *abreq = aead_request_ctx(areq);
+               u8 *iv = (u8 *)(abreq + 1) +
+                        crypto_ablkcipher_reqsize(ctx->enc);
+
+               err = crypto_authenc_genicv(areq, iv, 0);
+       }
 
        aead_request_complete(req->data, err);
 }
@@ -129,72 +193,99 @@ static int crypto_authenc_encrypt(struct aead_request *req)
        struct crypto_aead *authenc = crypto_aead_reqtfm(req);
        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
        struct ablkcipher_request *abreq = aead_request_ctx(req);
+       struct crypto_ablkcipher *enc = ctx->enc;
+       struct scatterlist *dst = req->dst;
+       unsigned int cryptlen = req->cryptlen;
+       u8 *iv = (u8 *)(abreq + 1) + crypto_ablkcipher_reqsize(enc);
        int err;
 
-       ablkcipher_request_set_tfm(abreq, ctx->enc);
+       ablkcipher_request_set_tfm(abreq, enc);
        ablkcipher_request_set_callback(abreq, aead_request_flags(req),
                                        crypto_authenc_encrypt_done, req);
-       ablkcipher_request_set_crypt(abreq, req->src, req->dst, req->cryptlen,
-                                    req->iv);
+       ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv);
+
+       memcpy(iv, req->iv, crypto_aead_ivsize(authenc));
 
        err = crypto_ablkcipher_encrypt(abreq);
        if (err)
                return err;
 
-       return crypto_authenc_hash(req);
+       return crypto_authenc_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
 }
 
-static int crypto_authenc_verify(struct aead_request *req)
+static void crypto_authenc_givencrypt_done(struct crypto_async_request *req,
+                                          int err)
 {
-       struct crypto_aead *authenc = crypto_aead_reqtfm(req);
-       struct authenc_instance_ctx *ictx =
-               crypto_instance_ctx(crypto_aead_alg_instance(authenc));
+       if (!err) {
+               struct aead_givcrypt_request *greq = req->data;
+
+               err = crypto_authenc_genicv(&greq->areq, greq->giv, 0);
+       }
+
+       aead_request_complete(req->data, err);
+}
+
+static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
+{
+       struct crypto_aead *authenc = aead_givcrypt_reqtfm(req);
        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
-       struct crypto_hash *auth = ctx->auth;
-       struct hash_desc desc = {
-               .tfm = auth,
-               .flags = aead_request_flags(req),
-       };
-       u8 *ohash = aead_request_ctx(req);
-       u8 *ihash;
-       struct scatterlist *src = req->src;
-       unsigned int cryptlen = req->cryptlen;
-       unsigned int authsize;
+       struct aead_request *areq = &req->areq;
+       struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
+       u8 *iv = req->giv;
        int err;
 
-       ohash = (u8 *)ALIGN((unsigned long)ohash + crypto_hash_alignmask(auth), 
-                           crypto_hash_alignmask(auth) + 1);
-       ihash = ohash + crypto_hash_digestsize(auth);
-
-       spin_lock_bh(&ctx->auth_lock);
-       err = crypto_hash_init(&desc);
-       if (err)
-               goto auth_unlock;
+       skcipher_givcrypt_set_tfm(greq, ctx->enc);
+       skcipher_givcrypt_set_callback(greq, aead_request_flags(areq),
+                                      crypto_authenc_givencrypt_done, areq);
+       skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen,
+                                   areq->iv);
+       skcipher_givcrypt_set_giv(greq, iv, req->seq);
 
-       err = crypto_hash_update(&desc, req->assoc, req->assoclen);
+       err = crypto_skcipher_givencrypt(greq);
        if (err)
-               goto auth_unlock;
+               return err;
 
-       err = crypto_hash_update(&desc, src, cryptlen);
-       if (err)
-               goto auth_unlock;
+       return crypto_authenc_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
+}
 
-       err = crypto_hash_final(&desc, ohash);
-auth_unlock:
-       spin_unlock_bh(&ctx->auth_lock);
+static int crypto_authenc_verify(struct aead_request *req,
+                                struct scatterlist *cipher,
+                                unsigned int cryptlen)
+{
+       struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+       u8 *ohash;
+       u8 *ihash;
+       unsigned int authsize;
 
-       if (err)
-               return err;
+       ohash = crypto_authenc_hash(req, CRYPTO_TFM_REQ_MAY_SLEEP, cipher,
+                                   cryptlen);
+       if (IS_ERR(ohash))
+               return PTR_ERR(ohash);
 
-       authsize = ictx->authsize;
-       scatterwalk_map_and_copy(ihash, src, cryptlen, authsize, 0);
-       return memcmp(ihash, ohash, authsize) ? -EINVAL : 0;
+       authsize = crypto_aead_authsize(authenc);
+       ihash = ohash + authsize;
+       scatterwalk_map_and_copy(ihash, cipher, cryptlen, authsize, 0);
+       return memcmp(ihash, ohash, authsize) ? -EBADMSG: 0;
 }
 
-static void crypto_authenc_decrypt_done(struct crypto_async_request *req,
-                                       int err)
+static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
+                                 unsigned int cryptlen)
 {
-       aead_request_complete(req->data, err);
+       struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+       struct scatterlist *src = req->src;
+       struct scatterlist cipher[2];
+       struct page *srcp;
+       unsigned int ivsize = crypto_aead_ivsize(authenc);
+       u8 *vsrc;
+
+       srcp = sg_page(src);
+       vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset;
+
+       sg_init_table(cipher, 2);
+       sg_set_buf(cipher, iv, ivsize);
+       authenc_chain(cipher, src, vsrc == iv + ivsize);
+
+       return crypto_authenc_verify(req, cipher, cryptlen + ivsize);
 }
 
 static int crypto_authenc_decrypt(struct aead_request *req)
@@ -202,17 +293,23 @@ static int crypto_authenc_decrypt(struct aead_request *req)
        struct crypto_aead *authenc = crypto_aead_reqtfm(req);
        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
        struct ablkcipher_request *abreq = aead_request_ctx(req);
+       unsigned int cryptlen = req->cryptlen;
+       unsigned int authsize = crypto_aead_authsize(authenc);
+       u8 *iv = req->iv;
        int err;
 
-       err = crypto_authenc_verify(req);
+       if (cryptlen < authsize)
+               return -EINVAL;
+       cryptlen -= authsize;
+
+       err = crypto_authenc_iverify(req, iv, cryptlen);
        if (err)
                return err;
 
        ablkcipher_request_set_tfm(abreq, ctx->enc);
        ablkcipher_request_set_callback(abreq, aead_request_flags(req),
-                                       crypto_authenc_decrypt_done, req);
-       ablkcipher_request_set_crypt(abreq, req->src, req->dst, req->cryptlen,
-                                    req->iv);
+                                       req->base.complete, req->base.data);
+       ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv);
 
        return crypto_ablkcipher_decrypt(abreq);
 }
@@ -224,19 +321,13 @@ static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
        struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_hash *auth;
        struct crypto_ablkcipher *enc;
-       unsigned int digestsize;
        int err;
 
        auth = crypto_spawn_hash(&ictx->auth);
        if (IS_ERR(auth))
                return PTR_ERR(auth);
 
-       err = -EINVAL;
-       digestsize = crypto_hash_digestsize(auth);
-       if (ictx->authsize > digestsize)
-               goto err_free_hash;
-
-       enc = crypto_spawn_ablkcipher(&ictx->enc);
+       enc = crypto_spawn_skcipher(&ictx->enc);
        err = PTR_ERR(enc);
        if (IS_ERR(enc))
                goto err_free_hash;
@@ -246,9 +337,10 @@ static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
        tfm->crt_aead.reqsize = max_t(unsigned int,
                                      (crypto_hash_alignmask(auth) &
                                       ~(crypto_tfm_ctx_alignment() - 1)) +
-                                     digestsize * 2,
-                                     sizeof(struct ablkcipher_request) +
-                                     crypto_ablkcipher_reqsize(enc));
+                                     crypto_hash_digestsize(auth) * 2,
+                                     sizeof(struct skcipher_givcrypt_request) +
+                                     crypto_ablkcipher_reqsize(enc) +
+                                     crypto_ablkcipher_ivsize(enc));
 
        spin_lock_init(&ctx->auth_lock);
 
@@ -269,75 +361,74 @@ static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm)
 
 static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
 {
+       struct crypto_attr_type *algt;
        struct crypto_instance *inst;
        struct crypto_alg *auth;
        struct crypto_alg *enc;
        struct authenc_instance_ctx *ctx;
-       unsigned int authsize;
-       unsigned int enckeylen;
+       const char *enc_name;
        int err;
 
-       err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD);
-       if (err)
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
                return ERR_PTR(err);
 
+       if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+               return ERR_PTR(-EINVAL);
+
        auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
                               CRYPTO_ALG_TYPE_HASH_MASK);
        if (IS_ERR(auth))
                return ERR_PTR(PTR_ERR(auth));
 
-       err = crypto_attr_u32(tb[2], &authsize);
-       inst = ERR_PTR(err);
-       if (err)
-               goto out_put_auth;
-
-       enc = crypto_attr_alg(tb[3], CRYPTO_ALG_TYPE_BLKCIPHER,
-                             CRYPTO_ALG_TYPE_MASK);
-       inst = ERR_PTR(PTR_ERR(enc));
-       if (IS_ERR(enc))
+       enc_name = crypto_attr_alg_name(tb[2]);
+       err = PTR_ERR(enc_name);
+       if (IS_ERR(enc_name))
                goto out_put_auth;
 
-       err = crypto_attr_u32(tb[4], &enckeylen);
-       if (err)
-               goto out_put_enc;
-
        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        err = -ENOMEM;
        if (!inst)
-               goto out_put_enc;
-
-       err = -ENAMETOOLONG;
-       if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
-                    "authenc(%s,%u,%s,%u)", auth->cra_name, authsize,
-                    enc->cra_name, enckeylen) >= CRYPTO_MAX_ALG_NAME)
-               goto err_free_inst;
-
-       if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
-                    "authenc(%s,%u,%s,%u)", auth->cra_driver_name,
-                    authsize, enc->cra_driver_name, enckeylen) >=
-           CRYPTO_MAX_ALG_NAME)
-               goto err_free_inst;
+               goto out_put_auth;
 
        ctx = crypto_instance_ctx(inst);
-       ctx->authsize = authsize;
-       ctx->enckeylen = enckeylen;
 
        err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto err_free_inst;
 
-       err = crypto_init_spawn(&ctx->enc, enc, inst, CRYPTO_ALG_TYPE_MASK);
+       crypto_set_skcipher_spawn(&ctx->enc, inst);
+       err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
+                                  crypto_requires_sync(algt->type,
+                                                       algt->mask));
        if (err)
                goto err_drop_auth;
 
-       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
+       enc = crypto_skcipher_spawn_alg(&ctx->enc);
+
+       err = -ENAMETOOLONG;
+       if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+                    "authenc(%s,%s)", auth->cra_name, enc->cra_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               goto err_drop_enc;
+
+       if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                    "authenc(%s,%s)", auth->cra_driver_name,
+                    enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+               goto err_drop_enc;
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+       inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority;
        inst->alg.cra_blocksize = enc->cra_blocksize;
-       inst->alg.cra_alignmask = max(auth->cra_alignmask, enc->cra_alignmask);
+       inst->alg.cra_alignmask = auth->cra_alignmask | enc->cra_alignmask;
        inst->alg.cra_type = &crypto_aead_type;
 
-       inst->alg.cra_aead.ivsize = enc->cra_blkcipher.ivsize;
-       inst->alg.cra_aead.authsize = authsize;
+       inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
+       inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
+                                        auth->cra_hash.digestsize :
+                                        auth->cra_digest.dia_digestsize;
 
        inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
 
@@ -347,18 +438,19 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
        inst->alg.cra_aead.setkey = crypto_authenc_setkey;
        inst->alg.cra_aead.encrypt = crypto_authenc_encrypt;
        inst->alg.cra_aead.decrypt = crypto_authenc_decrypt;
+       inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt;
 
 out:
-       crypto_mod_put(enc);
-out_put_auth:
        crypto_mod_put(auth);
        return inst;
 
+err_drop_enc:
+       crypto_drop_skcipher(&ctx->enc);
 err_drop_auth:
        crypto_drop_spawn(&ctx->auth);
 err_free_inst:
        kfree(inst);
-out_put_enc:
+out_put_auth:
        inst = ERR_PTR(err);
        goto out;
 }
@@ -367,7 +459,7 @@ static void crypto_authenc_free(struct crypto_instance *inst)
 {
        struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst);
 
-       crypto_drop_spawn(&ctx->enc);
+       crypto_drop_skcipher(&ctx->enc);
        crypto_drop_spawn(&ctx->auth);
        kfree(inst);
 }
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index f6c67f9..4a7e65c 100644
@@ -14,7 +14,8 @@
  *
  */
 
-#include <linux/crypto.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
 #include <linux/errno.h>
 #include <linux/hardirq.h>
 #include <linux/kernel.h>
@@ -25,7 +26,6 @@
 #include <linux/string.h>
 
 #include "internal.h"
-#include "scatterwalk.h"
 
 enum {
        BLKCIPHER_WALK_PHYS = 1 << 0,
@@ -433,9 +433,8 @@ static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type,
        struct blkcipher_alg *cipher = &alg->cra_blkcipher;
        unsigned int len = alg->cra_ctxsize;
 
-       type ^= CRYPTO_ALG_ASYNC;
-       mask &= CRYPTO_ALG_ASYNC;
-       if ((type & mask) && cipher->ivsize) {
+       if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK &&
+           cipher->ivsize) {
                len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
                len += cipher->ivsize;
        }
@@ -451,6 +450,11 @@ static int crypto_init_blkcipher_ops_async(struct crypto_tfm *tfm)
        crt->setkey = async_setkey;
        crt->encrypt = async_encrypt;
        crt->decrypt = async_decrypt;
+       if (!alg->ivsize) {
+               crt->givencrypt = skcipher_null_givencrypt;
+               crt->givdecrypt = skcipher_null_givdecrypt;
+       }
+       crt->base = __crypto_ablkcipher_cast(tfm);
        crt->ivsize = alg->ivsize;
 
        return 0;
@@ -482,9 +486,7 @@ static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
        if (alg->ivsize > PAGE_SIZE / 8)
                return -EINVAL;
 
-       type ^= CRYPTO_ALG_ASYNC;
-       mask &= CRYPTO_ALG_ASYNC;
-       if (type & mask)
+       if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK)
                return crypto_init_blkcipher_ops_sync(tfm);
        else
                return crypto_init_blkcipher_ops_async(tfm);
@@ -499,6 +501,8 @@ static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
        seq_printf(m, "min keysize  : %u\n", alg->cra_blkcipher.min_keysize);
        seq_printf(m, "max keysize  : %u\n", alg->cra_blkcipher.max_keysize);
        seq_printf(m, "ivsize       : %u\n", alg->cra_blkcipher.ivsize);
+       seq_printf(m, "geniv        : %s\n", alg->cra_blkcipher.geniv ?:
+                                            "<default>");
 }
 
 const struct crypto_type crypto_blkcipher_type = {
@@ -510,5 +514,187 @@ const struct crypto_type crypto_blkcipher_type = {
 };
 EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
 
+static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn,
+                               const char *name, u32 type, u32 mask)
+{
+       struct crypto_alg *alg;
+       int err;
+
+       type = crypto_skcipher_type(type);
+       mask = crypto_skcipher_mask(mask) | CRYPTO_ALG_GENIV;
+
+       alg = crypto_alg_mod_lookup(name, type, mask);
+       if (IS_ERR(alg))
+               return PTR_ERR(alg);
+
+       err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
+       crypto_mod_put(alg);
+       return err;
+}
+
+struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
+                                            struct rtattr **tb, u32 type,
+                                            u32 mask)
+{
+       struct {
+               int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
+                             unsigned int keylen);
+               int (*encrypt)(struct ablkcipher_request *req);
+               int (*decrypt)(struct ablkcipher_request *req);
+
+               unsigned int min_keysize;
+               unsigned int max_keysize;
+               unsigned int ivsize;
+
+               const char *geniv;
+       } balg;
+       const char *name;
+       struct crypto_skcipher_spawn *spawn;
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       struct crypto_alg *alg;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
+           algt->mask)
+               return ERR_PTR(-EINVAL);
+
+       name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(name);
+       if (IS_ERR(name))
+               return ERR_PTR(err);
+
+       inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+       if (!inst)
+               return ERR_PTR(-ENOMEM);
+
+       spawn = crypto_instance_ctx(inst);
+
+       /* Ignore async algorithms if necessary. */
+       mask |= crypto_requires_sync(algt->type, algt->mask);
+
+       crypto_set_skcipher_spawn(spawn, inst);
+       err = crypto_grab_nivcipher(spawn, name, type, mask);
+       if (err)
+               goto err_free_inst;
+
+       alg = crypto_skcipher_spawn_alg(spawn);
+
+       if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+           CRYPTO_ALG_TYPE_BLKCIPHER) {
+               balg.ivsize = alg->cra_blkcipher.ivsize;
+               balg.min_keysize = alg->cra_blkcipher.min_keysize;
+               balg.max_keysize = alg->cra_blkcipher.max_keysize;
+
+               balg.setkey = async_setkey;
+               balg.encrypt = async_encrypt;
+               balg.decrypt = async_decrypt;
+
+               balg.geniv = alg->cra_blkcipher.geniv;
+       } else {
+               balg.ivsize = alg->cra_ablkcipher.ivsize;
+               balg.min_keysize = alg->cra_ablkcipher.min_keysize;
+               balg.max_keysize = alg->cra_ablkcipher.max_keysize;
+
+               balg.setkey = alg->cra_ablkcipher.setkey;
+               balg.encrypt = alg->cra_ablkcipher.encrypt;
+               balg.decrypt = alg->cra_ablkcipher.decrypt;
+
+               balg.geniv = alg->cra_ablkcipher.geniv;
+       }
+
+       err = -EINVAL;
+       if (!balg.ivsize)
+               goto err_drop_alg;
+
+       /*
+        * This is only true if we're constructing an algorithm with its
+        * default IV generator.  For the default generator we elide the
+        * template name and double-check the IV generator.
+        */
+       if (algt->mask & CRYPTO_ALG_GENIV) {
+               if (!balg.geniv)
+                       balg.geniv = crypto_default_geniv(alg);
+               err = -EAGAIN;
+               if (strcmp(tmpl->name, balg.geniv))
+                       goto err_drop_alg;
+
+               memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
+               memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
+                      CRYPTO_MAX_ALG_NAME);
+       } else {
+               err = -ENAMETOOLONG;
+               if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+                            "%s(%s)", tmpl->name, alg->cra_name) >=
+                   CRYPTO_MAX_ALG_NAME)
+                       goto err_drop_alg;
+               if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                            "%s(%s)", tmpl->name, alg->cra_driver_name) >=
+                   CRYPTO_MAX_ALG_NAME)
+                       goto err_drop_alg;
+       }
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV;
+       inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
+       inst->alg.cra_priority = alg->cra_priority;
+       inst->alg.cra_blocksize = alg->cra_blocksize;
+       inst->alg.cra_alignmask = alg->cra_alignmask;
+       inst->alg.cra_type = &crypto_givcipher_type;
+
+       inst->alg.cra_ablkcipher.ivsize = balg.ivsize;
+       inst->alg.cra_ablkcipher.min_keysize = balg.min_keysize;
+       inst->alg.cra_ablkcipher.max_keysize = balg.max_keysize;
+       inst->alg.cra_ablkcipher.geniv = balg.geniv;
+
+       inst->alg.cra_ablkcipher.setkey = balg.setkey;
+       inst->alg.cra_ablkcipher.encrypt = balg.encrypt;
+       inst->alg.cra_ablkcipher.decrypt = balg.decrypt;
+
+out:
+       return inst;
+
+err_drop_alg:
+       crypto_drop_skcipher(spawn);
+err_free_inst:
+       kfree(inst);
+       inst = ERR_PTR(err);
+       goto out;
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_alloc);
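/*
 * Editor's note, not part of the patch: when the CRYPTO_ALG_GENIV branch
 * above is taken the instance inherits the inner algorithm's names, so the
 * default IV generator wrapped around, say, "cbc(aes)" still registers as
 * "cbc(aes)"; only an explicitly requested template keeps the wrapped
 * spelling, e.g. "eseqiv(cbc(aes))".
 */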
+
+void skcipher_geniv_free(struct crypto_instance *inst)
+{
+       crypto_drop_skcipher(crypto_instance_ctx(inst));
+       kfree(inst);
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_free);
+
+int skcipher_geniv_init(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct crypto_ablkcipher *cipher;
+
+       cipher = crypto_spawn_skcipher(crypto_instance_ctx(inst));
+       if (IS_ERR(cipher))
+               return PTR_ERR(cipher);
+
+       tfm->crt_ablkcipher.base = cipher;
+       tfm->crt_ablkcipher.reqsize += crypto_ablkcipher_reqsize(cipher);
+
+       return 0;
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_init);
+
+void skcipher_geniv_exit(struct crypto_tfm *tfm)
+{
+       crypto_free_ablkcipher(tfm->crt_ablkcipher.base);
+}
+EXPORT_SYMBOL_GPL(skcipher_geniv_exit);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Generic block chaining cipher type");
diff --git a/crypto/camellia.c b/crypto/camellia.c
index 6877ecf..493fee7 100644
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-
-#define CAMELLIA_MIN_KEY_SIZE        16
-#define CAMELLIA_MAX_KEY_SIZE        32
-#define CAMELLIA_BLOCK_SIZE 16
-#define CAMELLIA_TABLE_BYTE_LEN 272
-#define CAMELLIA_TABLE_WORD_LEN (CAMELLIA_TABLE_BYTE_LEN / 4)
-
-typedef u32 KEY_TABLE_TYPE[CAMELLIA_TABLE_WORD_LEN];
-
-
-/* key constants */
-
-#define CAMELLIA_SIGMA1L (0xA09E667FL)
-#define CAMELLIA_SIGMA1R (0x3BCC908BL)
-#define CAMELLIA_SIGMA2L (0xB67AE858L)
-#define CAMELLIA_SIGMA2R (0x4CAA73B2L)
-#define CAMELLIA_SIGMA3L (0xC6EF372FL)
-#define CAMELLIA_SIGMA3R (0xE94F82BEL)
-#define CAMELLIA_SIGMA4L (0x54FF53A5L)
-#define CAMELLIA_SIGMA4R (0xF1D36F1CL)
-#define CAMELLIA_SIGMA5L (0x10E527FAL)
-#define CAMELLIA_SIGMA5R (0xDE682D1DL)
-#define CAMELLIA_SIGMA6L (0xB05688C2L)
-#define CAMELLIA_SIGMA6R (0xB3E6C1FDL)
-
-struct camellia_ctx {
-       int key_length;
-       KEY_TABLE_TYPE key_table;
-};
-
-
-/*
- *  macros
- */
-
-
-# define GETU32(pt) (((u32)(pt)[0] << 24)      \
-                    ^ ((u32)(pt)[1] << 16)     \
-                    ^ ((u32)(pt)[2] <<  8)     \
-                    ^ ((u32)(pt)[3]))
-
-#define COPY4WORD(dst, src)                    \
-    do {                                       \
-       (dst)[0]=(src)[0];                      \
-       (dst)[1]=(src)[1];                      \
-       (dst)[2]=(src)[2];                      \
-       (dst)[3]=(src)[3];                      \
-    }while(0)
-
-#define SWAP4WORD(word)                                \
-    do {                                       \
-       CAMELLIA_SWAP4((word)[0]);              \
-       CAMELLIA_SWAP4((word)[1]);              \
-       CAMELLIA_SWAP4((word)[2]);              \
-       CAMELLIA_SWAP4((word)[3]);              \
-    }while(0)
-
-#define XOR4WORD(a, b)/* a = a ^ b */          \
-    do {                                       \
-       (a)[0]^=(b)[0];                         \
-       (a)[1]^=(b)[1];                         \
-       (a)[2]^=(b)[2];                         \
-       (a)[3]^=(b)[3];                         \
-    }while(0)
-
-#define XOR4WORD2(a, b, c)/* a = b ^ c */      \
-    do {                                       \
-       (a)[0]=(b)[0]^(c)[0];                   \
-       (a)[1]=(b)[1]^(c)[1];                   \
-       (a)[2]=(b)[2]^(c)[2];                   \
-       (a)[3]=(b)[3]^(c)[3];                   \
-    }while(0)
-
-#define CAMELLIA_SUBKEY_L(INDEX) (subkey[(INDEX)*2])
-#define CAMELLIA_SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1])
-
-/* rotation right shift 1byte */
-#define CAMELLIA_RR8(x) (((x) >> 8) + ((x) << 24))
-/* rotation left shift 1bit */
-#define CAMELLIA_RL1(x) (((x) << 1) + ((x) >> 31))
-/* rotation left shift 1byte */
-#define CAMELLIA_RL8(x) (((x) << 8) + ((x) >> 24))
-
-#define CAMELLIA_ROLDQ(ll, lr, rl, rr, w0, w1, bits)   \
-    do {                                               \
-       w0 = ll;                                        \
-       ll = (ll << bits) + (lr >> (32 - bits));        \
-       lr = (lr << bits) + (rl >> (32 - bits));        \
-       rl = (rl << bits) + (rr >> (32 - bits));        \
-       rr = (rr << bits) + (w0 >> (32 - bits));        \
-    } while(0)
-
-#define CAMELLIA_ROLDQo32(ll, lr, rl, rr, w0, w1, bits)        \
-    do {                                               \
-       w0 = ll;                                        \
-       w1 = lr;                                        \
-       ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \
-       lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \
-       rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \
-       rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \
-    } while(0)
-
-#define CAMELLIA_SP1110(INDEX) (camellia_sp1110[(INDEX)])
-#define CAMELLIA_SP0222(INDEX) (camellia_sp0222[(INDEX)])
-#define CAMELLIA_SP3033(INDEX) (camellia_sp3033[(INDEX)])
-#define CAMELLIA_SP4404(INDEX) (camellia_sp4404[(INDEX)])
-
-#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1)     \
-    do {                                                       \
-       il = xl ^ kl;                                           \
-       ir = xr ^ kr;                                           \
-       t0 = il >> 16;                                          \
-       t1 = ir >> 16;                                          \
-       yl = CAMELLIA_SP1110(ir & 0xff)                         \
-           ^ CAMELLIA_SP0222((t1 >> 8) & 0xff)                 \
-           ^ CAMELLIA_SP3033(t1 & 0xff)                        \
-           ^ CAMELLIA_SP4404((ir >> 8) & 0xff);                \
-       yr = CAMELLIA_SP1110((t0 >> 8) & 0xff)                  \
-           ^ CAMELLIA_SP0222(t0 & 0xff)                        \
-           ^ CAMELLIA_SP3033((il >> 8) & 0xff)                 \
-           ^ CAMELLIA_SP4404(il & 0xff);                       \
-       yl ^= yr;                                               \
-       yr = CAMELLIA_RR8(yr);                                  \
-       yr ^= yl;                                               \
-    } while(0)
-
-
-/*
- * for speed up
- *
- */
-#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \
-    do {                                                               \
-       t0 = kll;                                                       \
-       t2 = krr;                                                       \
-       t0 &= ll;                                                       \
-       t2 |= rr;                                                       \
-       rl ^= t2;                                                       \
-       lr ^= CAMELLIA_RL1(t0);                                         \
-       t3 = krl;                                                       \
-       t1 = klr;                                                       \
-       t3 &= rl;                                                       \
-       t1 |= lr;                                                       \
-       ll ^= t1;                                                       \
-       rr ^= CAMELLIA_RL1(t3);                                         \
-    } while(0)
-
-#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir, t0, t1)       \
-    do {                                                               \
-       ir =  CAMELLIA_SP1110(xr & 0xff);                               \
-       il =  CAMELLIA_SP1110((xl>>24) & 0xff);                         \
-       ir ^= CAMELLIA_SP0222((xr>>24) & 0xff);                         \
-       il ^= CAMELLIA_SP0222((xl>>16) & 0xff);                         \
-       ir ^= CAMELLIA_SP3033((xr>>16) & 0xff);                         \
-       il ^= CAMELLIA_SP3033((xl>>8) & 0xff);                          \
-       ir ^= CAMELLIA_SP4404((xr>>8) & 0xff);                          \
-       il ^= CAMELLIA_SP4404(xl & 0xff);                               \
-       il ^= kl;                                                       \
-       ir ^= il ^ kr;                                                  \
-       yl ^= ir;                                                       \
-       yr ^= CAMELLIA_RR8(il) ^ ir;                                    \
-    } while(0)
-
-/**
- * Stuff related to the Camellia key schedule
- */
-#define SUBL(x) subL[(x)]
-#define SUBR(x) subR[(x)]
-
-
 static const u32 camellia_sp1110[256] = {
        0x70707000,0x82828200,0x2c2c2c00,0xececec00,
        0xb3b3b300,0x27272700,0xc0c0c000,0xe5e5e500,
@@ -475,67 +305,348 @@ static const u32 camellia_sp4404[256] = {
 };
 
 
+#define CAMELLIA_MIN_KEY_SIZE        16
+#define CAMELLIA_MAX_KEY_SIZE        32
+#define CAMELLIA_BLOCK_SIZE          16
+#define CAMELLIA_TABLE_BYTE_LEN     272
+
+/*
+ * NB: L and R below stand for 'left' and 'right' as in written numbers.
+ * That is, in (xxxL,xxxR) pair xxxL holds most significant digits,
+ * _not_ least significant ones!
+ */
+
+
+/* key constants */
+
+#define CAMELLIA_SIGMA1L (0xA09E667FL)
+#define CAMELLIA_SIGMA1R (0x3BCC908BL)
+#define CAMELLIA_SIGMA2L (0xB67AE858L)
+#define CAMELLIA_SIGMA2R (0x4CAA73B2L)
+#define CAMELLIA_SIGMA3L (0xC6EF372FL)
+#define CAMELLIA_SIGMA3R (0xE94F82BEL)
+#define CAMELLIA_SIGMA4L (0x54FF53A5L)
+#define CAMELLIA_SIGMA4R (0xF1D36F1CL)
+#define CAMELLIA_SIGMA5L (0x10E527FAL)
+#define CAMELLIA_SIGMA5R (0xDE682D1DL)
+#define CAMELLIA_SIGMA6L (0xB05688C2L)
+#define CAMELLIA_SIGMA6R (0xB3E6C1FDL)
+
+/*
+ *  macros
+ */
+#define GETU32(v, pt) \
+    do { \
+       /* latest breed of gcc is clever enough to use move */ \
+       memcpy(&(v), (pt), 4); \
+       (v) = be32_to_cpu(v); \
+    } while(0)
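
For reference, GETU32() above is just an unaligned big-endian 32-bit load; a
byte-by-byte equivalent (essentially the shift-and-xor macro this patch
removes) would be the following illustrative helper:

/* Illustrative only -- equivalent open-coded big-endian load. */
static inline u32 getu32_ref(const u8 *pt)
{
	return ((u32)pt[0] << 24) | ((u32)pt[1] << 16) |
	       ((u32)pt[2] <<  8) |  (u32)pt[3];
}
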
+
+/* rotation right shift 1byte */
+#define ROR8(x) (((x) >> 8) + ((x) << 24))
+/* rotation left shift 1bit */
+#define ROL1(x) (((x) << 1) + ((x) >> 31))
+/* rotation left shift 1byte */
+#define ROL8(x) (((x) << 8) + ((x) >> 24))
+
+#define ROLDQ(ll, lr, rl, rr, w0, w1, bits)            \
+    do {                                               \
+       w0 = ll;                                        \
+       ll = (ll << bits) + (lr >> (32 - bits));        \
+       lr = (lr << bits) + (rl >> (32 - bits));        \
+       rl = (rl << bits) + (rr >> (32 - bits));        \
+       rr = (rr << bits) + (w0 >> (32 - bits));        \
+    } while(0)
+
+#define ROLDQo32(ll, lr, rl, rr, w0, w1, bits)         \
+    do {                                               \
+       w0 = ll;                                        \
+       w1 = lr;                                        \
+       ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \
+       lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \
+       rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \
+       rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \
+    } while(0)
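
Both rotation macros treat (ll, lr, rl, rr) as a single 128-bit value
ll||lr||rl||rr: ROLDQ() rotates it left by 'bits' for 0 < bits < 32, and
ROLDQo32() handles 32 < bits < 64 (the '+' acts as '|' here because the
shifted-in and shifted-out bit ranges never overlap). A hypothetical
array-based reference for the first case:

/* Illustrative only: 128-bit rotate-left by 0 < bits < 32. */
static inline void rol128(u32 q[4], unsigned int bits)
{
	u32 t = q[0];

	q[0] = (q[0] << bits) | (q[1] >> (32 - bits));
	q[1] = (q[1] << bits) | (q[2] >> (32 - bits));
	q[2] = (q[2] << bits) | (q[3] >> (32 - bits));
	q[3] = (q[3] << bits) | (t    >> (32 - bits));
}
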
+
+#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1)     \
+    do {                                                       \
+       il = xl ^ kl;                                           \
+       ir = xr ^ kr;                                           \
+       t0 = il >> 16;                                          \
+       t1 = ir >> 16;                                          \
+       yl = camellia_sp1110[(u8)(ir     )]                     \
+          ^ camellia_sp0222[    (t1 >> 8)]                     \
+          ^ camellia_sp3033[(u8)(t1     )]                     \
+          ^ camellia_sp4404[(u8)(ir >> 8)];                    \
+       yr = camellia_sp1110[    (t0 >> 8)]                     \
+          ^ camellia_sp0222[(u8)(t0     )]                     \
+          ^ camellia_sp3033[(u8)(il >> 8)]                     \
+          ^ camellia_sp4404[(u8)(il     )];                    \
+       yl ^= yr;                                               \
+       yr = ROR8(yr);                                          \
+       yr ^= yl;                                               \
+    } while(0)
+
+#define SUBKEY_L(INDEX) (subkey[(INDEX)*2])
+#define SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1])
+
+static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
+{
+       u32 dw, tl, tr;
+       u32 kw4l, kw4r;
+       int i;
+
+       /* absorb kw2 to other subkeys */
+       /* round 2 */
+       subL[3] ^= subL[1]; subR[3] ^= subR[1];
+       /* round 4 */
+       subL[5] ^= subL[1]; subR[5] ^= subR[1];
+       /* round 6 */
+       subL[7] ^= subL[1]; subR[7] ^= subR[1];
+       subL[1] ^= subR[1] & ~subR[9];
+       dw = subL[1] & subL[9],
+               subR[1] ^= ROL1(dw); /* modified for FLinv(kl2) */
+       /* round 8 */
+       subL[11] ^= subL[1]; subR[11] ^= subR[1];
+       /* round 10 */
+       subL[13] ^= subL[1]; subR[13] ^= subR[1];
+       /* round 12 */
+       subL[15] ^= subL[1]; subR[15] ^= subR[1];
+       subL[1] ^= subR[1] & ~subR[17];
+       dw = subL[1] & subL[17],
+               subR[1] ^= ROL1(dw); /* modified for FLinv(kl4) */
+       /* round 14 */
+       subL[19] ^= subL[1]; subR[19] ^= subR[1];
+       /* round 16 */
+       subL[21] ^= subL[1]; subR[21] ^= subR[1];
+       /* round 18 */
+       subL[23] ^= subL[1]; subR[23] ^= subR[1];
+       if (max == 24) {
+               /* kw3 */
+               subL[24] ^= subL[1]; subR[24] ^= subR[1];
+
+       /* absorb kw4 to other subkeys */
+               kw4l = subL[25]; kw4r = subR[25];
+       } else {
+               subL[1] ^= subR[1] & ~subR[25];
+               dw = subL[1] & subL[25],
+                       subR[1] ^= ROL1(dw); /* modified for FLinv(kl6) */
+               /* round 20 */
+               subL[27] ^= subL[1]; subR[27] ^= subR[1];
+               /* round 22 */
+               subL[29] ^= subL[1]; subR[29] ^= subR[1];
+               /* round 24 */
+               subL[31] ^= subL[1]; subR[31] ^= subR[1];
+               /* kw3 */
+               subL[32] ^= subL[1]; subR[32] ^= subR[1];
+
+       /* absorb kw4 to other subkeys */
+               kw4l = subL[33]; kw4r = subR[33];
+               /* round 23 */
+               subL[30] ^= kw4l; subR[30] ^= kw4r;
+               /* round 21 */
+               subL[28] ^= kw4l; subR[28] ^= kw4r;
+               /* round 19 */
+               subL[26] ^= kw4l; subR[26] ^= kw4r;
+               kw4l ^= kw4r & ~subR[24];
+               dw = kw4l & subL[24],
+                       kw4r ^= ROL1(dw); /* modified for FL(kl5) */
+       }
+       /* round 17 */
+       subL[22] ^= kw4l; subR[22] ^= kw4r;
+       /* round 15 */
+       subL[20] ^= kw4l; subR[20] ^= kw4r;
+       /* round 13 */
+       subL[18] ^= kw4l; subR[18] ^= kw4r;
+       kw4l ^= kw4r & ~subR[16];
+       dw = kw4l & subL[16],
+               kw4r ^= ROL1(dw); /* modified for FL(kl3) */
+       /* round 11 */
+       subL[14] ^= kw4l; subR[14] ^= kw4r;
+       /* round 9 */
+       subL[12] ^= kw4l; subR[12] ^= kw4r;
+       /* round 7 */
+       subL[10] ^= kw4l; subR[10] ^= kw4r;
+       kw4l ^= kw4r & ~subR[8];
+       dw = kw4l & subL[8],
+               kw4r ^= ROL1(dw); /* modified for FL(kl1) */
+       /* round 5 */
+       subL[6] ^= kw4l; subR[6] ^= kw4r;
+       /* round 3 */
+       subL[4] ^= kw4l; subR[4] ^= kw4r;
+       /* round 1 */
+       subL[2] ^= kw4l; subR[2] ^= kw4r;
+       /* kw1 */
+       subL[0] ^= kw4l; subR[0] ^= kw4r;
+
+       /* key XOR is end of F-function */
+       SUBKEY_L(0) = subL[0] ^ subL[2];/* kw1 */
+       SUBKEY_R(0) = subR[0] ^ subR[2];
+       SUBKEY_L(2) = subL[3];       /* round 1 */
+       SUBKEY_R(2) = subR[3];
+       SUBKEY_L(3) = subL[2] ^ subL[4]; /* round 2 */
+       SUBKEY_R(3) = subR[2] ^ subR[4];
+       SUBKEY_L(4) = subL[3] ^ subL[5]; /* round 3 */
+       SUBKEY_R(4) = subR[3] ^ subR[5];
+       SUBKEY_L(5) = subL[4] ^ subL[6]; /* round 4 */
+       SUBKEY_R(5) = subR[4] ^ subR[6];
+       SUBKEY_L(6) = subL[5] ^ subL[7]; /* round 5 */
+       SUBKEY_R(6) = subR[5] ^ subR[7];
+       tl = subL[10] ^ (subR[10] & ~subR[8]);
+       dw = tl & subL[8],  /* FL(kl1) */
+               tr = subR[10] ^ ROL1(dw);
+       SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
+       SUBKEY_R(7) = subR[6] ^ tr;
+       SUBKEY_L(8) = subL[8];       /* FL(kl1) */
+       SUBKEY_R(8) = subR[8];
+       SUBKEY_L(9) = subL[9];       /* FLinv(kl2) */
+       SUBKEY_R(9) = subR[9];
+       tl = subL[7] ^ (subR[7] & ~subR[9]);
+       dw = tl & subL[9],  /* FLinv(kl2) */
+               tr = subR[7] ^ ROL1(dw);
+       SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
+       SUBKEY_R(10) = tr ^ subR[11];
+       SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
+       SUBKEY_R(11) = subR[10] ^ subR[12];
+       SUBKEY_L(12) = subL[11] ^ subL[13]; /* round 9 */
+       SUBKEY_R(12) = subR[11] ^ subR[13];
+       SUBKEY_L(13) = subL[12] ^ subL[14]; /* round 10 */
+       SUBKEY_R(13) = subR[12] ^ subR[14];
+       SUBKEY_L(14) = subL[13] ^ subL[15]; /* round 11 */
+       SUBKEY_R(14) = subR[13] ^ subR[15];
+       tl = subL[18] ^ (subR[18] & ~subR[16]);
+       dw = tl & subL[16], /* FL(kl3) */
+               tr = subR[18] ^ ROL1(dw);
+       SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
+       SUBKEY_R(15) = subR[14] ^ tr;
+       SUBKEY_L(16) = subL[16];     /* FL(kl3) */
+       SUBKEY_R(16) = subR[16];
+       SUBKEY_L(17) = subL[17];     /* FLinv(kl4) */
+       SUBKEY_R(17) = subR[17];
+       tl = subL[15] ^ (subR[15] & ~subR[17]);
+       dw = tl & subL[17], /* FLinv(kl4) */
+               tr = subR[15] ^ ROL1(dw);
+       SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
+       SUBKEY_R(18) = tr ^ subR[19];
+       SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
+       SUBKEY_R(19) = subR[18] ^ subR[20];
+       SUBKEY_L(20) = subL[19] ^ subL[21]; /* round 15 */
+       SUBKEY_R(20) = subR[19] ^ subR[21];
+       SUBKEY_L(21) = subL[20] ^ subL[22]; /* round 16 */
+       SUBKEY_R(21) = subR[20] ^ subR[22];
+       SUBKEY_L(22) = subL[21] ^ subL[23]; /* round 17 */
+       SUBKEY_R(22) = subR[21] ^ subR[23];
+       if (max == 24) {
+               SUBKEY_L(23) = subL[22];     /* round 18 */
+               SUBKEY_R(23) = subR[22];
+               SUBKEY_L(24) = subL[24] ^ subL[23]; /* kw3 */
+               SUBKEY_R(24) = subR[24] ^ subR[23];
+       } else {
+               tl = subL[26] ^ (subR[26] & ~subR[24]);
+               dw = tl & subL[24], /* FL(kl5) */
+                       tr = subR[26] ^ ROL1(dw);
+               SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
+               SUBKEY_R(23) = subR[22] ^ tr;
+               SUBKEY_L(24) = subL[24];     /* FL(kl5) */
+               SUBKEY_R(24) = subR[24];
+               SUBKEY_L(25) = subL[25];     /* FLinv(kl6) */
+               SUBKEY_R(25) = subR[25];
+               tl = subL[23] ^ (subR[23] & ~subR[25]);
+               dw = tl & subL[25], /* FLinv(kl6) */
+                       tr = subR[23] ^ ROL1(dw);
+               SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
+               SUBKEY_R(26) = tr ^ subR[27];
+               SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
+               SUBKEY_R(27) = subR[26] ^ subR[28];
+               SUBKEY_L(28) = subL[27] ^ subL[29]; /* round 21 */
+               SUBKEY_R(28) = subR[27] ^ subR[29];
+               SUBKEY_L(29) = subL[28] ^ subL[30]; /* round 22 */
+               SUBKEY_R(29) = subR[28] ^ subR[30];
+               SUBKEY_L(30) = subL[29] ^ subL[31]; /* round 23 */
+               SUBKEY_R(30) = subR[29] ^ subR[31];
+               SUBKEY_L(31) = subL[30];     /* round 24 */
+               SUBKEY_R(31) = subR[30];
+               SUBKEY_L(32) = subL[32] ^ subL[31]; /* kw3 */
+               SUBKEY_R(32) = subR[32] ^ subR[31];
+       }
+
+       /* apply the inverse of the last half of P-function */
+       i = 2;
+       do {
+               dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = ROL8(dw);/* round 1 */
+               SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
+               dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = ROL8(dw);/* round 2 */
+               SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
+               dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = ROL8(dw);/* round 3 */
+               SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
+               dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = ROL8(dw);/* round 4 */
+               SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
+               dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = ROL8(dw);/* round 5 */
+               SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
+               dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = ROL8(dw);/* round 6 */
+               SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
+               i += 8;
+       } while (i < max);
+}
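
camellia_setup_tail() stores each finished round-key pair through
SUBKEY_L()/SUBKEY_R(), i.e. interleaved in the flat subkey[] table: pair i
sits at subkey[2*i] and subkey[2*i + 1]. The highest pair written is index
'max' (24 or 32 as passed by the callers below), so even the largest schedule
ends at subkey[65] and fits within the 272-byte key table. A hypothetical
accessor, for illustration only:

/* Illustrative only: fetch round-key pair i from the interleaved table. */
static inline void camellia_get_pair(const u32 *subkey, int i, u32 *kl, u32 *kr)
{
	*kl = subkey[2 * i];		/* SUBKEY_L(i) */
	*kr = subkey[2 * i + 1];	/* SUBKEY_R(i) */
}
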
 
 static void camellia_setup128(const unsigned char *key, u32 *subkey)
 {
        u32 kll, klr, krl, krr;
        u32 il, ir, t0, t1, w0, w1;
-       u32 kw4l, kw4r, dw, tl, tr;
        u32 subL[26];
        u32 subR[26];
 
        /**
-        *  k == kll || klr || krl || krr (|| is concatination)
-        */
-       kll = GETU32(key     );
-       klr = GETU32(key +  4);
-       krl = GETU32(key +  8);
-       krr = GETU32(key + 12);
-       /**
-        * generate KL dependent subkeys
+        *  k == kll || klr || krl || krr (|| is concatenation)
         */
+       GETU32(kll, key     );
+       GETU32(klr, key +  4);
+       GETU32(krl, key +  8);
+       GETU32(krr, key + 12);
+
+       /* generate KL dependent subkeys */
        /* kw1 */
-       SUBL(0) = kll; SUBR(0) = klr;
+       subL[0] = kll; subR[0] = klr;
        /* kw2 */
-       SUBL(1) = krl; SUBR(1) = krr;
+       subL[1] = krl; subR[1] = krr;
        /* rotation left shift 15bit */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* k3 */
-       SUBL(4) = kll; SUBR(4) = klr;
+       subL[4] = kll; subR[4] = klr;
        /* k4 */
-       SUBL(5) = krl; SUBR(5) = krr;
+       subL[5] = krl; subR[5] = krr;
        /* rotation left shift 15+30bit */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 30);
        /* k7 */
-       SUBL(10) = kll; SUBR(10) = klr;
+       subL[10] = kll; subR[10] = klr;
        /* k8 */
-       SUBL(11) = krl; SUBR(11) = krr;
+       subL[11] = krl; subR[11] = krr;
        /* rotation left shift 15+30+15bit */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* k10 */
-       SUBL(13) = krl; SUBR(13) = krr;
+       subL[13] = krl; subR[13] = krr;
        /* rotation left shift 15+30+15+17 bit */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 17);
        /* kl3 */
-       SUBL(16) = kll; SUBR(16) = klr;
+       subL[16] = kll; subR[16] = klr;
        /* kl4 */
-       SUBL(17) = krl; SUBR(17) = krr;
+       subL[17] = krl; subR[17] = krr;
        /* rotation left shift 15+30+15+17+17 bit */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 17);
        /* k13 */
-       SUBL(18) = kll; SUBR(18) = klr;
+       subL[18] = kll; subR[18] = klr;
        /* k14 */
-       SUBL(19) = krl; SUBR(19) = krr;
+       subL[19] = krl; subR[19] = krr;
        /* rotation left shift 15+30+15+17+17+17 bit */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 17);
        /* k17 */
-       SUBL(22) = kll; SUBR(22) = klr;
+       subL[22] = kll; subR[22] = klr;
        /* k18 */
-       SUBL(23) = krl; SUBR(23) = krr;
+       subL[23] = krl; subR[23] = krr;
 
        /* generate KA */
-       kll = SUBL(0); klr = SUBR(0);
-       krl = SUBL(1); krr = SUBR(1);
+       kll = subL[0]; klr = subR[0];
+       krl = subL[1]; krr = subR[1];
        CAMELLIA_F(kll, klr,
                   CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R,
                   w0, w1, il, ir, t0, t1);
@@ -555,306 +666,108 @@ static void camellia_setup128(const unsigned char *key, u32 *subkey)
 
        /* generate KA dependent subkeys */
        /* k1, k2 */
-       SUBL(2) = kll; SUBR(2) = klr;
-       SUBL(3) = krl; SUBR(3) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       subL[2] = kll; subR[2] = klr;
+       subL[3] = krl; subR[3] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* k5,k6 */
-       SUBL(6) = kll; SUBR(6) = klr;
-       SUBL(7) = krl; SUBR(7) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       subL[6] = kll; subR[6] = klr;
+       subL[7] = krl; subR[7] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* kl1, kl2 */
-       SUBL(8) = kll; SUBR(8) = klr;
-       SUBL(9) = krl; SUBR(9) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       subL[8] = kll; subR[8] = klr;
+       subL[9] = krl; subR[9] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* k9 */
-       SUBL(12) = kll; SUBR(12) = klr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       subL[12] = kll; subR[12] = klr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* k11, k12 */
-       SUBL(14) = kll; SUBR(14) = klr;
-       SUBL(15) = krl; SUBR(15) = krr;
-       CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34);
+       subL[14] = kll; subR[14] = klr;
+       subL[15] = krl; subR[15] = krr;
+       ROLDQo32(kll, klr, krl, krr, w0, w1, 34);
        /* k15, k16 */
-       SUBL(20) = kll; SUBR(20) = klr;
-       SUBL(21) = krl; SUBR(21) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17);
+       subL[20] = kll; subR[20] = klr;
+       subL[21] = krl; subR[21] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 17);
        /* kw3, kw4 */
-       SUBL(24) = kll; SUBR(24) = klr;
-       SUBL(25) = krl; SUBR(25) = krr;
+       subL[24] = kll; subR[24] = klr;
+       subL[25] = krl; subR[25] = krr;
 
-
-       /* absorb kw2 to other subkeys */
-       /* round 2 */
-       SUBL(3) ^= SUBL(1); SUBR(3) ^= SUBR(1);
-       /* round 4 */
-       SUBL(5) ^= SUBL(1); SUBR(5) ^= SUBR(1);
-       /* round 6 */
-       SUBL(7) ^= SUBL(1); SUBR(7) ^= SUBR(1);
-       SUBL(1) ^= SUBR(1) & ~SUBR(9);
-       dw = SUBL(1) & SUBL(9),
-               SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl2) */
-       /* round 8 */
-       SUBL(11) ^= SUBL(1); SUBR(11) ^= SUBR(1);
-       /* round 10 */
-       SUBL(13) ^= SUBL(1); SUBR(13) ^= SUBR(1);
-       /* round 12 */
-       SUBL(15) ^= SUBL(1); SUBR(15) ^= SUBR(1);
-       SUBL(1) ^= SUBR(1) & ~SUBR(17);
-       dw = SUBL(1) & SUBL(17),
-               SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl4) */
-       /* round 14 */
-       SUBL(19) ^= SUBL(1); SUBR(19) ^= SUBR(1);
-       /* round 16 */
-       SUBL(21) ^= SUBL(1); SUBR(21) ^= SUBR(1);
-       /* round 18 */
-       SUBL(23) ^= SUBL(1); SUBR(23) ^= SUBR(1);
-       /* kw3 */
-       SUBL(24) ^= SUBL(1); SUBR(24) ^= SUBR(1);
-
-       /* absorb kw4 to other subkeys */
-       kw4l = SUBL(25); kw4r = SUBR(25);
-       /* round 17 */
-       SUBL(22) ^= kw4l; SUBR(22) ^= kw4r;
-       /* round 15 */
-       SUBL(20) ^= kw4l; SUBR(20) ^= kw4r;
-       /* round 13 */
-       SUBL(18) ^= kw4l; SUBR(18) ^= kw4r;
-       kw4l ^= kw4r & ~SUBR(16);
-       dw = kw4l & SUBL(16),
-               kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl3) */
-       /* round 11 */
-       SUBL(14) ^= kw4l; SUBR(14) ^= kw4r;
-       /* round 9 */
-       SUBL(12) ^= kw4l; SUBR(12) ^= kw4r;
-       /* round 7 */
-       SUBL(10) ^= kw4l; SUBR(10) ^= kw4r;
-       kw4l ^= kw4r & ~SUBR(8);
-       dw = kw4l & SUBL(8),
-               kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl1) */
-       /* round 5 */
-       SUBL(6) ^= kw4l; SUBR(6) ^= kw4r;
-       /* round 3 */
-       SUBL(4) ^= kw4l; SUBR(4) ^= kw4r;
-       /* round 1 */
-       SUBL(2) ^= kw4l; SUBR(2) ^= kw4r;
-       /* kw1 */
-       SUBL(0) ^= kw4l; SUBR(0) ^= kw4r;
-
-
-       /* key XOR is end of F-function */
-       CAMELLIA_SUBKEY_L(0) = SUBL(0) ^ SUBL(2);/* kw1 */
-       CAMELLIA_SUBKEY_R(0) = SUBR(0) ^ SUBR(2);
-       CAMELLIA_SUBKEY_L(2) = SUBL(3);       /* round 1 */
-       CAMELLIA_SUBKEY_R(2) = SUBR(3);
-       CAMELLIA_SUBKEY_L(3) = SUBL(2) ^ SUBL(4); /* round 2 */
-       CAMELLIA_SUBKEY_R(3) = SUBR(2) ^ SUBR(4);
-       CAMELLIA_SUBKEY_L(4) = SUBL(3) ^ SUBL(5); /* round 3 */
-       CAMELLIA_SUBKEY_R(4) = SUBR(3) ^ SUBR(5);
-       CAMELLIA_SUBKEY_L(5) = SUBL(4) ^ SUBL(6); /* round 4 */
-       CAMELLIA_SUBKEY_R(5) = SUBR(4) ^ SUBR(6);
-       CAMELLIA_SUBKEY_L(6) = SUBL(5) ^ SUBL(7); /* round 5 */
-       CAMELLIA_SUBKEY_R(6) = SUBR(5) ^ SUBR(7);
-       tl = SUBL(10) ^ (SUBR(10) & ~SUBR(8));
-       dw = tl & SUBL(8),  /* FL(kl1) */
-               tr = SUBR(10) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(7) = SUBL(6) ^ tl; /* round 6 */
-       CAMELLIA_SUBKEY_R(7) = SUBR(6) ^ tr;
-       CAMELLIA_SUBKEY_L(8) = SUBL(8);       /* FL(kl1) */
-       CAMELLIA_SUBKEY_R(8) = SUBR(8);
-       CAMELLIA_SUBKEY_L(9) = SUBL(9);       /* FLinv(kl2) */
-       CAMELLIA_SUBKEY_R(9) = SUBR(9);
-       tl = SUBL(7) ^ (SUBR(7) & ~SUBR(9));
-       dw = tl & SUBL(9),  /* FLinv(kl2) */
-               tr = SUBR(7) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(10) = tl ^ SUBL(11); /* round 7 */
-       CAMELLIA_SUBKEY_R(10) = tr ^ SUBR(11);
-       CAMELLIA_SUBKEY_L(11) = SUBL(10) ^ SUBL(12); /* round 8 */
-       CAMELLIA_SUBKEY_R(11) = SUBR(10) ^ SUBR(12);
-       CAMELLIA_SUBKEY_L(12) = SUBL(11) ^ SUBL(13); /* round 9 */
-       CAMELLIA_SUBKEY_R(12) = SUBR(11) ^ SUBR(13);
-       CAMELLIA_SUBKEY_L(13) = SUBL(12) ^ SUBL(14); /* round 10 */
-       CAMELLIA_SUBKEY_R(13) = SUBR(12) ^ SUBR(14);
-       CAMELLIA_SUBKEY_L(14) = SUBL(13) ^ SUBL(15); /* round 11 */
-       CAMELLIA_SUBKEY_R(14) = SUBR(13) ^ SUBR(15);
-       tl = SUBL(18) ^ (SUBR(18) & ~SUBR(16));
-       dw = tl & SUBL(16), /* FL(kl3) */
-               tr = SUBR(18) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(15) = SUBL(14) ^ tl; /* round 12 */
-       CAMELLIA_SUBKEY_R(15) = SUBR(14) ^ tr;
-       CAMELLIA_SUBKEY_L(16) = SUBL(16);     /* FL(kl3) */
-       CAMELLIA_SUBKEY_R(16) = SUBR(16);
-       CAMELLIA_SUBKEY_L(17) = SUBL(17);     /* FLinv(kl4) */
-       CAMELLIA_SUBKEY_R(17) = SUBR(17);
-       tl = SUBL(15) ^ (SUBR(15) & ~SUBR(17));
-       dw = tl & SUBL(17), /* FLinv(kl4) */
-               tr = SUBR(15) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(18) = tl ^ SUBL(19); /* round 13 */
-       CAMELLIA_SUBKEY_R(18) = tr ^ SUBR(19);
-       CAMELLIA_SUBKEY_L(19) = SUBL(18) ^ SUBL(20); /* round 14 */
-       CAMELLIA_SUBKEY_R(19) = SUBR(18) ^ SUBR(20);
-       CAMELLIA_SUBKEY_L(20) = SUBL(19) ^ SUBL(21); /* round 15 */
-       CAMELLIA_SUBKEY_R(20) = SUBR(19) ^ SUBR(21);
-       CAMELLIA_SUBKEY_L(21) = SUBL(20) ^ SUBL(22); /* round 16 */
-       CAMELLIA_SUBKEY_R(21) = SUBR(20) ^ SUBR(22);
-       CAMELLIA_SUBKEY_L(22) = SUBL(21) ^ SUBL(23); /* round 17 */
-       CAMELLIA_SUBKEY_R(22) = SUBR(21) ^ SUBR(23);
-       CAMELLIA_SUBKEY_L(23) = SUBL(22);     /* round 18 */
-       CAMELLIA_SUBKEY_R(23) = SUBR(22);
-       CAMELLIA_SUBKEY_L(24) = SUBL(24) ^ SUBL(23); /* kw3 */
-       CAMELLIA_SUBKEY_R(24) = SUBR(24) ^ SUBR(23);
-
-       /* apply the inverse of the last half of P-function */
-       dw = CAMELLIA_SUBKEY_L(2) ^ CAMELLIA_SUBKEY_R(2),
-               dw = CAMELLIA_RL8(dw);/* round 1 */
-       CAMELLIA_SUBKEY_R(2) = CAMELLIA_SUBKEY_L(2) ^ dw,
-               CAMELLIA_SUBKEY_L(2) = dw;
-       dw = CAMELLIA_SUBKEY_L(3) ^ CAMELLIA_SUBKEY_R(3),
-               dw = CAMELLIA_RL8(dw);/* round 2 */
-       CAMELLIA_SUBKEY_R(3) = CAMELLIA_SUBKEY_L(3) ^ dw,
-               CAMELLIA_SUBKEY_L(3) = dw;
-       dw = CAMELLIA_SUBKEY_L(4) ^ CAMELLIA_SUBKEY_R(4),
-               dw = CAMELLIA_RL8(dw);/* round 3 */
-       CAMELLIA_SUBKEY_R(4) = CAMELLIA_SUBKEY_L(4) ^ dw,
-               CAMELLIA_SUBKEY_L(4) = dw;
-       dw = CAMELLIA_SUBKEY_L(5) ^ CAMELLIA_SUBKEY_R(5),
-               dw = CAMELLIA_RL8(dw);/* round 4 */
-       CAMELLIA_SUBKEY_R(5) = CAMELLIA_SUBKEY_L(5) ^ dw,
-               CAMELLIA_SUBKEY_L(5) = dw;
-       dw = CAMELLIA_SUBKEY_L(6) ^ CAMELLIA_SUBKEY_R(6),
-               dw = CAMELLIA_RL8(dw);/* round 5 */
-       CAMELLIA_SUBKEY_R(6) = CAMELLIA_SUBKEY_L(6) ^ dw,
-               CAMELLIA_SUBKEY_L(6) = dw;
-       dw = CAMELLIA_SUBKEY_L(7) ^ CAMELLIA_SUBKEY_R(7),
-               dw = CAMELLIA_RL8(dw);/* round 6 */
-       CAMELLIA_SUBKEY_R(7) = CAMELLIA_SUBKEY_L(7) ^ dw,
-               CAMELLIA_SUBKEY_L(7) = dw;
-       dw = CAMELLIA_SUBKEY_L(10) ^ CAMELLIA_SUBKEY_R(10),
-               dw = CAMELLIA_RL8(dw);/* round 7 */
-       CAMELLIA_SUBKEY_R(10) = CAMELLIA_SUBKEY_L(10) ^ dw,
-               CAMELLIA_SUBKEY_L(10) = dw;
-       dw = CAMELLIA_SUBKEY_L(11) ^ CAMELLIA_SUBKEY_R(11),
-               dw = CAMELLIA_RL8(dw);/* round 8 */
-       CAMELLIA_SUBKEY_R(11) = CAMELLIA_SUBKEY_L(11) ^ dw,
-               CAMELLIA_SUBKEY_L(11) = dw;
-       dw = CAMELLIA_SUBKEY_L(12) ^ CAMELLIA_SUBKEY_R(12),
-               dw = CAMELLIA_RL8(dw);/* round 9 */
-       CAMELLIA_SUBKEY_R(12) = CAMELLIA_SUBKEY_L(12) ^ dw,
-               CAMELLIA_SUBKEY_L(12) = dw;
-       dw = CAMELLIA_SUBKEY_L(13) ^ CAMELLIA_SUBKEY_R(13),
-               dw = CAMELLIA_RL8(dw);/* round 10 */
-       CAMELLIA_SUBKEY_R(13) = CAMELLIA_SUBKEY_L(13) ^ dw,
-               CAMELLIA_SUBKEY_L(13) = dw;
-       dw = CAMELLIA_SUBKEY_L(14) ^ CAMELLIA_SUBKEY_R(14),
-               dw = CAMELLIA_RL8(dw);/* round 11 */
-       CAMELLIA_SUBKEY_R(14) = CAMELLIA_SUBKEY_L(14) ^ dw,
-               CAMELLIA_SUBKEY_L(14) = dw;
-       dw = CAMELLIA_SUBKEY_L(15) ^ CAMELLIA_SUBKEY_R(15),
-               dw = CAMELLIA_RL8(dw);/* round 12 */
-       CAMELLIA_SUBKEY_R(15) = CAMELLIA_SUBKEY_L(15) ^ dw,
-               CAMELLIA_SUBKEY_L(15) = dw;
-       dw = CAMELLIA_SUBKEY_L(18) ^ CAMELLIA_SUBKEY_R(18),
-               dw = CAMELLIA_RL8(dw);/* round 13 */
-       CAMELLIA_SUBKEY_R(18) = CAMELLIA_SUBKEY_L(18) ^ dw,
-               CAMELLIA_SUBKEY_L(18) = dw;
-       dw = CAMELLIA_SUBKEY_L(19) ^ CAMELLIA_SUBKEY_R(19),
-               dw = CAMELLIA_RL8(dw);/* round 14 */
-       CAMELLIA_SUBKEY_R(19) = CAMELLIA_SUBKEY_L(19) ^ dw,
-               CAMELLIA_SUBKEY_L(19) = dw;
-       dw = CAMELLIA_SUBKEY_L(20) ^ CAMELLIA_SUBKEY_R(20),
-               dw = CAMELLIA_RL8(dw);/* round 15 */
-       CAMELLIA_SUBKEY_R(20) = CAMELLIA_SUBKEY_L(20) ^ dw,
-               CAMELLIA_SUBKEY_L(20) = dw;
-       dw = CAMELLIA_SUBKEY_L(21) ^ CAMELLIA_SUBKEY_R(21),
-               dw = CAMELLIA_RL8(dw);/* round 16 */
-       CAMELLIA_SUBKEY_R(21) = CAMELLIA_SUBKEY_L(21) ^ dw,
-               CAMELLIA_SUBKEY_L(21) = dw;
-       dw = CAMELLIA_SUBKEY_L(22) ^ CAMELLIA_SUBKEY_R(22),
-               dw = CAMELLIA_RL8(dw);/* round 17 */
-       CAMELLIA_SUBKEY_R(22) = CAMELLIA_SUBKEY_L(22) ^ dw,
-               CAMELLIA_SUBKEY_L(22) = dw;
-       dw = CAMELLIA_SUBKEY_L(23) ^ CAMELLIA_SUBKEY_R(23),
-               dw = CAMELLIA_RL8(dw);/* round 18 */
-       CAMELLIA_SUBKEY_R(23) = CAMELLIA_SUBKEY_L(23) ^ dw,
-               CAMELLIA_SUBKEY_L(23) = dw;
-
-       return;
+       camellia_setup_tail(subkey, subL, subR, 24);
 }
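
camellia_setup128() above and camellia_setup256()/camellia_setup192() below
all end in camellia_setup_tail(), so key setup reduces to picking a schedule
builder by key length. A rough sketch only (the real setkey lives further down
in this file; the camellia_ctx layout assumed here is the one shown in the
removed hunk above, and the function name is made up):

/* Illustrative only -- dispatch on key length. */
static void camellia_build_schedule(struct camellia_ctx *ctx, const u8 *key,
				    unsigned int key_len)
{
	switch (key_len) {
	case 16:
		camellia_setup128(key, ctx->key_table);
		break;
	case 24:
		camellia_setup192(key, ctx->key_table);
		break;
	default:	/* 32, validated by the caller */
		camellia_setup256(key, ctx->key_table);
		break;
	}
	ctx->key_length = key_len;
}
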
 
-
 static void camellia_setup256(const unsigned char *key, u32 *subkey)
 {
-       u32 kll,klr,krl,krr;           /* left half of key */
-       u32 krll,krlr,krrl,krrr;       /* right half of key */
+       u32 kll, klr, krl, krr;        /* left half of key */
+       u32 krll, krlr, krrl, krrr;    /* right half of key */
        u32 il, ir, t0, t1, w0, w1;    /* temporary variables */
-       u32 kw4l, kw4r, dw, tl, tr;
        u32 subL[34];
        u32 subR[34];
 
        /**
         *  key = (kll || klr || krl || krr || krll || krlr || krrl || krrr)
-        *  (|| is concatination)
+        *  (|| is concatenation)
         */
-
-       kll  = GETU32(key     );
-       klr  = GETU32(key +  4);
-       krl  = GETU32(key +  8);
-       krr  = GETU32(key + 12);
-       krll = GETU32(key + 16);
-       krlr = GETU32(key + 20);
-       krrl = GETU32(key + 24);
-       krrr = GETU32(key + 28);
+       GETU32(kll,  key     );
+       GETU32(klr,  key +  4);
+       GETU32(krl,  key +  8);
+       GETU32(krr,  key + 12);
+       GETU32(krll, key + 16);
+       GETU32(krlr, key + 20);
+       GETU32(krrl, key + 24);
+       GETU32(krrr, key + 28);
 
        /* generate KL dependent subkeys */
        /* kw1 */
-       SUBL(0) = kll; SUBR(0) = klr;
+       subL[0] = kll; subR[0] = klr;
        /* kw2 */
-       SUBL(1) = krl; SUBR(1) = krr;
-       CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 45);
+       subL[1] = krl; subR[1] = krr;
+       ROLDQo32(kll, klr, krl, krr, w0, w1, 45);
        /* k9 */
-       SUBL(12) = kll; SUBR(12) = klr;
+       subL[12] = kll; subR[12] = klr;
        /* k10 */
-       SUBL(13) = krl; SUBR(13) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       subL[13] = krl; subR[13] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* kl3 */
-       SUBL(16) = kll; SUBR(16) = klr;
+       subL[16] = kll; subR[16] = klr;
        /* kl4 */
-       SUBL(17) = krl; SUBR(17) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17);
+       subL[17] = krl; subR[17] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 17);
        /* k17 */
-       SUBL(22) = kll; SUBR(22) = klr;
+       subL[22] = kll; subR[22] = klr;
        /* k18 */
-       SUBL(23) = krl; SUBR(23) = krr;
-       CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34);
+       subL[23] = krl; subR[23] = krr;
+       ROLDQo32(kll, klr, krl, krr, w0, w1, 34);
        /* k23 */
-       SUBL(30) = kll; SUBR(30) = klr;
+       subL[30] = kll; subR[30] = klr;
        /* k24 */
-       SUBL(31) = krl; SUBR(31) = krr;
+       subL[31] = krl; subR[31] = krr;
 
        /* generate KR dependent subkeys */
-       CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15);
+       ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15);
        /* k3 */
-       SUBL(4) = krll; SUBR(4) = krlr;
+       subL[4] = krll; subR[4] = krlr;
        /* k4 */
-       SUBL(5) = krrl; SUBR(5) = krrr;
-       CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15);
+       subL[5] = krrl; subR[5] = krrr;
+       ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15);
        /* kl1 */
-       SUBL(8) = krll; SUBR(8) = krlr;
+       subL[8] = krll; subR[8] = krlr;
        /* kl2 */
-       SUBL(9) = krrl; SUBR(9) = krrr;
-       CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
+       subL[9] = krrl; subR[9] = krrr;
+       ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
        /* k13 */
-       SUBL(18) = krll; SUBR(18) = krlr;
+       subL[18] = krll; subR[18] = krlr;
        /* k14 */
-       SUBL(19) = krrl; SUBR(19) = krrr;
-       CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34);
+       subL[19] = krrl; subR[19] = krrr;
+       ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34);
        /* k19 */
-       SUBL(26) = krll; SUBR(26) = krlr;
+       subL[26] = krll; subR[26] = krlr;
        /* k20 */
-       SUBL(27) = krrl; SUBR(27) = krrr;
-       CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34);
+       subL[27] = krrl; subR[27] = krrr;
+       ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34);
 
        /* generate KA */
-       kll = SUBL(0) ^ krll; klr = SUBR(0) ^ krlr;
-       krl = SUBL(1) ^ krrl; krr = SUBR(1) ^ krrr;
+       kll = subL[0] ^ krll; klr = subR[0] ^ krlr;
+       krl = subL[1] ^ krrl; krr = subR[1] ^ krrr;
        CAMELLIA_F(kll, klr,
                   CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R,
                   w0, w1, il, ir, t0, t1);
@@ -885,310 +798,50 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey)
        krll ^= w0; krlr ^= w1;
 
        /* generate KA dependent subkeys */
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15);
+       ROLDQ(kll, klr, krl, krr, w0, w1, 15);
        /* k5 */
-       SUBL(6) = kll; SUBR(6) = klr;
+       subL[6] = kll; subR[6] = klr;
        /* k6 */
-       SUBL(7) = krl; SUBR(7) = krr;
-       CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30);
+       subL[7] = krl; subR[7] = krr;
+       ROLDQ(kll, klr, krl, krr, w0, w1, 30);
        /* k11 */
-       SUBL(14) = kll; SUBR(14) = klr;
+       subL[14] = kll; subR[14] = klr;
        /* k12 */
-       SUBL(15) = krl; SUBR(15) = krr;
+       subL[15] = krl; subR[15] = krr;
        /* rotation left shift 32bit */
        /* kl5 */
-       SUBL(24) = klr; SUBR(24) = krl;
+       subL[24] = klr; subR[24] = krl;
        /* kl6 */
-       SUBL(25) = krr; SUBR(25) = kll;
+       subL[25] = krr; subR[25] = kll;
        /* rotation left shift 49 from k11,k12 -> k21,k22 */
-       CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 49);
+       ROLDQo32(kll, klr, krl, krr, w0, w1, 49);
        /* k21 */
-       SUBL(28) = kll; SUBR(28) = klr;
+       subL[28] = kll; subR[28] = klr;
        /* k22 */
-       SUBL(29) = krl; SUBR(29) = krr;
+       subL[29] = krl; subR[29] = krr;
 
        /* generate KB dependent subkeys */
        /* k1 */
-       SUBL(2) = krll; SUBR(2) = krlr;
+       subL[2] = krll; subR[2] = krlr;
        /* k2 */
-       SUBL(3) = krrl; SUBR(3) = krrr;
-       CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
+       subL[3] = krrl; subR[3] = krrr;
+       ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
        /* k7 */
-       SUBL(10) = krll; SUBR(10) = krlr;
+       subL[10] = krll; subR[10] = krlr;
        /* k8 */
-       SUBL(11) = krrl; SUBR(11) = krrr;
-       CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
+       subL[11] = krrl; subR[11] = krrr;
+       ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
        /* k15 */
-       SUBL(20) = krll; SUBR(20) = krlr;
+       subL[20] = krll; subR[20] = krlr;
        /* k16 */
-       SUBL(21) = krrl; SUBR(21) = krrr;
-       CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51);
+       subL[21] = krrl; subR[21] = krrr;
+       ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51);
        /* kw3 */
-       SUBL(32) = krll; SUBR(32) = krlr;
+       subL[32] = krll; subR[32] = krlr;
        /* kw4 */
-       SUBL(33) = krrl; SUBR(33) = krrr;
-
-       /* absorb kw2 to other subkeys */
-       /* round 2 */
-       SUBL(3) ^= SUBL(1); SUBR(3) ^= SUBR(1);
-       /* round 4 */
-       SUBL(5) ^= SUBL(1); SUBR(5) ^= SUBR(1);
-       /* round 6 */
-       SUBL(7) ^= SUBL(1); SUBR(7) ^= SUBR(1);
-       SUBL(1) ^= SUBR(1) & ~SUBR(9);
-       dw = SUBL(1) & SUBL(9),
-               SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl2) */
-       /* round 8 */
-       SUBL(11) ^= SUBL(1); SUBR(11) ^= SUBR(1);
-       /* round 10 */
-       SUBL(13) ^= SUBL(1); SUBR(13) ^= SUBR(1);
-       /* round 12 */
-       SUBL(15) ^= SUBL(1); SUBR(15) ^= SUBR(1);
-       SUBL(1) ^= SUBR(1) & ~SUBR(17);
-       dw = SUBL(1) & SUBL(17),
-               SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl4) */
-       /* round 14 */
-       SUBL(19) ^= SUBL(1); SUBR(19) ^= SUBR(1);
-       /* round 16 */
-       SUBL(21) ^= SUBL(1); SUBR(21) ^= SUBR(1);
-       /* round 18 */
-       SUBL(23) ^= SUBL(1); SUBR(23) ^= SUBR(1);
-       SUBL(1) ^= SUBR(1) & ~SUBR(25);
-       dw = SUBL(1) & SUBL(25),
-               SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl6) */
-       /* round 20 */
-       SUBL(27) ^= SUBL(1); SUBR(27) ^= SUBR(1);
-       /* round 22 */
-       SUBL(29) ^= SUBL(1); SUBR(29) ^= SUBR(1);
-       /* round 24 */
-       SUBL(31) ^= SUBL(1); SUBR(31) ^= SUBR(1);
-       /* kw3 */
-       SUBL(32) ^= SUBL(1); SUBR(32) ^= SUBR(1);
-
-
-       /* absorb kw4 to other subkeys */
-       kw4l = SUBL(33); kw4r = SUBR(33);
-       /* round 23 */
-       SUBL(30) ^= kw4l; SUBR(30) ^= kw4r;
-       /* round 21 */
-       SUBL(28) ^= kw4l; SUBR(28) ^= kw4r;
-       /* round 19 */
-       SUBL(26) ^= kw4l; SUBR(26) ^= kw4r;
-       kw4l ^= kw4r & ~SUBR(24);
-       dw = kw4l & SUBL(24),
-               kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl5) */
-       /* round 17 */
-       SUBL(22) ^= kw4l; SUBR(22) ^= kw4r;
-       /* round 15 */
-       SUBL(20) ^= kw4l; SUBR(20) ^= kw4r;
-       /* round 13 */
-       SUBL(18) ^= kw4l; SUBR(18) ^= kw4r;
-       kw4l ^= kw4r & ~SUBR(16);
-       dw = kw4l & SUBL(16),
-               kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl3) */
-       /* round 11 */
-       SUBL(14) ^= kw4l; SUBR(14) ^= kw4r;
-       /* round 9 */
-       SUBL(12) ^= kw4l; SUBR(12) ^= kw4r;
-       /* round 7 */
-       SUBL(10) ^= kw4l; SUBR(10) ^= kw4r;
-       kw4l ^= kw4r & ~SUBR(8);
-       dw = kw4l & SUBL(8),
-               kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl1) */
-       /* round 5 */
-       SUBL(6) ^= kw4l; SUBR(6) ^= kw4r;
-       /* round 3 */
-       SUBL(4) ^= kw4l; SUBR(4) ^= kw4r;
-       /* round 1 */
-       SUBL(2) ^= kw4l; SUBR(2) ^= kw4r;
-       /* kw1 */
-       SUBL(0) ^= kw4l; SUBR(0) ^= kw4r;
+       subL[33] = krrl; subR[33] = krrr;
 
-       /* key XOR is end of F-function */
-       CAMELLIA_SUBKEY_L(0) = SUBL(0) ^ SUBL(2);/* kw1 */
-       CAMELLIA_SUBKEY_R(0) = SUBR(0) ^ SUBR(2);
-       CAMELLIA_SUBKEY_L(2) = SUBL(3);       /* round 1 */
-       CAMELLIA_SUBKEY_R(2) = SUBR(3);
-       CAMELLIA_SUBKEY_L(3) = SUBL(2) ^ SUBL(4); /* round 2 */
-       CAMELLIA_SUBKEY_R(3) = SUBR(2) ^ SUBR(4);
-       CAMELLIA_SUBKEY_L(4) = SUBL(3) ^ SUBL(5); /* round 3 */
-       CAMELLIA_SUBKEY_R(4) = SUBR(3) ^ SUBR(5);
-       CAMELLIA_SUBKEY_L(5) = SUBL(4) ^ SUBL(6); /* round 4 */
-       CAMELLIA_SUBKEY_R(5) = SUBR(4) ^ SUBR(6);
-       CAMELLIA_SUBKEY_L(6) = SUBL(5) ^ SUBL(7); /* round 5 */
-       CAMELLIA_SUBKEY_R(6) = SUBR(5) ^ SUBR(7);
-       tl = SUBL(10) ^ (SUBR(10) & ~SUBR(8));
-       dw = tl & SUBL(8),  /* FL(kl1) */
-               tr = SUBR(10) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(7) = SUBL(6) ^ tl; /* round 6 */
-       CAMELLIA_SUBKEY_R(7) = SUBR(6) ^ tr;
-       CAMELLIA_SUBKEY_L(8) = SUBL(8);       /* FL(kl1) */
-       CAMELLIA_SUBKEY_R(8) = SUBR(8);
-       CAMELLIA_SUBKEY_L(9) = SUBL(9);       /* FLinv(kl2) */
-       CAMELLIA_SUBKEY_R(9) = SUBR(9);
-       tl = SUBL(7) ^ (SUBR(7) & ~SUBR(9));
-       dw = tl & SUBL(9),  /* FLinv(kl2) */
-               tr = SUBR(7) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(10) = tl ^ SUBL(11); /* round 7 */
-       CAMELLIA_SUBKEY_R(10) = tr ^ SUBR(11);
-       CAMELLIA_SUBKEY_L(11) = SUBL(10) ^ SUBL(12); /* round 8 */
-       CAMELLIA_SUBKEY_R(11) = SUBR(10) ^ SUBR(12);
-       CAMELLIA_SUBKEY_L(12) = SUBL(11) ^ SUBL(13); /* round 9 */
-       CAMELLIA_SUBKEY_R(12) = SUBR(11) ^ SUBR(13);
-       CAMELLIA_SUBKEY_L(13) = SUBL(12) ^ SUBL(14); /* round 10 */
-       CAMELLIA_SUBKEY_R(13) = SUBR(12) ^ SUBR(14);
-       CAMELLIA_SUBKEY_L(14) = SUBL(13) ^ SUBL(15); /* round 11 */
-       CAMELLIA_SUBKEY_R(14) = SUBR(13) ^ SUBR(15);
-       tl = SUBL(18) ^ (SUBR(18) & ~SUBR(16));
-       dw = tl & SUBL(16), /* FL(kl3) */
-               tr = SUBR(18) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(15) = SUBL(14) ^ tl; /* round 12 */
-       CAMELLIA_SUBKEY_R(15) = SUBR(14) ^ tr;
-       CAMELLIA_SUBKEY_L(16) = SUBL(16);     /* FL(kl3) */
-       CAMELLIA_SUBKEY_R(16) = SUBR(16);
-       CAMELLIA_SUBKEY_L(17) = SUBL(17);     /* FLinv(kl4) */
-       CAMELLIA_SUBKEY_R(17) = SUBR(17);
-       tl = SUBL(15) ^ (SUBR(15) & ~SUBR(17));
-       dw = tl & SUBL(17), /* FLinv(kl4) */
-               tr = SUBR(15) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(18) = tl ^ SUBL(19); /* round 13 */
-       CAMELLIA_SUBKEY_R(18) = tr ^ SUBR(19);
-       CAMELLIA_SUBKEY_L(19) = SUBL(18) ^ SUBL(20); /* round 14 */
-       CAMELLIA_SUBKEY_R(19) = SUBR(18) ^ SUBR(20);
-       CAMELLIA_SUBKEY_L(20) = SUBL(19) ^ SUBL(21); /* round 15 */
-       CAMELLIA_SUBKEY_R(20) = SUBR(19) ^ SUBR(21);
-       CAMELLIA_SUBKEY_L(21) = SUBL(20) ^ SUBL(22); /* round 16 */
-       CAMELLIA_SUBKEY_R(21) = SUBR(20) ^ SUBR(22);
-       CAMELLIA_SUBKEY_L(22) = SUBL(21) ^ SUBL(23); /* round 17 */
-       CAMELLIA_SUBKEY_R(22) = SUBR(21) ^ SUBR(23);
-       tl = SUBL(26) ^ (SUBR(26)
-                        & ~SUBR(24));
-       dw = tl & SUBL(24), /* FL(kl5) */
-               tr = SUBR(26) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(23) = SUBL(22) ^ tl; /* round 18 */
-       CAMELLIA_SUBKEY_R(23) = SUBR(22) ^ tr;
-       CAMELLIA_SUBKEY_L(24) = SUBL(24);     /* FL(kl5) */
-       CAMELLIA_SUBKEY_R(24) = SUBR(24);
-       CAMELLIA_SUBKEY_L(25) = SUBL(25);     /* FLinv(kl6) */
-       CAMELLIA_SUBKEY_R(25) = SUBR(25);
-       tl = SUBL(23) ^ (SUBR(23) &
-                        ~SUBR(25));
-       dw = tl & SUBL(25), /* FLinv(kl6) */
-               tr = SUBR(23) ^ CAMELLIA_RL1(dw);
-       CAMELLIA_SUBKEY_L(26) = tl ^ SUBL(27); /* round 19 */
-       CAMELLIA_SUBKEY_R(26) = tr ^ SUBR(27);
-       CAMELLIA_SUBKEY_L(27) = SUBL(26) ^ SUBL(28); /* round 20 */
-       CAMELLIA_SUBKEY_R(27) = SUBR(26) ^ SUBR(28);
-       CAMELLIA_SUBKEY_L(28) = SUBL(27) ^ SUBL(29); /* round 21 */
-       CAMELLIA_SUBKEY_R(28) = SUBR(27) ^ SUBR(29);
-       CAMELLIA_SUBKEY_L(29) = SUBL(28) ^ SUBL(30); /* round 22 */
-       CAMELLIA_SUBKEY_R(29) = SUBR(28) ^ SUBR(30);
-       CAMELLIA_SUBKEY_L(30) = SUBL(29) ^ SUBL(31); /* round 23 */
-       CAMELLIA_SUBKEY_R(30) = SUBR(29) ^ SUBR(31);
-       CAMELLIA_SUBKEY_L(31) = SUBL(30);     /* round 24 */
-       CAMELLIA_SUBKEY_R(31) = SUBR(30);
-       CAMELLIA_SUBKEY_L(32) = SUBL(32) ^ SUBL(31); /* kw3 */
-       CAMELLIA_SUBKEY_R(32) = SUBR(32) ^ SUBR(31);
-
-       /* apply the inverse of the last half of P-function */
-       dw = CAMELLIA_SUBKEY_L(2) ^ CAMELLIA_SUBKEY_R(2),
-               dw = CAMELLIA_RL8(dw);/* round 1 */
-       CAMELLIA_SUBKEY_R(2) = CAMELLIA_SUBKEY_L(2) ^ dw,
-               CAMELLIA_SUBKEY_L(2) = dw;
-       dw = CAMELLIA_SUBKEY_L(3) ^ CAMELLIA_SUBKEY_R(3),
-               dw = CAMELLIA_RL8(dw);/* round 2 */
-       CAMELLIA_SUBKEY_R(3) = CAMELLIA_SUBKEY_L(3) ^ dw,
-               CAMELLIA_SUBKEY_L(3) = dw;
-       dw = CAMELLIA_SUBKEY_L(4) ^ CAMELLIA_SUBKEY_R(4),
-               dw = CAMELLIA_RL8(dw);/* round 3 */
-       CAMELLIA_SUBKEY_R(4) = CAMELLIA_SUBKEY_L(4) ^ dw,
-               CAMELLIA_SUBKEY_L(4) = dw;
-       dw = CAMELLIA_SUBKEY_L(5) ^ CAMELLIA_SUBKEY_R(5),
-               dw = CAMELLIA_RL8(dw);/* round 4 */
-       CAMELLIA_SUBKEY_R(5) = CAMELLIA_SUBKEY_L(5) ^ dw,
-       CAMELLIA_SUBKEY_L(5) = dw;
-       dw = CAMELLIA_SUBKEY_L(6) ^ CAMELLIA_SUBKEY_R(6),
-               dw = CAMELLIA_RL8(dw);/* round 5 */
-       CAMELLIA_SUBKEY_R(6) = CAMELLIA_SUBKEY_L(6) ^ dw,
-               CAMELLIA_SUBKEY_L(6) = dw;
-       dw = CAMELLIA_SUBKEY_L(7) ^ CAMELLIA_SUBKEY_R(7),
-               dw = CAMELLIA_RL8(dw);/* round 6 */
-       CAMELLIA_SUBKEY_R(7) = CAMELLIA_SUBKEY_L(7) ^ dw,
-               CAMELLIA_SUBKEY_L(7) = dw;
-       dw = CAMELLIA_SUBKEY_L(10) ^ CAMELLIA_SUBKEY_R(10),
-               dw = CAMELLIA_RL8(dw);/* round 7 */
-       CAMELLIA_SUBKEY_R(10) = CAMELLIA_SUBKEY_L(10) ^ dw,
-               CAMELLIA_SUBKEY_L(10) = dw;
-       dw = CAMELLIA_SUBKEY_L(11) ^ CAMELLIA_SUBKEY_R(11),
-           dw = CAMELLIA_RL8(dw);/* round 8 */
-       CAMELLIA_SUBKEY_R(11) = CAMELLIA_SUBKEY_L(11) ^ dw,
-               CAMELLIA_SUBKEY_L(11) = dw;
-       dw = CAMELLIA_SUBKEY_L(12) ^ CAMELLIA_SUBKEY_R(12),
-               dw = CAMELLIA_RL8(dw);/* round 9 */
-       CAMELLIA_SUBKEY_R(12) = CAMELLIA_SUBKEY_L(12) ^ dw,
-               CAMELLIA_SUBKEY_L(12) = dw;
-       dw = CAMELLIA_SUBKEY_L(13) ^ CAMELLIA_SUBKEY_R(13),
-               dw = CAMELLIA_RL8(dw);/* round 10 */
-       CAMELLIA_SUBKEY_R(13) = CAMELLIA_SUBKEY_L(13) ^ dw,
-               CAMELLIA_SUBKEY_L(13) = dw;
-       dw = CAMELLIA_SUBKEY_L(14) ^ CAMELLIA_SUBKEY_R(14),
-               dw = CAMELLIA_RL8(dw);/* round 11 */
-       CAMELLIA_SUBKEY_R(14) = CAMELLIA_SUBKEY_L(14) ^ dw,
-               CAMELLIA_SUBKEY_L(14) = dw;
-       dw = CAMELLIA_SUBKEY_L(15) ^ CAMELLIA_SUBKEY_R(15),
-               dw = CAMELLIA_RL8(dw);/* round 12 */
-       CAMELLIA_SUBKEY_R(15) = CAMELLIA_SUBKEY_L(15) ^ dw,
-               CAMELLIA_SUBKEY_L(15) = dw;
-       dw = CAMELLIA_SUBKEY_L(18) ^ CAMELLIA_SUBKEY_R(18),
-               dw = CAMELLIA_RL8(dw);/* round 13 */
-       CAMELLIA_SUBKEY_R(18) = CAMELLIA_SUBKEY_L(18) ^ dw,
-               CAMELLIA_SUBKEY_L(18) = dw;
-       dw = CAMELLIA_SUBKEY_L(19) ^ CAMELLIA_SUBKEY_R(19),
-               dw = CAMELLIA_RL8(dw);/* round 14 */
-       CAMELLIA_SUBKEY_R(19) = CAMELLIA_SUBKEY_L(19) ^ dw,
-               CAMELLIA_SUBKEY_L(19) = dw;
-       dw = CAMELLIA_SUBKEY_L(20) ^ CAMELLIA_SUBKEY_R(20),
-               dw = CAMELLIA_RL8(dw);/* round 15 */
-       CAMELLIA_SUBKEY_R(20) = CAMELLIA_SUBKEY_L(20) ^ dw,
-               CAMELLIA_SUBKEY_L(20) = dw;
-       dw = CAMELLIA_SUBKEY_L(21) ^ CAMELLIA_SUBKEY_R(21),
-               dw = CAMELLIA_RL8(dw);/* round 16 */
-       CAMELLIA_SUBKEY_R(21) = CAMELLIA_SUBKEY_L(21) ^ dw,
-               CAMELLIA_SUBKEY_L(21) = dw;
-       dw = CAMELLIA_SUBKEY_L(22) ^ CAMELLIA_SUBKEY_R(22),
-               dw = CAMELLIA_RL8(dw);/* round 17 */
-       CAMELLIA_SUBKEY_R(22) = CAMELLIA_SUBKEY_L(22) ^ dw,
-               CAMELLIA_SUBKEY_L(22) = dw;
-       dw = CAMELLIA_SUBKEY_L(23) ^ CAMELLIA_SUBKEY_R(23),
-               dw = CAMELLIA_RL8(dw);/* round 18 */
-       CAMELLIA_SUBKEY_R(23) = CAMELLIA_SUBKEY_L(23) ^ dw,
-               CAMELLIA_SUBKEY_L(23) = dw;
-       dw = CAMELLIA_SUBKEY_L(26) ^ CAMELLIA_SUBKEY_R(26),
-               dw = CAMELLIA_RL8(dw);/* round 19 */
-       CAMELLIA_SUBKEY_R(26) = CAMELLIA_SUBKEY_L(26) ^ dw,
-               CAMELLIA_SUBKEY_L(26) = dw;
-       dw = CAMELLIA_SUBKEY_L(27) ^ CAMELLIA_SUBKEY_R(27),
-               dw = CAMELLIA_RL8(dw);/* round 20 */
-       CAMELLIA_SUBKEY_R(27) = CAMELLIA_SUBKEY_L(27) ^ dw,
-               CAMELLIA_SUBKEY_L(27) = dw;
-       dw = CAMELLIA_SUBKEY_L(28) ^ CAMELLIA_SUBKEY_R(28),
-               dw = CAMELLIA_RL8(dw);/* round 21 */
-       CAMELLIA_SUBKEY_R(28) = CAMELLIA_SUBKEY_L(28) ^ dw,
-               CAMELLIA_SUBKEY_L(28) = dw;
-       dw = CAMELLIA_SUBKEY_L(29) ^ CAMELLIA_SUBKEY_R(29),
-               dw = CAMELLIA_RL8(dw);/* round 22 */
-       CAMELLIA_SUBKEY_R(29) = CAMELLIA_SUBKEY_L(29) ^ dw,
-               CAMELLIA_SUBKEY_L(29) = dw;
-       dw = CAMELLIA_SUBKEY_L(30) ^ CAMELLIA_SUBKEY_R(30),
-               dw = CAMELLIA_RL8(dw);/* round 23 */
-       CAMELLIA_SUBKEY_R(30) = CAMELLIA_SUBKEY_L(30) ^ dw,
-               CAMELLIA_SUBKEY_L(30) = dw;
-       dw = CAMELLIA_SUBKEY_L(31) ^ CAMELLIA_SUBKEY_R(31),
-               dw = CAMELLIA_RL8(dw);/* round 24 */
-       CAMELLIA_SUBKEY_R(31) = CAMELLIA_SUBKEY_L(31) ^ dw,
-               CAMELLIA_SUBKEY_L(31) = dw;
-
-       return;
+       camellia_setup_tail(subkey, subL, subR, 32);
 }
 
 static void camellia_setup192(const unsigned char *key, u32 *subkey)
@@ -1197,482 +850,168 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
        u32 krll, krlr, krrl,krrr;
 
        memcpy(kk, key, 24);
-       memcpy((unsigned char *)&krll, key+16,4);
-       memcpy((unsigned char *)&krlr, key+20,4);
+       memcpy((unsigned char *)&krll, key+16, 4);
+       memcpy((unsigned char *)&krlr, key+20, 4);
        krrl = ~krll;
        krrr = ~krlr;
        memcpy(kk+24, (unsigned char *)&krrl, 4);
        memcpy(kk+28, (unsigned char *)&krrr, 4);
        camellia_setup256(kk, subkey);
-       return;
 }
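
The 192-bit case follows Camellia's key extension rule: the missing low
64 bits of KR are the bitwise complement of its high 64 bits, so a 24-byte
key k is padded to k[0..23] || ~k[16..23] and handed to the 256-bit schedule.
A byte-wise equivalent of the memcpy()/complement sequence above, for
illustration only:

/* Illustrative only: build the 32-byte key that camellia_setup256() sees. */
static void camellia_expand192(const u8 *key, u8 *kk)
{
	int i;

	memcpy(kk, key, 24);			/* k[0..23] */
	for (i = 0; i < 8; i++)
		kk[24 + i] = ~key[16 + i];	/* ~k[16..23] */
}
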
 
 
-/**
- * Stuff related to camellia encryption/decryption
+/*
+ * Encrypt/decrypt
  */
-static void camellia_encrypt128(const u32 *subkey, __be32 *io_text)
-{
-       u32 il,ir,t0,t1;               /* temporary valiables */
-
-       u32 io[4];
-
-       io[0] = be32_to_cpu(io_text[0]);
-       io[1] = be32_to_cpu(io_text[1]);
-       io[2] = be32_to_cpu(io_text[2]);
-       io[3] = be32_to_cpu(io_text[3]);
-
-       /* pre whitening but absorb kw2*/
-       io[0] ^= CAMELLIA_SUBKEY_L(0);
-       io[1] ^= CAMELLIA_SUBKEY_R(0);
-       /* main iteration */
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
-                    CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
-                    CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23),
-                        io[0],io[1],il,ir,t0,t1);
+#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \
+    do {                                                               \
+       t0 = kll;                                                       \
+       t2 = krr;                                                       \
+       t0 &= ll;                                                       \
+       t2 |= rr;                                                       \
+       rl ^= t2;                                                       \
+       lr ^= ROL1(t0);                                                 \
+       t3 = krl;                                                       \
+       t1 = klr;                                                       \
+       t3 &= rl;                                                       \
+       t1 |= lr;                                                       \
+       ll ^= t1;                                                       \
+       rr ^= ROL1(t3);                                                 \
+    } while(0)
 
-       /* post whitening but kw4 */
-       io[2] ^= CAMELLIA_SUBKEY_L(24);
-       io[3] ^= CAMELLIA_SUBKEY_R(24);
-
-       t0 = io[0];
-       t1 = io[1];
-       io[0] = io[2];
-       io[1] = io[3];
-       io[2] = t0;
-       io[3] = t1;
-
-       io_text[0] = cpu_to_be32(io[0]);
-       io_text[1] = cpu_to_be32(io[1]);
-       io_text[2] = cpu_to_be32(io[2]);
-       io_text[3] = cpu_to_be32(io[3]);
-
-       return;
-}
+#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir)               \
+    do {                                                               \
+       ir =  camellia_sp1110[(u8)xr];                                  \
+       il =  camellia_sp1110[    (xl >> 24)];                          \
+       ir ^= camellia_sp0222[    (xr >> 24)];                          \
+       il ^= camellia_sp0222[(u8)(xl >> 16)];                          \
+       ir ^= camellia_sp3033[(u8)(xr >> 16)];                          \
+       il ^= camellia_sp3033[(u8)(xl >> 8)];                           \
+       ir ^= camellia_sp4404[(u8)(xr >> 8)];                           \
+       il ^= camellia_sp4404[(u8)xl];                                  \
+       il ^= kl;                                                       \
+       ir ^= il ^ kr;                                                  \
+       yl ^= ir;                                                       \
+       yr ^= ROR8(il) ^ ir;                                            \
+    } while(0)
 
-static void camellia_decrypt128(const u32 *subkey, __be32 *io_text)
+/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
+static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
 {
-       u32 il,ir,t0,t1;               /* temporary valiables */
+       u32 il,ir,t0,t1;               /* temporary variables */
 
-       u32 io[4];
-
-       io[0] = be32_to_cpu(io_text[0]);
-       io[1] = be32_to_cpu(io_text[1]);
-       io[2] = be32_to_cpu(io_text[2]);
-       io[3] = be32_to_cpu(io_text[3]);
-
-       /* pre whitening but absorb kw2*/
-       io[0] ^= CAMELLIA_SUBKEY_L(24);
-       io[1] ^= CAMELLIA_SUBKEY_R(24);
+       /* pre whitening but absorb kw2 */
+       io[0] ^= SUBKEY_L(0);
+       io[1] ^= SUBKEY_R(0);
 
        /* main iteration */
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
-                    CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
-                    CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
-                        io[0],io[1],il,ir,t0,t1);
-
-       /* post whitening but kw4 */
-       io[2] ^= CAMELLIA_SUBKEY_L(0);
-       io[3] ^= CAMELLIA_SUBKEY_R(0);
-
-       t0 = io[0];
-       t1 = io[1];
-       io[0] = io[2];
-       io[1] = io[3];
-       io[2] = t0;
-       io[3] = t1;
-
-       io_text[0] = cpu_to_be32(io[0]);
-       io_text[1] = cpu_to_be32(io[1]);
-       io_text[2] = cpu_to_be32(io[2]);
-       io_text[3] = cpu_to_be32(io[3]);
-
-       return;
-}
-
-
-/**
- * stuff for 192 and 256bit encryption/decryption
- */
-static void camellia_encrypt256(const u32 *subkey, __be32 *io_text)
-{
-       u32 il,ir,t0,t1;           /* temporary valiables */
-
-       u32 io[4];
-
-       io[0] = be32_to_cpu(io_text[0]);
-       io[1] = be32_to_cpu(io_text[1]);
-       io[2] = be32_to_cpu(io_text[2]);
-       io[3] = be32_to_cpu(io_text[3]);
+#define ROUNDS(i) do { \
+       CAMELLIA_ROUNDSM(io[0],io[1], \
+                        SUBKEY_L(i + 2),SUBKEY_R(i + 2), \
+                        io[2],io[3],il,ir); \
+       CAMELLIA_ROUNDSM(io[2],io[3], \
+                        SUBKEY_L(i + 3),SUBKEY_R(i + 3), \
+                        io[0],io[1],il,ir); \
+       CAMELLIA_ROUNDSM(io[0],io[1], \
+                        SUBKEY_L(i + 4),SUBKEY_R(i + 4), \
+                        io[2],io[3],il,ir); \
+       CAMELLIA_ROUNDSM(io[2],io[3], \
+                        SUBKEY_L(i + 5),SUBKEY_R(i + 5), \
+                        io[0],io[1],il,ir); \
+       CAMELLIA_ROUNDSM(io[0],io[1], \
+                        SUBKEY_L(i + 6),SUBKEY_R(i + 6), \
+                        io[2],io[3],il,ir); \
+       CAMELLIA_ROUNDSM(io[2],io[3], \
+                        SUBKEY_L(i + 7),SUBKEY_R(i + 7), \
+                        io[0],io[1],il,ir); \
+} while (0)
+#define FLS(i) do { \
+       CAMELLIA_FLS(io[0],io[1],io[2],io[3], \
+                    SUBKEY_L(i + 0),SUBKEY_R(i + 0), \
+                    SUBKEY_L(i + 1),SUBKEY_R(i + 1), \
+                    t0,t1,il,ir); \
+} while (0)
+
+       ROUNDS(0);
+       FLS(8);
+       ROUNDS(8);
+       FLS(16);
+       ROUNDS(16);
+       if (max == 32) {
+               FLS(24);
+               ROUNDS(24);
+       }
 
-       /* pre whitening but absorb kw2*/
-       io[0] ^= CAMELLIA_SUBKEY_L(0);
-       io[1] ^= CAMELLIA_SUBKEY_R(0);
-
-       /* main iteration */
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
-                    CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
-                    CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(24),CAMELLIA_SUBKEY_R(24),
-                    CAMELLIA_SUBKEY_L(25),CAMELLIA_SUBKEY_R(25),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(26),CAMELLIA_SUBKEY_R(26),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(27),CAMELLIA_SUBKEY_R(27),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(28),CAMELLIA_SUBKEY_R(28),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(29),CAMELLIA_SUBKEY_R(29),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(30),CAMELLIA_SUBKEY_R(30),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(31),CAMELLIA_SUBKEY_R(31),
-                        io[0],io[1],il,ir,t0,t1);
+#undef ROUNDS
+#undef FLS
 
        /* post whitening but kw4 */
-       io[2] ^= CAMELLIA_SUBKEY_L(32);
-       io[3] ^= CAMELLIA_SUBKEY_R(32);
-
-       t0 = io[0];
-       t1 = io[1];
-       io[0] = io[2];
-       io[1] = io[3];
-       io[2] = t0;
-       io[3] = t1;
-
-       io_text[0] = cpu_to_be32(io[0]);
-       io_text[1] = cpu_to_be32(io[1]);
-       io_text[2] = cpu_to_be32(io[2]);
-       io_text[3] = cpu_to_be32(io[3]);
-
-       return;
+       io[2] ^= SUBKEY_L(max);
+       io[3] ^= SUBKEY_R(max);
+       /* NB: io[0],[1] should be swapped with [2],[3] by caller! */
 }
 
-
-static void camellia_decrypt256(const u32 *subkey, __be32 *io_text)
+static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i)
 {
-       u32 il,ir,t0,t1;           /* temporary valiables */
+       u32 il,ir,t0,t1;               /* temporary variables */
 
-       u32 io[4];
-
-       io[0] = be32_to_cpu(io_text[0]);
-       io[1] = be32_to_cpu(io_text[1]);
-       io[2] = be32_to_cpu(io_text[2]);
-       io[3] = be32_to_cpu(io_text[3]);
-
-       /* pre whitening but absorb kw2*/
-       io[0] ^= CAMELLIA_SUBKEY_L(32);
-       io[1] ^= CAMELLIA_SUBKEY_R(32);
+       /* pre whitening but absorb kw2 */
+       io[0] ^= SUBKEY_L(i);
+       io[1] ^= SUBKEY_R(i);
 
        /* main iteration */
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(31),CAMELLIA_SUBKEY_R(31),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(30),CAMELLIA_SUBKEY_R(30),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(29),CAMELLIA_SUBKEY_R(29),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(28),CAMELLIA_SUBKEY_R(28),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(27),CAMELLIA_SUBKEY_R(27),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(26),CAMELLIA_SUBKEY_R(26),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(25),CAMELLIA_SUBKEY_R(25),
-                    CAMELLIA_SUBKEY_L(24),CAMELLIA_SUBKEY_R(24),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
-                    CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
-                        io[0],io[1],il,ir,t0,t1);
-
-       CAMELLIA_FLS(io[0],io[1],io[2],io[3],
-                    CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
-                    CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
-                    t0,t1,il,ir);
-
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
-                        io[0],io[1],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[0],io[1],
-                        CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
-                        io[2],io[3],il,ir,t0,t1);
-       CAMELLIA_ROUNDSM(io[2],io[3],
-                        CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
-                        io[0],io[1],il,ir,t0,t1);
+#define ROUNDS(i) do { \
+       CAMELLIA_ROUNDSM(io[0],io[1], \
+                        SUBKEY_L(i + 7),SUBKEY_R(i + 7), \
+                        io[2],io[3],il,ir); \
+       CAMELLIA_ROUNDSM(io[2],io[3], \
+                        SUBKEY_L(i + 6),SUBKEY_R(i + 6), \
+                        io[0],io[1],il,ir); \
+       CAMELLIA_ROUNDSM(io[0],io[1], \
+                        SUBKEY_L(i + 5),SUBKEY_R(i + 5), \
+                        io[2],io[3],il,ir); \
+       CAMELLIA_ROUNDSM(io[2],io[3], \
+                        SUBKEY_L(i + 4),SUBKEY_R(i + 4), \
+                        io[0],io[1],il,ir); \
+       CAMELLIA_ROUNDSM(io[0],io[1], \
+                        SUBKEY_L(i + 3),SUBKEY_R(i + 3), \
+                        io[2],io[3],il,ir); \
+       CAMELLIA_ROUNDSM(io[2],io[3], \
+                        SUBKEY_L(i + 2),SUBKEY_R(i + 2), \
+                        io[0],io[1],il,ir); \
+} while (0)
+#define FLS(i) do { \
+       CAMELLIA_FLS(io[0],io[1],io[2],io[3], \
+                    SUBKEY_L(i + 1),SUBKEY_R(i + 1), \
+                    SUBKEY_L(i + 0),SUBKEY_R(i + 0), \
+                    t0,t1,il,ir); \
+} while (0)
+
+       if (i == 32) {
+               ROUNDS(24);
+               FLS(24);
+       }
+       ROUNDS(16);
+       FLS(16);
+       ROUNDS(8);
+       FLS(8);
+       ROUNDS(0);
+
+#undef ROUNDS
+#undef FLS
 
        /* post whitening but kw4 */
-       io[2] ^= CAMELLIA_SUBKEY_L(0);
-       io[3] ^= CAMELLIA_SUBKEY_R(0);
-
-       t0 = io[0];
-       t1 = io[1];
-       io[0] = io[2];
-       io[1] = io[3];
-       io[2] = t0;
-       io[3] = t1;
-
-       io_text[0] = cpu_to_be32(io[0]);
-       io_text[1] = cpu_to_be32(io[1]);
-       io_text[2] = cpu_to_be32(io[2]);
-       io_text[3] = cpu_to_be32(io[3]);
-
-       return;
+       io[2] ^= SUBKEY_L(0);
+       io[3] ^= SUBKEY_R(0);
+       /* NB: 0,1 should be swapped with 2,3 by caller! */
 }
 
 
+struct camellia_ctx {
+       int key_length;
+       u32 key_table[CAMELLIA_TABLE_BYTE_LEN / sizeof(u32)];
+};
+
 static int
 camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                 unsigned int key_len)
@@ -1688,7 +1027,7 @@ camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 
        cctx->key_length = key_len;
 
-       switch(key_len) {
+       switch (key_len) {
        case 16:
                camellia_setup128(key, cctx->key_table);
                break;
@@ -1698,68 +1037,59 @@ camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
        case 32:
                camellia_setup256(key, cctx->key_table);
                break;
-       default:
-               break;
        }
 
        return 0;
 }
 
-
 static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
        const __be32 *src = (const __be32 *)in;
        __be32 *dst = (__be32 *)out;
 
-       __be32 tmp[4];
+       u32 tmp[4];
 
-       memcpy(tmp, src, CAMELLIA_BLOCK_SIZE);
+       tmp[0] = be32_to_cpu(src[0]);
+       tmp[1] = be32_to_cpu(src[1]);
+       tmp[2] = be32_to_cpu(src[2]);
+       tmp[3] = be32_to_cpu(src[3]);
 
-       switch (cctx->key_length) {
-       case 16:
-               camellia_encrypt128(cctx->key_table, tmp);
-               break;
-       case 24:
-               /* fall through */
-       case 32:
-               camellia_encrypt256(cctx->key_table, tmp);
-               break;
-       default:
-               break;
-       }
+       camellia_do_encrypt(cctx->key_table, tmp,
+               cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */
+       );
 
-       memcpy(dst, tmp, CAMELLIA_BLOCK_SIZE);
+       /* do_encrypt returns 0,1 swapped with 2,3 */
+       dst[0] = cpu_to_be32(tmp[2]);
+       dst[1] = cpu_to_be32(tmp[3]);
+       dst[2] = cpu_to_be32(tmp[0]);
+       dst[3] = cpu_to_be32(tmp[1]);
 }
 
-
 static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
        const __be32 *src = (const __be32 *)in;
        __be32 *dst = (__be32 *)out;
 
-       __be32 tmp[4];
+       u32 tmp[4];
 
-       memcpy(tmp, src, CAMELLIA_BLOCK_SIZE);
+       tmp[0] = be32_to_cpu(src[0]);
+       tmp[1] = be32_to_cpu(src[1]);
+       tmp[2] = be32_to_cpu(src[2]);
+       tmp[3] = be32_to_cpu(src[3]);
 
-       switch (cctx->key_length) {
-       case 16:
-               camellia_decrypt128(cctx->key_table, tmp);
-               break;
-       case 24:
-               /* fall through */
-       case 32:
-               camellia_decrypt256(cctx->key_table, tmp);
-               break;
-       default:
-               break;
-       }
+       camellia_do_decrypt(cctx->key_table, tmp,
+               cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */
+       );
 
-       memcpy(dst, tmp, CAMELLIA_BLOCK_SIZE);
+       /* do_decrypt returns 0,1 swapped with 2,3 */
+       dst[0] = cpu_to_be32(tmp[2]);
+       dst[1] = cpu_to_be32(tmp[3]);
+       dst[2] = cpu_to_be32(tmp[0]);
+       dst[3] = cpu_to_be32(tmp[1]);
 }
 
-
 static struct crypto_alg camellia_alg = {
        .cra_name               =       "camellia",
        .cra_driver_name        =       "camellia-generic",
@@ -1786,16 +1116,13 @@ static int __init camellia_init(void)
        return crypto_register_alg(&camellia_alg);
 }
 
-
 static void __exit camellia_fini(void)
 {
        crypto_unregister_alg(&camellia_alg);
 }
 
-
 module_init(camellia_init);
 module_exit(camellia_fini);
 
-
 MODULE_DESCRIPTION("Camellia Cipher Algorithm");
 MODULE_LICENSE("GPL");
diff --git a/crypto/cast6.c b/crypto/cast6.c
index 136ab6d..5fd9420 100644
@@ -369,7 +369,7 @@ static const u8 Tr[4][8] = {
 };
 
 /* forward octave */
-static inline void W(u32 *key, unsigned int i) {
+static void W(u32 *key, unsigned int i) {
        u32 I;
        key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]);
        key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]);
@@ -428,7 +428,7 @@ static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
 }
 
 /*forward quad round*/
-static inline void Q (u32 * block, u8 * Kr, u32 * Km) {
+static void Q (u32 * block, u8 * Kr, u32 * Km) {
        u32 I;
        block[2] ^= F1(block[3], Kr[0], Km[0]);
        block[1] ^= F2(block[2], Kr[1], Km[1]);
@@ -437,7 +437,7 @@ static inline void Q (u32 * block, u8 * Kr, u32 * Km) {
 }
 
 /*reverse quad round*/
-static inline void QBAR (u32 * block, u8 * Kr, u32 * Km) {
+static void QBAR (u32 * block, u8 * Kr, u32 * Km) {
        u32 I;
         block[3] ^= F1(block[0], Kr[3], Km[3]);
         block[0] ^= F3(block[1], Kr[2], Km[2]);
diff --git a/crypto/cbc.c b/crypto/cbc.c
index 1f2649e..6affff8 100644
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
+#include <linux/log2.h>
 #include <linux/module.h>
 #include <linux/scatterlist.h>
 #include <linux/slab.h>
 
 struct crypto_cbc_ctx {
        struct crypto_cipher *child;
-       void (*xor)(u8 *dst, const u8 *src, unsigned int bs);
 };
 
 static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
@@ -41,9 +41,7 @@ static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
 
 static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
-                                     struct crypto_cipher *tfm,
-                                     void (*xor)(u8 *, const u8 *,
-                                                 unsigned int))
+                                     struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_encrypt;
@@ -54,7 +52,7 @@ static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
        u8 *iv = walk->iv;
 
        do {
-               xor(iv, src, bsize);
+               crypto_xor(iv, src, bsize);
                fn(crypto_cipher_tfm(tfm), dst, iv);
                memcpy(iv, dst, bsize);
 
@@ -67,9 +65,7 @@ static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
 
 static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
-                                     struct crypto_cipher *tfm,
-                                     void (*xor)(u8 *, const u8 *,
-                                                 unsigned int))
+                                     struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_encrypt;
@@ -79,7 +75,7 @@ static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
        u8 *iv = walk->iv;
 
        do {
-               xor(src, iv, bsize);
+               crypto_xor(src, iv, bsize);
                fn(crypto_cipher_tfm(tfm), src, src);
                iv = src;
 
@@ -99,7 +95,6 @@ static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
-       void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
        int err;
 
        blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -107,11 +102,9 @@ static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
 
        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
-                       nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child,
-                                                           xor);
+                       nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child);
                else
-                       nbytes = crypto_cbc_encrypt_segment(desc, &walk, child,
-                                                           xor);
+                       nbytes = crypto_cbc_encrypt_segment(desc, &walk, child);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
 
@@ -120,9 +113,7 @@ static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
 
 static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
-                                     struct crypto_cipher *tfm,
-                                     void (*xor)(u8 *, const u8 *,
-                                                 unsigned int))
+                                     struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_decrypt;
@@ -134,7 +125,7 @@ static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
 
        do {
                fn(crypto_cipher_tfm(tfm), dst, src);
-               xor(dst, iv, bsize);
+               crypto_xor(dst, iv, bsize);
                iv = src;
 
                src += bsize;
@@ -148,34 +139,29 @@ static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
 
 static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
-                                     struct crypto_cipher *tfm,
-                                     void (*xor)(u8 *, const u8 *,
-                                                 unsigned int))
+                                     struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_decrypt;
        int bsize = crypto_cipher_blocksize(tfm);
-       unsigned long alignmask = crypto_cipher_alignmask(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
-       u8 stack[bsize + alignmask];
-       u8 *first_iv = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
-
-       memcpy(first_iv, walk->iv, bsize);
+       u8 last_iv[bsize];
 
        /* Start of the last block. */
-       src += nbytes - nbytes % bsize - bsize;
-       memcpy(walk->iv, src, bsize);
+       src += nbytes - (nbytes & (bsize - 1)) - bsize;
+       memcpy(last_iv, src, bsize);
 
        for (;;) {
                fn(crypto_cipher_tfm(tfm), src, src);
                if ((nbytes -= bsize) < bsize)
                        break;
-               xor(src, src - bsize, bsize);
+               crypto_xor(src, src - bsize, bsize);
                src -= bsize;
        }
 
-       xor(src, first_iv, bsize);
+       crypto_xor(src, walk->iv, bsize);
+       memcpy(walk->iv, last_iv, bsize);
 
        return nbytes;
 }
@@ -188,7 +174,6 @@ static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
-       void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
        int err;
 
        blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -196,48 +181,15 @@ static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
 
        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
-                       nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child,
-                                                           xor);
+                       nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child);
                else
-                       nbytes = crypto_cbc_decrypt_segment(desc, &walk, child,
-                                                           xor);
+                       nbytes = crypto_cbc_decrypt_segment(desc, &walk, child);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
 
        return err;
 }
 
-static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
-{
-       do {
-               *a++ ^= *b++;
-       } while (--bs);
-}
-
-static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
-{
-       u32 *a = (u32 *)dst;
-       u32 *b = (u32 *)src;
-
-       do {
-               *a++ ^= *b++;
-       } while ((bs -= 4));
-}
-
-static void xor_64(u8 *a, const u8 *b, unsigned int bs)
-{
-       ((u32 *)a)[0] ^= ((u32 *)b)[0];
-       ((u32 *)a)[1] ^= ((u32 *)b)[1];
-}
-
-static void xor_128(u8 *a, const u8 *b, unsigned int bs)
-{
-       ((u32 *)a)[0] ^= ((u32 *)b)[0];
-       ((u32 *)a)[1] ^= ((u32 *)b)[1];
-       ((u32 *)a)[2] ^= ((u32 *)b)[2];
-       ((u32 *)a)[3] ^= ((u32 *)b)[3];
-}
-
 static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
 {
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
@@ -245,22 +197,6 @@ static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
        struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_cipher *cipher;
 
-       switch (crypto_tfm_alg_blocksize(tfm)) {
-       case 8:
-               ctx->xor = xor_64;
-               break;
-
-       case 16:
-               ctx->xor = xor_128;
-               break;
-
-       default:
-               if (crypto_tfm_alg_blocksize(tfm) % 4)
-                       ctx->xor = xor_byte;
-               else
-                       ctx->xor = xor_quad;
-       }
-
        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);
@@ -290,6 +226,10 @@ static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
        if (IS_ERR(alg))
                return ERR_PTR(PTR_ERR(alg));
 
+       inst = ERR_PTR(-EINVAL);
+       if (!is_power_of_2(alg->cra_blocksize))
+               goto out_put_alg;
+
        inst = crypto_alloc_instance("cbc", alg);
        if (IS_ERR(inst))
                goto out_put_alg;
@@ -300,8 +240,9 @@ static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
        inst->alg.cra_alignmask = alg->cra_alignmask;
        inst->alg.cra_type = &crypto_blkcipher_type;
 
-       if (!(alg->cra_blocksize % 4))
-               inst->alg.cra_alignmask |= 3;
+       /* We access the data as u32s when xoring. */
+       inst->alg.cra_alignmask |= __alignof__(u32) - 1;
+
        inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
        inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
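
The cbc.c hunks above drop the hand-rolled xor_byte/xor_quad/xor_64/xor_128
helpers in favour of the common crypto_xor() (hence the is_power_of_2() check
and the u32 alignmask), and simplify in-place decryption: the walk still runs
from the last block towards the first, but the final ciphertext block is kept
in a plain local buffer and only written to walk->iv at the end. The sketch
below shows why the backward walk makes in-place CBC decryption work; the
"block cipher" is just an XOR mask so the example stays self-contained, and
none of it is kernel API.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BS 16   /* block size assumed by this sketch */

/* Stand-in "block cipher": XOR with a fixed mask, which is its own inverse. */
static const uint8_t mask[BS] = "0123456789abcde";

static void toy_crypt_block(uint8_t *dst, const uint8_t *src)
{
        int i;

        for (i = 0; i < BS; i++)
                dst[i] = src[i] ^ mask[i];
}

static void xor_block(uint8_t *a, const uint8_t *b)
{
        int i;

        for (i = 0; i < BS; i++)
                a[i] ^= b[i];
}

/* Plain CBC encryption, in place; n must be a positive multiple of BS. */
static void cbc_encrypt(uint8_t *buf, size_t n, uint8_t *iv)
{
        size_t off;

        for (off = 0; off < n; off += BS) {
                xor_block(buf + off, iv);
                toy_crypt_block(buf + off, buf + off);
                memcpy(iv, buf + off, BS);
        }
}

/* In-place CBC decryption walking from the last block to the first, as
 * crypto_cbc_decrypt_inplace() does: the preceding ciphertext block is
 * still intact when it is needed as XOR input, and the saved last
 * ciphertext block becomes the chaining IV for the next call. */
static void cbc_decrypt_inplace(uint8_t *buf, size_t n, uint8_t *iv)
{
        uint8_t last_iv[BS];
        uint8_t *src = buf + n - BS;

        memcpy(last_iv, src, BS);
        for (;;) {
                toy_crypt_block(src, src);      /* "decrypt" block in place */
                if (src == buf)
                        break;
                xor_block(src, src - BS);       /* previous ciphertext block */
                src -= BS;
        }
        xor_block(src, iv);                     /* first block uses the IV */
        memcpy(iv, last_iv, BS);
}

int main(void)
{
        uint8_t iv_e[BS] = { 1 }, iv_d[BS] = { 1 };
        uint8_t buf[3 * BS] = "three blocks of sixteen bytes each, more or";
        uint8_t ref[sizeof(buf)];

        memcpy(ref, buf, sizeof(buf));
        cbc_encrypt(buf, sizeof(buf), iv_e);
        cbc_decrypt_inplace(buf, sizeof(buf), iv_d);
        printf("%s\n", memcmp(buf, ref, sizeof(buf)) ? "mismatch" : "roundtrip ok");
        return 0;
}

With a real cipher the two directions of course differ; only the chaining
order matters here: each ciphertext block is the XOR input for the block
after it, so it may be decrypted (and overwritten) only once that later
block has already been handled.
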
diff --git a/crypto/ccm.c b/crypto/ccm.c
new file mode 100644
index 0000000..7cf7e5a
--- /dev/null
+++ b/crypto/ccm.c
@@ -0,0 +1,889 @@
+/*
+ * CCM: Counter with CBC-MAC
+ *
+ * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+
+#include "internal.h"
+
+struct ccm_instance_ctx {
+       struct crypto_skcipher_spawn ctr;
+       struct crypto_spawn cipher;
+};
+
+struct crypto_ccm_ctx {
+       struct crypto_cipher *cipher;
+       struct crypto_ablkcipher *ctr;
+};
+
+struct crypto_rfc4309_ctx {
+       struct crypto_aead *child;
+       u8 nonce[3];
+};
+
+struct crypto_ccm_req_priv_ctx {
+       u8 odata[16];
+       u8 idata[16];
+       u8 auth_tag[16];
+       u32 ilen;
+       u32 flags;
+       struct scatterlist src[2];
+       struct scatterlist dst[2];
+       struct ablkcipher_request abreq;
+};
+
+static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
+       struct aead_request *req)
+{
+       unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
+
+       return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
+}
+
+static int set_msg_len(u8 *block, unsigned int msglen, int csize)
+{
+       __be32 data;
+
+       memset(block, 0, csize);
+       block += csize;
+
+       if (csize >= 4)
+               csize = 4;
+       else if (msglen > (1 << (8 * csize)))
+               return -EOVERFLOW;
+
+       data = cpu_to_be32(msglen);
+       memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
+
+       return 0;
+}
+
+static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
+                            unsigned int keylen)
+{
+       struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_ablkcipher *ctr = ctx->ctr;
+       struct crypto_cipher *tfm = ctx->cipher;
+       int err = 0;
+
+       crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
+       crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
+                                   CRYPTO_TFM_REQ_MASK);
+       err = crypto_ablkcipher_setkey(ctr, key, keylen);
+       crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
+                             CRYPTO_TFM_RES_MASK);
+       if (err)
+               goto out;
+
+       crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
+       crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
+                                   CRYPTO_TFM_REQ_MASK);
+       err = crypto_cipher_setkey(tfm, key, keylen);
+       crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
+                             CRYPTO_TFM_RES_MASK);
+
+out:
+       return err;
+}
+
+static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
+                                 unsigned int authsize)
+{
+       switch (authsize) {
+       case 4:
+       case 6:
+       case 8:
+       case 10:
+       case 12:
+       case 14:
+       case 16:
+               break;
+       default:
+               return -EINVAL;
+       }
+
+       return 0;
+}
+
+static int format_input(u8 *info, struct aead_request *req,
+                       unsigned int cryptlen)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       unsigned int lp = req->iv[0];
+       unsigned int l = lp + 1;
+       unsigned int m;
+
+       m = crypto_aead_authsize(aead);
+
+       memcpy(info, req->iv, 16);
+
+       /* format control info per RFC 3610 and
+        * NIST Special Publication 800-38C
+        */
+       *info |= (8 * ((m - 2) / 2));
+       if (req->assoclen)
+               *info |= 64;
+
+       return set_msg_len(info + 16 - l, cryptlen, l);
+}
+
+static int format_adata(u8 *adata, unsigned int a)
+{
+       int len = 0;
+
+       /* add control info for associated data
+        * RFC 3610 and NIST Special Publication 800-38C
+        */
+       if (a < 65280) {
+               *(__be16 *)adata = cpu_to_be16(a);
+               len = 2;
+       } else  {
+               *(__be16 *)adata = cpu_to_be16(0xfffe);
+               *(__be32 *)&adata[2] = cpu_to_be32(a);
+               len = 6;
+       }
+
+       return len;
+}
+
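
format_input() and set_msg_len() above lay out CCM's first CBC-MAC block B_0
as specified by RFC 3610: a flags byte carrying Adata in bit 6, (M-2)/2 in
bits 5..3 and L' = L-1 in bits 2..0, then the nonce, then the message length
big-endian in the trailing L bytes; format_adata() prefixes the associated
data with a 2-byte length, or with 0xfffe plus a 4-byte length once it
reaches 0xff00 (65280). The standalone sketch below reproduces that flags and
length arithmetic for one illustrative parameter set (8-byte tag, L = 4); it
restates the logic for clarity and is not the kernel code itself.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* B_0 flags byte: 64*Adata | 8*((M-2)/2) | L', as format_input() builds it. */
static uint8_t ccm_b0_flags(int have_adata, unsigned int m, unsigned int lprime)
{
        return (have_adata ? 64 : 0) | (8 * ((m - 2) / 2)) | lprime;
}

/* Big-endian message length in the trailing lsize bytes of the field,
 * i.e. the job set_msg_len() does with cpu_to_be32(). */
static int ccm_set_msg_len(uint8_t *field, uint32_t msglen, int lsize)
{
        uint8_t be[4] = { msglen >> 24, msglen >> 16, msglen >> 8, msglen };
        int copy = lsize > 4 ? 4 : lsize;

        if (lsize < 4 && msglen >= (1u << (8 * lsize)))
                return -1;      /* message too long for this L */

        memset(field, 0, lsize);
        memcpy(field + lsize - copy, be + 4 - copy, copy);
        return 0;
}

int main(void)
{
        uint8_t b0[16] = { 0 };
        unsigned int m = 8;             /* 8-byte auth tag */
        unsigned int lprime = 3;        /* iv[0] = 3, i.e. L = 4 length bytes */

        b0[0] = ccm_b0_flags(1, m, lprime);
        /* the nonce would sit in b0[1..11]; the last L bytes hold the length */
        ccm_set_msg_len(b0 + 16 - (lprime + 1), 1000, lprime + 1);

        printf("flags=0x%02x len=%02x %02x %02x %02x\n",
               b0[0], b0[12], b0[13], b0[14], b0[15]);
        /* prints: flags=0x5b len=00 00 03 e8 */
        return 0;
}

For these parameters the flags byte comes out as 64 + 8*3 + 3 = 0x5b, and a
1000-byte message encodes as 00 00 03 e8 in the last four bytes of B_0.
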
+static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
+                      struct crypto_ccm_req_priv_ctx *pctx)
+{
+       unsigned int bs = 16;
+       u8 *odata = pctx->odata;
+       u8 *idata = pctx->idata;
+       int datalen, getlen;
+
+       datalen = n;
+
+       /* first time in here, block may be partially filled. */
+       getlen = bs - pctx->ilen;
+       if (datalen >= getlen) {
+               memcpy(idata + pctx->ilen, data, getlen);
+               crypto_xor(odata, idata, bs);
+               crypto_cipher_encrypt_one(tfm, odata, odata);
+               datalen -= getlen;
+               data += getlen;
+               pctx->ilen = 0;
+       }
+
+       /* now encrypt rest of data */
+       while (datalen >= bs) {
+               crypto_xor(odata, data, bs);
+               crypto_cipher_encrypt_one(tfm, odata, odata);
+
+               datalen -= bs;
+               data += bs;
+       }
+
+       /* check and see if there's leftover data that wasn't
+        * enough to fill a block.
+        */
+       if (datalen) {
+               memcpy(idata + pctx->ilen, data, datalen);
+               pctx->ilen += datalen;
+       }
+}
+
+static void get_data_to_compute(struct crypto_cipher *tfm,
+                              struct crypto_ccm_req_priv_ctx *pctx,
+                              struct scatterlist *sg, unsigned int len)
+{
+       struct scatter_walk walk;
+       u8 *data_src;
+       int n;
+
+       scatterwalk_start(&walk, sg);
+
+       while (len) {
+               n = scatterwalk_clamp(&walk, len);
+               if (!n) {
+                       scatterwalk_start(&walk, sg_next(walk.sg));
+                       n = scatterwalk_clamp(&walk, len);
+               }
+               data_src = scatterwalk_map(&walk, 0);
+
+               compute_mac(tfm, data_src, n, pctx);
+               len -= n;
+
+               scatterwalk_unmap(data_src, 0);
+               scatterwalk_advance(&walk, n);
+               scatterwalk_done(&walk, 0, len);
+               if (len)
+                       crypto_yield(pctx->flags);
+       }
+
+       /* any leftover needs padding and then encrypted */
+       if (pctx->ilen) {
+               int padlen;
+               u8 *odata = pctx->odata;
+               u8 *idata = pctx->idata;
+
+               padlen = 16 - pctx->ilen;
+               memset(idata + pctx->ilen, 0, padlen);
+               crypto_xor(odata, idata, 16);
+               crypto_cipher_encrypt_one(tfm, odata, odata);
+               pctx->ilen = 0;
+       }
+}
+
+static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
+                          unsigned int cryptlen)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
+       struct crypto_cipher *cipher = ctx->cipher;
+       unsigned int assoclen = req->assoclen;
+       u8 *odata = pctx->odata;
+       u8 *idata = pctx->idata;
+       int err;
+
+       /* format control data for input */
+       err = format_input(odata, req, cryptlen);
+       if (err)
+               goto out;
+
+       /* encrypt first block to use as start in computing mac  */
+       crypto_cipher_encrypt_one(cipher, odata, odata);
+
+       /* format associated data and compute into mac */
+       if (assoclen) {
+               pctx->ilen = format_adata(idata, assoclen);
+               get_data_to_compute(cipher, pctx, req->assoc, req->assoclen);
+       }
+
+       /* compute plaintext into mac */
+       get_data_to_compute(cipher, pctx, plain, cryptlen);
+
+out:
+       return err;
+}
+
+static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
+{
+       struct aead_request *req = areq->data;
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
+       u8 *odata = pctx->odata;
+
+       if (!err)
+               scatterwalk_map_and_copy(odata, req->dst, req->cryptlen,
+                                        crypto_aead_authsize(aead), 1);
+       aead_request_complete(req, err);
+}
+
+static inline int crypto_ccm_check_iv(const u8 *iv)
+{
+       /* 2 <= L <= 8, so 1 <= L' <= 7. */
+       if (1 > iv[0] || iv[0] > 7)
+               return -EINVAL;
+
+       return 0;
+}
+
+static int crypto_ccm_encrypt(struct aead_request *req)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
+       struct ablkcipher_request *abreq = &pctx->abreq;
+       struct scatterlist *dst;
+       unsigned int cryptlen = req->cryptlen;
+       u8 *odata = pctx->odata;
+       u8 *iv = req->iv;
+       int err;
+
+       err = crypto_ccm_check_iv(iv);
+       if (err)
+               return err;
+
+       pctx->flags = aead_request_flags(req);
+
+       err = crypto_ccm_auth(req, req->src, cryptlen);
+       if (err)
+               return err;
+
+        /* Note: rfc 3610 and NIST 800-38C require counter of
+        * zero to encrypt auth tag.
+        */
+       memset(iv + 15 - iv[0], 0, iv[0] + 1);
+
+       sg_init_table(pctx->src, 2);
+       sg_set_buf(pctx->src, odata, 16);
+       scatterwalk_sg_chain(pctx->src, 2, req->src);
+
+       dst = pctx->src;
+       if (req->src != req->dst) {
+               sg_init_table(pctx->dst, 2);
+               sg_set_buf(pctx->dst, odata, 16);
+               scatterwalk_sg_chain(pctx->dst, 2, req->dst);
+               dst = pctx->dst;
+       }
+
+       ablkcipher_request_set_tfm(abreq, ctx->ctr);
+       ablkcipher_request_set_callback(abreq, pctx->flags,
+                                       crypto_ccm_encrypt_done, req);
+       ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
+       err = crypto_ablkcipher_encrypt(abreq);
+       if (err)
+               return err;
+
+       /* copy authtag to end of dst */
+       scatterwalk_map_and_copy(odata, req->dst, cryptlen,
+                                crypto_aead_authsize(aead), 1);
+       return err;
+}
+
+static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
+                                  int err)
+{
+       struct aead_request *req = areq->data;
+       struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       unsigned int authsize = crypto_aead_authsize(aead);
+       unsigned int cryptlen = req->cryptlen - authsize;
+
+       if (!err) {
+               err = crypto_ccm_auth(req, req->dst, cryptlen);
+               if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize))
+                       err = -EBADMSG;
+       }
+       aead_request_complete(req, err);
+}
+
+static int crypto_ccm_decrypt(struct aead_request *req)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
+       struct ablkcipher_request *abreq = &pctx->abreq;
+       struct scatterlist *dst;
+       unsigned int authsize = crypto_aead_authsize(aead);
+       unsigned int cryptlen = req->cryptlen;
+       u8 *authtag = pctx->auth_tag;
+       u8 *odata = pctx->odata;
+       u8 *iv = req->iv;
+       int err;
+
+       if (cryptlen < authsize)
+               return -EINVAL;
+       cryptlen -= authsize;
+
+       err = crypto_ccm_check_iv(iv);
+       if (err)
+               return err;
+
+       pctx->flags = aead_request_flags(req);
+
+       scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0);
+
+       memset(iv + 15 - iv[0], 0, iv[0] + 1);
+
+       sg_init_table(pctx->src, 2);
+       sg_set_buf(pctx->src, authtag, 16);
+       scatterwalk_sg_chain(pctx->src, 2, req->src);
+
+       dst = pctx->src;
+       if (req->src != req->dst) {
+               sg_init_table(pctx->dst, 2);
+               sg_set_buf(pctx->dst, authtag, 16);
+               scatterwalk_sg_chain(pctx->dst, 2, req->dst);
+               dst = pctx->dst;
+       }
+
+       ablkcipher_request_set_tfm(abreq, ctx->ctr);
+       ablkcipher_request_set_callback(abreq, pctx->flags,
+                                       crypto_ccm_decrypt_done, req);
+       ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
+       err = crypto_ablkcipher_decrypt(abreq);
+       if (err)
+               return err;
+
+       err = crypto_ccm_auth(req, req->dst, cryptlen);
+       if (err)
+               return err;
+
+       /* verify */
+       if (memcmp(authtag, odata, authsize))
+               return -EBADMSG;
+
+       return err;
+}
+
+static int crypto_ccm_init_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst);
+       struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_cipher *cipher;
+       struct crypto_ablkcipher *ctr;
+       unsigned long align;
+       int err;
+
+       cipher = crypto_spawn_cipher(&ictx->cipher);
+       if (IS_ERR(cipher))
+               return PTR_ERR(cipher);
+
+       ctr = crypto_spawn_skcipher(&ictx->ctr);
+       err = PTR_ERR(ctr);
+       if (IS_ERR(ctr))
+               goto err_free_cipher;
+
+       ctx->cipher = cipher;
+       ctx->ctr = ctr;
+
+       align = crypto_tfm_alg_alignmask(tfm);
+       align &= ~(crypto_tfm_ctx_alignment() - 1);
+       tfm->crt_aead.reqsize = align +
+                               sizeof(struct crypto_ccm_req_priv_ctx) +
+                               crypto_ablkcipher_reqsize(ctr);
+
+       return 0;
+
+err_free_cipher:
+       crypto_free_cipher(cipher);
+       return err;
+}
+
+static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_cipher(ctx->cipher);
+       crypto_free_ablkcipher(ctx->ctr);
+}
+
+static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
+                                                      const char *full_name,
+                                                      const char *ctr_name,
+                                                      const char *cipher_name)
+{
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       struct crypto_alg *ctr;
+       struct crypto_alg *cipher;
+       struct ccm_instance_ctx *ictx;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+               return ERR_PTR(-EINVAL);
+
+       cipher = crypto_alg_mod_lookup(cipher_name,  CRYPTO_ALG_TYPE_CIPHER,
+                                      CRYPTO_ALG_TYPE_MASK);
+       err = PTR_ERR(cipher);
+       if (IS_ERR(cipher))
+               return ERR_PTR(err);
+
+       err = -EINVAL;
+       if (cipher->cra_blocksize != 16)
+               goto out_put_cipher;
+
+       inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
+       err = -ENOMEM;
+       if (!inst)
+               goto out_put_cipher;
+
+       ictx = crypto_instance_ctx(inst);
+
+       err = crypto_init_spawn(&ictx->cipher, cipher, inst,
+                               CRYPTO_ALG_TYPE_MASK);
+       if (err)
+               goto err_free_inst;
+
+       crypto_set_skcipher_spawn(&ictx->ctr, inst);
+       err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
+                                  crypto_requires_sync(algt->type,
+                                                       algt->mask));
+       if (err)
+               goto err_drop_cipher;
+
+       ctr = crypto_skcipher_spawn_alg(&ictx->ctr);
+
+       /* Not a stream cipher? */
+       err = -EINVAL;
+       if (ctr->cra_blocksize != 1)
+               goto err_drop_ctr;
+
+       /* We want the real thing! */
+       if (ctr->cra_ablkcipher.ivsize != 16)
+               goto err_drop_ctr;
+
+       err = -ENAMETOOLONG;
+       if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                    "ccm_base(%s,%s)", ctr->cra_driver_name,
+                    cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+               goto err_drop_ctr;
+
+       memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+       inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
+       inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority;
+       inst->alg.cra_blocksize = 1;
+       inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask |
+                                 (__alignof__(u32) - 1);
+       inst->alg.cra_type = &crypto_aead_type;
+       inst->alg.cra_aead.ivsize = 16;
+       inst->alg.cra_aead.maxauthsize = 16;
+       inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
+       inst->alg.cra_init = crypto_ccm_init_tfm;
+       inst->alg.cra_exit = crypto_ccm_exit_tfm;
+       inst->alg.cra_aead.setkey = crypto_ccm_setkey;
+       inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize;
+       inst->alg.cra_aead.encrypt = crypto_ccm_encrypt;
+       inst->alg.cra_aead.decrypt = crypto_ccm_decrypt;
+
+out:
+       crypto_mod_put(cipher);
+       return inst;
+
+err_drop_ctr:
+       crypto_drop_skcipher(&ictx->ctr);
+err_drop_cipher:
+       crypto_drop_spawn(&ictx->cipher);
+err_free_inst:
+       kfree(inst);
+out_put_cipher:
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
+{
+       int err;
+       const char *cipher_name;
+       char ctr_name[CRYPTO_MAX_ALG_NAME];
+       char full_name[CRYPTO_MAX_ALG_NAME];
+
+       cipher_name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(cipher_name);
+       if (IS_ERR(cipher_name))
+               return ERR_PTR(err);
+
+       if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
+                    cipher_name) >= CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-ENAMETOOLONG);
+
+       if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-ENAMETOOLONG);
+
+       return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
+}
+
+static void crypto_ccm_free(struct crypto_instance *inst)
+{
+       struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst);
+
+       crypto_drop_spawn(&ctx->cipher);
+       crypto_drop_skcipher(&ctx->ctr);
+       kfree(inst);
+}
+
+static struct crypto_template crypto_ccm_tmpl = {
+       .name = "ccm",
+       .alloc = crypto_ccm_alloc,
+       .free = crypto_ccm_free,
+       .module = THIS_MODULE,
+};
+
+static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
+{
+       int err;
+       const char *ctr_name;
+       const char *cipher_name;
+       char full_name[CRYPTO_MAX_ALG_NAME];
+
+       ctr_name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(ctr_name);
+       if (IS_ERR(ctr_name))
+               return ERR_PTR(err);
+
+       cipher_name = crypto_attr_alg_name(tb[2]);
+       err = PTR_ERR(cipher_name);
+       if (IS_ERR(cipher_name))
+               return ERR_PTR(err);
+
+       if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
+                    ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-ENAMETOOLONG);
+
+       return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
+}
+
+static struct crypto_template crypto_ccm_base_tmpl = {
+       .name = "ccm_base",
+       .alloc = crypto_ccm_base_alloc,
+       .free = crypto_ccm_free,
+       .module = THIS_MODULE,
+};
+
+static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
+                                unsigned int keylen)
+{
+       struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
+       struct crypto_aead *child = ctx->child;
+       int err;
+
+       if (keylen < 3)
+               return -EINVAL;
+
+       keylen -= 3;
+       memcpy(ctx->nonce, key + keylen, 3);
+
+       crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+       crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
+                                    CRYPTO_TFM_REQ_MASK);
+       err = crypto_aead_setkey(child, key, keylen);
+       crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
+                                     CRYPTO_TFM_RES_MASK);
+
+       return err;
+}
+
+static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
+                                     unsigned int authsize)
+{
+       struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
+
+       switch (authsize) {
+       case 8:
+       case 12:
+       case 16:
+               break;
+       default:
+               return -EINVAL;
+       }
+
+       return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
+static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
+{
+       struct aead_request *subreq = aead_request_ctx(req);
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_aead *child = ctx->child;
+       u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
+                          crypto_aead_alignmask(child) + 1);
+
+       /* L' */
+       iv[0] = 3;
+
+       memcpy(iv + 1, ctx->nonce, 3);
+       memcpy(iv + 4, req->iv, 8);
+
+       aead_request_set_tfm(subreq, child);
+       aead_request_set_callback(subreq, req->base.flags, req->base.complete,
+                                 req->base.data);
+       aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
+       aead_request_set_assoc(subreq, req->assoc, req->assoclen);
+
+       return subreq;
+}
+
+static int crypto_rfc4309_encrypt(struct aead_request *req)
+{
+       req = crypto_rfc4309_crypt(req);
+
+       return crypto_aead_encrypt(req);
+}
+
+static int crypto_rfc4309_decrypt(struct aead_request *req)
+{
+       req = crypto_rfc4309_crypt(req);
+
+       return crypto_aead_decrypt(req);
+}
+
+static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
+       struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_aead *aead;
+       unsigned long align;
+
+       aead = crypto_spawn_aead(spawn);
+       if (IS_ERR(aead))
+               return PTR_ERR(aead);
+
+       ctx->child = aead;
+
+       align = crypto_aead_alignmask(aead);
+       align &= ~(crypto_tfm_ctx_alignment() - 1);
+       tfm->crt_aead.reqsize = sizeof(struct aead_request) +
+                               ALIGN(crypto_aead_reqsize(aead),
+                                     crypto_tfm_ctx_alignment()) +
+                               align + 16;
+
+       return 0;
+}
+
+static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_aead(ctx->child);
+}
+
+static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
+{
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       struct crypto_aead_spawn *spawn;
+       struct crypto_alg *alg;
+       const char *ccm_name;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+               return ERR_PTR(-EINVAL);
+
+       ccm_name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(ccm_name);
+       if (IS_ERR(ccm_name))
+               return ERR_PTR(err);
+
+       inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+       if (!inst)
+               return ERR_PTR(-ENOMEM);
+
+       spawn = crypto_instance_ctx(inst);
+       crypto_set_aead_spawn(spawn, inst);
+       err = crypto_grab_aead(spawn, ccm_name, 0,
+                              crypto_requires_sync(algt->type, algt->mask));
+       if (err)
+               goto out_free_inst;
+
+       alg = crypto_aead_spawn_alg(spawn);
+
+       err = -EINVAL;
+
+       /* We only support 16-byte blocks. */
+       if (alg->cra_aead.ivsize != 16)
+               goto out_drop_alg;
+
+       /* Not a stream cipher? */
+       if (alg->cra_blocksize != 1)
+               goto out_drop_alg;
+
+       err = -ENAMETOOLONG;
+       if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+                    "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
+           snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                    "rfc4309(%s)", alg->cra_driver_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               goto out_drop_alg;
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+       inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
+       inst->alg.cra_priority = alg->cra_priority;
+       inst->alg.cra_blocksize = 1;
+       inst->alg.cra_alignmask = alg->cra_alignmask;
+       inst->alg.cra_type = &crypto_nivaead_type;
+
+       inst->alg.cra_aead.ivsize = 8;
+       inst->alg.cra_aead.maxauthsize = 16;
+
+       inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);
+
+       inst->alg.cra_init = crypto_rfc4309_init_tfm;
+       inst->alg.cra_exit = crypto_rfc4309_exit_tfm;
+
+       inst->alg.cra_aead.setkey = crypto_rfc4309_setkey;
+       inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize;
+       inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt;
+       inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt;
+
+       inst->alg.cra_aead.geniv = "seqiv";
+
+out:
+       return inst;
+
+out_drop_alg:
+       crypto_drop_aead(spawn);
+out_free_inst:
+       kfree(inst);
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static void crypto_rfc4309_free(struct crypto_instance *inst)
+{
+       crypto_drop_spawn(crypto_instance_ctx(inst));
+       kfree(inst);
+}
+
+static struct crypto_template crypto_rfc4309_tmpl = {
+       .name = "rfc4309",
+       .alloc = crypto_rfc4309_alloc,
+       .free = crypto_rfc4309_free,
+       .module = THIS_MODULE,
+};
+
+static int __init crypto_ccm_module_init(void)
+{
+       int err;
+
+       err = crypto_register_template(&crypto_ccm_base_tmpl);
+       if (err)
+               goto out;
+
+       err = crypto_register_template(&crypto_ccm_tmpl);
+       if (err)
+               goto out_undo_base;
+
+       err = crypto_register_template(&crypto_rfc4309_tmpl);
+       if (err)
+               goto out_undo_ccm;
+
+out:
+       return err;
+
+out_undo_ccm:
+       crypto_unregister_template(&crypto_ccm_tmpl);
+out_undo_base:
+       crypto_unregister_template(&crypto_ccm_base_tmpl);
+       goto out;
+}
+
+static void __exit crypto_ccm_module_exit(void)
+{
+       crypto_unregister_template(&crypto_rfc4309_tmpl);
+       crypto_unregister_template(&crypto_ccm_tmpl);
+       crypto_unregister_template(&crypto_ccm_base_tmpl);
+}
+
+module_init(crypto_ccm_module_init);
+module_exit(crypto_ccm_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Counter with CBC MAC");
+MODULE_ALIAS("ccm_base");
+MODULE_ALIAS("rfc4309");
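
As a concrete illustration of the IV that crypto_rfc4309_crypt() hands to the inner CCM instance (a flags octet with L' = 3, the 3-byte nonce carried at the end of the key, the 8-byte per-request IV, and four trailing octets left free for CCM's block counter), the stand-alone sketch below builds that 16-byte block. It is illustrative only; rfc4309_build_iv() and the sample values are invented for the example and are not kernel API.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Illustrative only: mirrors the layout set up by crypto_rfc4309_crypt().
 * The kernel's CCM code fills in the trailing 4-byte block counter. */
static void rfc4309_build_iv(uint8_t iv[16], const uint8_t nonce[3],
                             const uint8_t req_iv[8])
{
        memset(iv, 0, 16);
        iv[0] = 3;                 /* L' = L - 1, with L = 4 counter octets */
        memcpy(iv + 1, nonce, 3);  /* salt taken from the last 3 key bytes */
        memcpy(iv + 4, req_iv, 8); /* 8-byte per-request IV */
}

int main(void)
{
        const uint8_t nonce[3] = { 0x01, 0x02, 0x03 };
        const uint8_t req_iv[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
        uint8_t iv[16];
        int i;

        rfc4309_build_iv(iv, nonce, req_iv);
        for (i = 0; i < 16; i++)
                printf("%02x", iv[i]);
        printf("\n");
        return 0;
}

Running it should print 03010203000102030405060700000000, matching the byte order set up by the memcpy() calls in crypto_rfc4309_crypt().
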
diff --git a/crypto/chainiv.c b/crypto/chainiv.c
new file mode 100644 (file)
index 0000000..d17fa04
--- /dev/null
@@ -0,0 +1,331 @@
+/*
+ * chainiv: Chain IV Generator
+ *
+ * Generate IVs simply by using the last block of the previous encryption.
+ * This is mainly useful for CBC with a synchronous algorithm.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/internal/skcipher.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/random.h>
+#include <linux/spinlock.h>
+#include <linux/string.h>
+#include <linux/workqueue.h>
+
+enum {
+       CHAINIV_STATE_INUSE = 0,
+};
+
+struct chainiv_ctx {
+       spinlock_t lock;
+       char iv[];
+};
+
+struct async_chainiv_ctx {
+       unsigned long state;
+
+       spinlock_t lock;
+       int err;
+
+       struct crypto_queue queue;
+       struct work_struct postponed;
+
+       char iv[];
+};
+
+static int chainiv_givencrypt(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+       unsigned int ivsize;
+       int err;
+
+       ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
+       ablkcipher_request_set_callback(subreq, req->creq.base.flags &
+                                               ~CRYPTO_TFM_REQ_MAY_SLEEP,
+                                       req->creq.base.complete,
+                                       req->creq.base.data);
+       ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
+                                    req->creq.nbytes, req->creq.info);
+
+       spin_lock_bh(&ctx->lock);
+
+       ivsize = crypto_ablkcipher_ivsize(geniv);
+
+       memcpy(req->giv, ctx->iv, ivsize);
+       memcpy(subreq->info, ctx->iv, ivsize);
+
+       err = crypto_ablkcipher_encrypt(subreq);
+       if (err)
+               goto unlock;
+
+       memcpy(ctx->iv, subreq->info, ivsize);
+
+unlock:
+       spin_unlock_bh(&ctx->lock);
+
+       return err;
+}
+
+static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+
+       spin_lock_bh(&ctx->lock);
+       if (crypto_ablkcipher_crt(geniv)->givencrypt !=
+           chainiv_givencrypt_first)
+               goto unlock;
+
+       crypto_ablkcipher_crt(geniv)->givencrypt = chainiv_givencrypt;
+       get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv));
+
+unlock:
+       spin_unlock_bh(&ctx->lock);
+
+       return chainiv_givencrypt(req);
+}
+
+static int chainiv_init_common(struct crypto_tfm *tfm)
+{
+       tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
+
+       return skcipher_geniv_init(tfm);
+}
+
+static int chainiv_init(struct crypto_tfm *tfm)
+{
+       struct chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       spin_lock_init(&ctx->lock);
+
+       return chainiv_init_common(tfm);
+}
+
+static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
+{
+       int queued;
+
+       if (!ctx->queue.qlen) {
+               smp_mb__before_clear_bit();
+               clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
+
+               if (!ctx->queue.qlen ||
+                   test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+                       goto out;
+       }
+
+       queued = schedule_work(&ctx->postponed);
+       BUG_ON(!queued);
+
+out:
+       return ctx->err;
+}
+
+static int async_chainiv_postpone_request(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       int err;
+
+       spin_lock_bh(&ctx->lock);
+       err = skcipher_enqueue_givcrypt(&ctx->queue, req);
+       spin_unlock_bh(&ctx->lock);
+
+       if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+               return err;
+
+       ctx->err = err;
+       return async_chainiv_schedule_work(ctx);
+}
+
+static int async_chainiv_givencrypt_tail(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+       unsigned int ivsize = crypto_ablkcipher_ivsize(geniv);
+
+       memcpy(req->giv, ctx->iv, ivsize);
+       memcpy(subreq->info, ctx->iv, ivsize);
+
+       ctx->err = crypto_ablkcipher_encrypt(subreq);
+       if (ctx->err)
+               goto out;
+
+       memcpy(ctx->iv, subreq->info, ivsize);
+
+out:
+       return async_chainiv_schedule_work(ctx);
+}
+
+static int async_chainiv_givencrypt(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+
+       ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
+       ablkcipher_request_set_callback(subreq, req->creq.base.flags,
+                                       req->creq.base.complete,
+                                       req->creq.base.data);
+       ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
+                                    req->creq.nbytes, req->creq.info);
+
+       if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+               goto postpone;
+
+       if (ctx->queue.qlen) {
+               clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
+               goto postpone;
+       }
+
+       return async_chainiv_givencrypt_tail(req);
+
+postpone:
+       return async_chainiv_postpone_request(req);
+}
+
+static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+
+       if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+               goto out;
+
+       if (crypto_ablkcipher_crt(geniv)->givencrypt !=
+           async_chainiv_givencrypt_first)
+               goto unlock;
+
+       crypto_ablkcipher_crt(geniv)->givencrypt = async_chainiv_givencrypt;
+       get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv));
+
+unlock:
+       clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
+
+out:
+       return async_chainiv_givencrypt(req);
+}
+
+static void async_chainiv_do_postponed(struct work_struct *work)
+{
+       struct async_chainiv_ctx *ctx = container_of(work,
+                                                    struct async_chainiv_ctx,
+                                                    postponed);
+       struct skcipher_givcrypt_request *req;
+       struct ablkcipher_request *subreq;
+
+       /* Only handle one request at a time to avoid hogging keventd. */
+       spin_lock_bh(&ctx->lock);
+       req = skcipher_dequeue_givcrypt(&ctx->queue);
+       spin_unlock_bh(&ctx->lock);
+
+       if (!req) {
+               async_chainiv_schedule_work(ctx);
+               return;
+       }
+
+       subreq = skcipher_givcrypt_reqctx(req);
+       subreq->base.flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
+
+       async_chainiv_givencrypt_tail(req);
+}
+
+static int async_chainiv_init(struct crypto_tfm *tfm)
+{
+       struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       spin_lock_init(&ctx->lock);
+
+       crypto_init_queue(&ctx->queue, 100);
+       INIT_WORK(&ctx->postponed, async_chainiv_do_postponed);
+
+       return chainiv_init_common(tfm);
+}
+
+static void async_chainiv_exit(struct crypto_tfm *tfm)
+{
+       struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       BUG_ON(test_bit(CHAINIV_STATE_INUSE, &ctx->state) || ctx->queue.qlen);
+
+       skcipher_geniv_exit(tfm);
+}
+
+static struct crypto_template chainiv_tmpl;
+
+static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
+{
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0);
+       if (IS_ERR(inst))
+               goto out;
+
+       inst->alg.cra_ablkcipher.givencrypt = chainiv_givencrypt_first;
+
+       inst->alg.cra_init = chainiv_init;
+       inst->alg.cra_exit = skcipher_geniv_exit;
+
+       inst->alg.cra_ctxsize = sizeof(struct chainiv_ctx);
+
+       if (!crypto_requires_sync(algt->type, algt->mask)) {
+               inst->alg.cra_flags |= CRYPTO_ALG_ASYNC;
+
+               inst->alg.cra_ablkcipher.givencrypt =
+                       async_chainiv_givencrypt_first;
+
+               inst->alg.cra_init = async_chainiv_init;
+               inst->alg.cra_exit = async_chainiv_exit;
+
+               inst->alg.cra_ctxsize = sizeof(struct async_chainiv_ctx);
+       }
+
+       inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
+
+out:
+       return inst;
+}
+
+static struct crypto_template chainiv_tmpl = {
+       .name = "chainiv",
+       .alloc = chainiv_alloc,
+       .free = skcipher_geniv_free,
+       .module = THIS_MODULE,
+};
+
+static int __init chainiv_module_init(void)
+{
+       return crypto_register_template(&chainiv_tmpl);
+}
+
+static void __exit chainiv_module_exit(void)
+{
+       crypto_unregister_template(&chainiv_tmpl);
+}
+
+module_init(chainiv_module_init);
+module_exit(chainiv_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Chain IV Generator");
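
The chaining described in the header comment can be shown with a small stand-alone sketch: encrypt a request CBC-style and keep the last ciphertext block as the IV for the next request, which is what chainiv_givencrypt() does with ctx->iv under its spinlock. The toy XOR "cipher" and the helper names below are placeholders invented for the example, not kernel API.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define BLK 8

/* Toy 8-byte "block cipher" (XOR with the key), a placeholder so the
 * chaining idea can be shown without a real cipher. */
static void toy_encrypt(uint8_t *out, const uint8_t *in, const uint8_t *key)
{
        int i;

        for (i = 0; i < BLK; i++)
                out[i] = in[i] ^ key[i];
}

/* CBC-encrypt nblocks and, like chainiv, leave the last ciphertext block
 * behind in iv so it seeds the next request. */
static void cbc_encrypt_chained(uint8_t *dst, const uint8_t *src, int nblocks,
                                const uint8_t *key, uint8_t *iv)
{
        uint8_t tmp[BLK];
        int b, i;

        for (b = 0; b < nblocks; b++) {
                for (i = 0; i < BLK; i++)
                        tmp[i] = src[b * BLK + i] ^ iv[i];
                toy_encrypt(dst + b * BLK, tmp, key);
                memcpy(iv, dst + b * BLK, BLK);   /* chained IV */
        }
}

int main(void)
{
        uint8_t key[BLK] = { 1, 2, 3, 4, 5, 6, 7, 8 };
        uint8_t iv[BLK] = { 0 };   /* chainiv seeds this with random bytes */
        uint8_t msg[2 * BLK];
        uint8_t ct[2 * BLK];
        int i;

        memcpy(msg, "first!! second!!", sizeof(msg));
        cbc_encrypt_chained(ct, msg, 2, key, iv);
        for (i = 0; i < 2 * BLK; i++)
                printf("%02x", ct[i]);
        printf("\n");
        return 0;
}

After the call, iv equals the second ciphertext block, so the next request continues the chain per transform, just as chainiv keeps ctx->iv across requests.
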
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 8bf2da8..074298f 100644 (file)
@@ -228,7 +228,7 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
        struct crypto_alg *alg;
 
        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
-                                 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+                                 CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return ERR_PTR(PTR_ERR(alg));
 
@@ -236,13 +236,15 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
        if (IS_ERR(inst))
                goto out_put_alg;
 
-       inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_ASYNC;
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
        inst->alg.cra_type = &crypto_ablkcipher_type;
 
        inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
        inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
        inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
 
+       inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
+
        inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
 
        inst->alg.cra_init = cryptd_blkcipher_init_tfm;
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index 29f7747..ff7b3de 100644 (file)
  * (at your option) any later version.
  *
  */
+
+#include <crypto/internal/skcipher.h>
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/mm.h>
-#include <linux/crypto.h>
 #include <linux/string.h>
 
 #define NULL_KEY_SIZE          0
 #define NULL_BLOCK_SIZE                1
 #define NULL_DIGEST_SIZE       0
+#define NULL_IV_SIZE           0
 
 static int null_compress(struct crypto_tfm *tfm, const u8 *src,
                         unsigned int slen, u8 *dst, unsigned int *dlen)
@@ -55,6 +57,26 @@ static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
        memcpy(dst, src, NULL_BLOCK_SIZE);
 }
 
+static int skcipher_null_crypt(struct blkcipher_desc *desc,
+                              struct scatterlist *dst,
+                              struct scatterlist *src, unsigned int nbytes)
+{
+       struct blkcipher_walk walk;
+       int err;
+
+       blkcipher_walk_init(&walk, dst, src, nbytes);
+       err = blkcipher_walk_virt(desc, &walk);
+
+       while (walk.nbytes) {
+               if (walk.src.virt.addr != walk.dst.virt.addr)
+                       memcpy(walk.dst.virt.addr, walk.src.virt.addr,
+                              walk.nbytes);
+               err = blkcipher_walk_done(desc, &walk, 0);
+       }
+
+       return err;
+}
+
 static struct crypto_alg compress_null = {
        .cra_name               =       "compress_null",
        .cra_flags              =       CRYPTO_ALG_TYPE_COMPRESS,
@@ -76,6 +98,7 @@ static struct crypto_alg digest_null = {
        .cra_list               =       LIST_HEAD_INIT(digest_null.cra_list),   
        .cra_u                  =       { .digest = {
        .dia_digestsize         =       NULL_DIGEST_SIZE,
+       .dia_setkey             =       null_setkey,
        .dia_init               =       null_init,
        .dia_update             =       null_update,
        .dia_final              =       null_final } }
@@ -96,6 +119,25 @@ static struct crypto_alg cipher_null = {
        .cia_decrypt            =       null_crypt } }
 };
 
+static struct crypto_alg skcipher_null = {
+       .cra_name               =       "ecb(cipher_null)",
+       .cra_driver_name        =       "ecb-cipher_null",
+       .cra_priority           =       100,
+       .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
+       .cra_blocksize          =       NULL_BLOCK_SIZE,
+       .cra_type               =       &crypto_blkcipher_type,
+       .cra_ctxsize            =       0,
+       .cra_module             =       THIS_MODULE,
+       .cra_list               =       LIST_HEAD_INIT(skcipher_null.cra_list),
+       .cra_u                  =       { .blkcipher = {
+       .min_keysize            =       NULL_KEY_SIZE,
+       .max_keysize            =       NULL_KEY_SIZE,
+       .ivsize                 =       NULL_IV_SIZE,
+       .setkey                 =       null_setkey,
+       .encrypt                =       skcipher_null_crypt,
+       .decrypt                =       skcipher_null_crypt } }
+};
+
 MODULE_ALIAS("compress_null");
 MODULE_ALIAS("digest_null");
 MODULE_ALIAS("cipher_null");
@@ -108,27 +150,35 @@ static int __init init(void)
        if (ret < 0)
                goto out;
 
+       ret = crypto_register_alg(&skcipher_null);
+       if (ret < 0)
+               goto out_unregister_cipher;
+
        ret = crypto_register_alg(&digest_null);
-       if (ret < 0) {
-               crypto_unregister_alg(&cipher_null);
-               goto out;
-       }
+       if (ret < 0)
+               goto out_unregister_skcipher;
 
        ret = crypto_register_alg(&compress_null);
-       if (ret < 0) {
-               crypto_unregister_alg(&digest_null);
-               crypto_unregister_alg(&cipher_null);
-               goto out;
-       }
+       if (ret < 0)
+               goto out_unregister_digest;
 
 out:   
        return ret;
+
+out_unregister_digest:
+       crypto_unregister_alg(&digest_null);
+out_unregister_skcipher:
+       crypto_unregister_alg(&skcipher_null);
+out_unregister_cipher:
+       crypto_unregister_alg(&cipher_null);
+       goto out;
 }
 
 static void __exit fini(void)
 {
        crypto_unregister_alg(&compress_null);
        crypto_unregister_alg(&digest_null);
+       crypto_unregister_alg(&skcipher_null);
        crypto_unregister_alg(&cipher_null);
 }
 
diff --git a/crypto/ctr.c b/crypto/ctr.c
new file mode 100644 (file)
index 0000000..2d7425f
--- /dev/null
@@ -0,0 +1,422 @@
+/*
+ * CTR: Counter mode
+ *
+ * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/algapi.h>
+#include <crypto/ctr.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/random.h>
+#include <linux/scatterlist.h>
+#include <linux/slab.h>
+
+struct crypto_ctr_ctx {
+       struct crypto_cipher *child;
+};
+
+struct crypto_rfc3686_ctx {
+       struct crypto_blkcipher *child;
+       u8 nonce[CTR_RFC3686_NONCE_SIZE];
+};
+
+static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
+                            unsigned int keylen)
+{
+       struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
+       struct crypto_cipher *child = ctx->child;
+       int err;
+
+       crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+       crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
+                               CRYPTO_TFM_REQ_MASK);
+       err = crypto_cipher_setkey(child, key, keylen);
+       crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
+                            CRYPTO_TFM_RES_MASK);
+
+       return err;
+}
+
+static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
+                                  struct crypto_cipher *tfm)
+{
+       unsigned int bsize = crypto_cipher_blocksize(tfm);
+       unsigned long alignmask = crypto_cipher_alignmask(tfm);
+       u8 *ctrblk = walk->iv;
+       u8 tmp[bsize + alignmask];
+       u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
+       u8 *src = walk->src.virt.addr;
+       u8 *dst = walk->dst.virt.addr;
+       unsigned int nbytes = walk->nbytes;
+
+       crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
+       crypto_xor(keystream, src, nbytes);
+       memcpy(dst, keystream, nbytes);
+
+       crypto_inc(ctrblk, bsize);
+}
+
+static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
+                                   struct crypto_cipher *tfm)
+{
+       void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
+                  crypto_cipher_alg(tfm)->cia_encrypt;
+       unsigned int bsize = crypto_cipher_blocksize(tfm);
+       u8 *ctrblk = walk->iv;
+       u8 *src = walk->src.virt.addr;
+       u8 *dst = walk->dst.virt.addr;
+       unsigned int nbytes = walk->nbytes;
+
+       do {
+               /* create keystream */
+               fn(crypto_cipher_tfm(tfm), dst, ctrblk);
+               crypto_xor(dst, src, bsize);
+
+               /* increment counter in counterblock */
+               crypto_inc(ctrblk, bsize);
+
+               src += bsize;
+               dst += bsize;
+       } while ((nbytes -= bsize) >= bsize);
+
+       return nbytes;
+}
+
+static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
+                                   struct crypto_cipher *tfm)
+{
+       void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
+                  crypto_cipher_alg(tfm)->cia_encrypt;
+       unsigned int bsize = crypto_cipher_blocksize(tfm);
+       unsigned long alignmask = crypto_cipher_alignmask(tfm);
+       unsigned int nbytes = walk->nbytes;
+       u8 *ctrblk = walk->iv;
+       u8 *src = walk->src.virt.addr;
+       u8 tmp[bsize + alignmask];
+       u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
+
+       do {
+               /* create keystream */
+               fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
+               crypto_xor(src, keystream, bsize);
+
+               /* increment counter in counterblock */
+               crypto_inc(ctrblk, bsize);
+
+               src += bsize;
+       } while ((nbytes -= bsize) >= bsize);
+
+       return nbytes;
+}
+
+static int crypto_ctr_crypt(struct blkcipher_desc *desc,
+                             struct scatterlist *dst, struct scatterlist *src,
+                             unsigned int nbytes)
+{
+       struct blkcipher_walk walk;
+       struct crypto_blkcipher *tfm = desc->tfm;
+       struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
+       struct crypto_cipher *child = ctx->child;
+       unsigned int bsize = crypto_cipher_blocksize(child);
+       int err;
+
+       blkcipher_walk_init(&walk, dst, src, nbytes);
+       err = blkcipher_walk_virt_block(desc, &walk, bsize);
+
+       while (walk.nbytes >= bsize) {
+               if (walk.src.virt.addr == walk.dst.virt.addr)
+                       nbytes = crypto_ctr_crypt_inplace(&walk, child);
+               else
+                       nbytes = crypto_ctr_crypt_segment(&walk, child);
+
+               err = blkcipher_walk_done(desc, &walk, nbytes);
+       }
+
+       if (walk.nbytes) {
+               crypto_ctr_crypt_final(&walk, child);
+               err = blkcipher_walk_done(desc, &walk, 0);
+       }
+
+       return err;
+}
+
+static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+       struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_cipher *cipher;
+
+       cipher = crypto_spawn_cipher(spawn);
+       if (IS_ERR(cipher))
+               return PTR_ERR(cipher);
+
+       ctx->child = cipher;
+
+       return 0;
+}
+
+static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_cipher(ctx->child);
+}
+
+static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
+{
+       struct crypto_instance *inst;
+       struct crypto_alg *alg;
+       int err;
+
+       err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
+       if (err)
+               return ERR_PTR(err);
+
+       alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
+                                 CRYPTO_ALG_TYPE_MASK);
+       if (IS_ERR(alg))
+               return ERR_PTR(PTR_ERR(alg));
+
+       /* Block size must be >= 4 bytes. */
+       err = -EINVAL;
+       if (alg->cra_blocksize < 4)
+               goto out_put_alg;
+
+       /* If this is false we'd fail the alignment of crypto_inc. */
+       if (alg->cra_blocksize % 4)
+               goto out_put_alg;
+
+       inst = crypto_alloc_instance("ctr", alg);
+       if (IS_ERR(inst))
+               goto out;
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
+       inst->alg.cra_priority = alg->cra_priority;
+       inst->alg.cra_blocksize = 1;
+       inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
+       inst->alg.cra_type = &crypto_blkcipher_type;
+
+       inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
+       inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
+       inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
+
+       inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);
+
+       inst->alg.cra_init = crypto_ctr_init_tfm;
+       inst->alg.cra_exit = crypto_ctr_exit_tfm;
+
+       inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
+       inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
+       inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
+
+out:
+       crypto_mod_put(alg);
+       return inst;
+
+out_put_alg:
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static void crypto_ctr_free(struct crypto_instance *inst)
+{
+       crypto_drop_spawn(crypto_instance_ctx(inst));
+       kfree(inst);
+}
+
+static struct crypto_template crypto_ctr_tmpl = {
+       .name = "ctr",
+       .alloc = crypto_ctr_alloc,
+       .free = crypto_ctr_free,
+       .module = THIS_MODULE,
+};
+
+static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
+                                unsigned int keylen)
+{
+       struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
+       struct crypto_blkcipher *child = ctx->child;
+       int err;
+
+       /* the nonce is stored in the last bytes of the key */
+       if (keylen < CTR_RFC3686_NONCE_SIZE)
+               return -EINVAL;
+
+       memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
+              CTR_RFC3686_NONCE_SIZE);
+
+       keylen -= CTR_RFC3686_NONCE_SIZE;
+
+       crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+       crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
+                                         CRYPTO_TFM_REQ_MASK);
+       err = crypto_blkcipher_setkey(child, key, keylen);
+       crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
+                                    CRYPTO_TFM_RES_MASK);
+
+       return err;
+}
+
+static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
+                               struct scatterlist *dst,
+                               struct scatterlist *src, unsigned int nbytes)
+{
+       struct crypto_blkcipher *tfm = desc->tfm;
+       struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
+       struct crypto_blkcipher *child = ctx->child;
+       unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
+       u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
+       u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
+       u8 *info = desc->info;
+       int err;
+
+       /* set up counter block */
+       memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
+       memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);
+
+       /* initialize counter portion of counter block */
+       *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
+               cpu_to_be32(1);
+
+       desc->tfm = child;
+       desc->info = iv;
+       err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+       desc->tfm = tfm;
+       desc->info = info;
+
+       return err;
+}
+
+static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+       struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_blkcipher *cipher;
+
+       cipher = crypto_spawn_blkcipher(spawn);
+       if (IS_ERR(cipher))
+               return PTR_ERR(cipher);
+
+       ctx->child = cipher;
+
+       return 0;
+}
+
+static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_blkcipher(ctx->child);
+}
+
+static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
+{
+       struct crypto_instance *inst;
+       struct crypto_alg *alg;
+       int err;
+
+       err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
+       if (err)
+               return ERR_PTR(err);
+
+       alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
+                                 CRYPTO_ALG_TYPE_MASK);
+       err = PTR_ERR(alg);
+       if (IS_ERR(alg))
+               return ERR_PTR(err);
+
+       /* We only support 16-byte blocks. */
+       err = -EINVAL;
+       if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
+               goto out_put_alg;
+
+       /* Not a stream cipher? */
+       if (alg->cra_blocksize != 1)
+               goto out_put_alg;
+
+       inst = crypto_alloc_instance("rfc3686", alg);
+       if (IS_ERR(inst))
+               goto out;
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
+       inst->alg.cra_priority = alg->cra_priority;
+       inst->alg.cra_blocksize = 1;
+       inst->alg.cra_alignmask = alg->cra_alignmask;
+       inst->alg.cra_type = &crypto_blkcipher_type;
+
+       inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
+       inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
+                                             + CTR_RFC3686_NONCE_SIZE;
+       inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
+                                             + CTR_RFC3686_NONCE_SIZE;
+
+       inst->alg.cra_blkcipher.geniv = "seqiv";
+
+       inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
+
+       inst->alg.cra_init = crypto_rfc3686_init_tfm;
+       inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
+
+       inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
+       inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
+       inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
+
+out:
+       crypto_mod_put(alg);
+       return inst;
+
+out_put_alg:
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static struct crypto_template crypto_rfc3686_tmpl = {
+       .name = "rfc3686",
+       .alloc = crypto_rfc3686_alloc,
+       .free = crypto_ctr_free,
+       .module = THIS_MODULE,
+};
+
+static int __init crypto_ctr_module_init(void)
+{
+       int err;
+
+       err = crypto_register_template(&crypto_ctr_tmpl);
+       if (err)
+               goto out;
+
+       err = crypto_register_template(&crypto_rfc3686_tmpl);
+       if (err)
+               goto out_drop_ctr;
+
+out:
+       return err;
+
+out_drop_ctr:
+       crypto_unregister_template(&crypto_ctr_tmpl);
+       goto out;
+}
+
+static void __exit crypto_ctr_module_exit(void)
+{
+       crypto_unregister_template(&crypto_rfc3686_tmpl);
+       crypto_unregister_template(&crypto_ctr_tmpl);
+}
+
+module_init(crypto_ctr_module_init);
+module_exit(crypto_ctr_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("CTR Counter block mode");
+MODULE_ALIAS("rfc3686");
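
The per-block work done by crypto_ctr_crypt_segment() and crypto_ctr_crypt_final() (encrypt the counter block, XOR the keystream into the data, then advance the counter big-endian as crypto_inc() does) can be sketched in stand-alone C. The block function below is a placeholder rather than a real cipher, and the sample counter block follows the RFC 3686 layout assembled by crypto_rfc3686_crypt(): a 4-byte nonce, an 8-byte per-packet IV, then a big-endian 32-bit counter starting at 1.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define BLK 16

/* Placeholder for the block cipher's encrypt primitive. */
static void toy_block_encrypt(uint8_t *out, const uint8_t *in)
{
        int i;

        for (i = 0; i < BLK; i++)
                out[i] = (uint8_t)(in[i] ^ 0xAA);
}

/* Big-endian increment of the counter block, like crypto_inc(). */
static void ctr_inc(uint8_t *ctrblk)
{
        int i;

        for (i = BLK - 1; i >= 0; i--)
                if (++ctrblk[i])
                        break;
}

/* CTR keystream: E(counter) XORed into the data, one block at a time;
 * the last, possibly partial block is handled in the same pass. */
static void ctr_crypt(uint8_t *data, size_t len, uint8_t *ctrblk)
{
        uint8_t keystream[BLK];
        size_t i, n;

        while (len) {
                toy_block_encrypt(keystream, ctrblk);
                n = len < BLK ? len : BLK;
                for (i = 0; i < n; i++)
                        data[i] ^= keystream[i];
                ctr_inc(ctrblk);
                data += n;
                len -= n;
        }
}

int main(void)
{
        /* RFC 3686 layout: 4-byte nonce | 8-byte IV | be32 counter = 1 */
        uint8_t ctrblk[BLK] = { 0xde, 0xad, 0xbe, 0xef,
                                1, 2, 3, 4, 5, 6, 7, 8,
                                0, 0, 0, 1 };
        uint8_t data[20] = "ctr mode example...";
        size_t i;

        ctr_crypt(data, sizeof(data), ctrblk);
        for (i = 0; i < sizeof(data); i++)
                printf("%02x", data[i]);
        printf("\n");
        return 0;
}
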
diff --git a/crypto/des_generic.c b/crypto/des_generic.c
index 59966d1..355ecb7 100644 (file)
 #include <linux/crypto.h>
 #include <linux/types.h>
 
-#define DES_KEY_SIZE           8
-#define DES_EXPKEY_WORDS       32
-#define DES_BLOCK_SIZE         8
-
-#define DES3_EDE_KEY_SIZE      (3 * DES_KEY_SIZE)
-#define DES3_EDE_EXPKEY_WORDS  (3 * DES_EXPKEY_WORDS)
-#define DES3_EDE_BLOCK_SIZE    DES_BLOCK_SIZE
+#include <crypto/des.h>
 
 #define ROL(x, r) ((x) = rol32((x), (r)))
 #define ROR(x, r) ((x) = ror32((x), (r)))
@@ -634,7 +628,7 @@ static const u32 S8[64] = {
  *   Choice 1 has operated on the key.
  *
  */
-static unsigned long ekey(u32 *pe, const u8 *k)
+unsigned long des_ekey(u32 *pe, const u8 *k)
 {
        /* K&R: long is at least 32 bits */
        unsigned long a, b, c, d, w;
@@ -709,6 +703,7 @@ static unsigned long ekey(u32 *pe, const u8 *k)
        /* Zero if weak key */
        return w;
 }
+EXPORT_SYMBOL_GPL(des_ekey);
 
 /*
  * Decryption key expansion
@@ -792,7 +787,7 @@ static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
        int ret;
 
        /* Expand to tmp */
-       ret = ekey(tmp, key);
+       ret = des_ekey(tmp, key);
 
        if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
@@ -879,9 +874,9 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
                return -EINVAL;
        }
 
-       ekey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
+       des_ekey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
        dkey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
-       ekey(expkey, key);
+       des_ekey(expkey, key);
 
        return 0;
 }
diff --git a/crypto/digest.c b/crypto/digest.c
index 8871dec..6fd43bd 100644 (file)
@@ -12,6 +12,7 @@
  *
  */
 
+#include <crypto/scatterwalk.h>
 #include <linux/mm.h>
 #include <linux/errno.h>
 #include <linux/hardirq.h>
@@ -20,9 +21,6 @@
 #include <linux/module.h>
 #include <linux/scatterlist.h>
 
-#include "internal.h"
-#include "scatterwalk.h"
-
 static int init(struct hash_desc *desc)
 {
        struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
diff --git a/crypto/eseqiv.c b/crypto/eseqiv.c
new file mode 100644 (file)
index 0000000..eb90d27
--- /dev/null
@@ -0,0 +1,264 @@
+/*
+ * eseqiv: Encrypted Sequence Number IV Generator
+ *
+ * This generator generates an IV based on a sequence number by xoring it
+ * with a salt and then encrypting it with the same key as used to encrypt
+ * the plain text.  This algorithm requires that the block size be equal
+ * to the IV size.  It is mainly useful for CBC.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/mm.h>
+#include <linux/module.h>
+#include <linux/random.h>
+#include <linux/scatterlist.h>
+#include <linux/spinlock.h>
+#include <linux/string.h>
+
+struct eseqiv_request_ctx {
+       struct scatterlist src[2];
+       struct scatterlist dst[2];
+       char tail[];
+};
+
+struct eseqiv_ctx {
+       spinlock_t lock;
+       unsigned int reqoff;
+       char salt[];
+};
+
+static void eseqiv_complete2(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req);
+
+       memcpy(req->giv, PTR_ALIGN((u8 *)reqctx->tail,
+                        crypto_ablkcipher_alignmask(geniv) + 1),
+              crypto_ablkcipher_ivsize(geniv));
+}
+
+static void eseqiv_complete(struct crypto_async_request *base, int err)
+{
+       struct skcipher_givcrypt_request *req = base->data;
+
+       if (err)
+               goto out;
+
+       eseqiv_complete2(req);
+
+out:
+       skcipher_givcrypt_complete(req, err);
+}
+
+static void eseqiv_chain(struct scatterlist *head, struct scatterlist *sg,
+                        int chain)
+{
+       if (chain) {
+               head->length += sg->length;
+               sg = scatterwalk_sg_next(sg);
+       }
+
+       if (sg)
+               scatterwalk_sg_chain(head, 2, sg);
+       else
+               sg_mark_end(head);
+}
+
+static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req);
+       struct ablkcipher_request *subreq;
+       crypto_completion_t complete;
+       void *data;
+       struct scatterlist *osrc, *odst;
+       struct scatterlist *dst;
+       struct page *srcp;
+       struct page *dstp;
+       u8 *giv;
+       u8 *vsrc;
+       u8 *vdst;
+       __be64 seq;
+       unsigned int ivsize;
+       unsigned int len;
+       int err;
+
+       subreq = (void *)(reqctx->tail + ctx->reqoff);
+       ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
+
+       giv = req->giv;
+       complete = req->creq.base.complete;
+       data = req->creq.base.data;
+
+       osrc = req->creq.src;
+       odst = req->creq.dst;
+       srcp = sg_page(osrc);
+       dstp = sg_page(odst);
+       vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + osrc->offset;
+       vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + odst->offset;
+
+       ivsize = crypto_ablkcipher_ivsize(geniv);
+
+       if (vsrc != giv + ivsize && vdst != giv + ivsize) {
+               giv = PTR_ALIGN((u8 *)reqctx->tail,
+                               crypto_ablkcipher_alignmask(geniv) + 1);
+               complete = eseqiv_complete;
+               data = req;
+       }
+
+       ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete,
+                                       data);
+
+       sg_init_table(reqctx->src, 2);
+       sg_set_buf(reqctx->src, giv, ivsize);
+       eseqiv_chain(reqctx->src, osrc, vsrc == giv + ivsize);
+
+       dst = reqctx->src;
+       if (osrc != odst) {
+               sg_init_table(reqctx->dst, 2);
+               sg_set_buf(reqctx->dst, giv, ivsize);
+               eseqiv_chain(reqctx->dst, odst, vdst == giv + ivsize);
+
+               dst = reqctx->dst;
+       }
+
+       ablkcipher_request_set_crypt(subreq, reqctx->src, dst,
+                                    req->creq.nbytes, req->creq.info);
+
+       memcpy(req->creq.info, ctx->salt, ivsize);
+
+       len = ivsize;
+       if (ivsize > sizeof(u64)) {
+               memset(req->giv, 0, ivsize - sizeof(u64));
+               len = sizeof(u64);
+       }
+       seq = cpu_to_be64(req->seq);
+       memcpy(req->giv + ivsize - len, &seq, len);
+
+       err = crypto_ablkcipher_encrypt(subreq);
+       if (err)
+               goto out;
+
+       eseqiv_complete2(req);
+
+out:
+       return err;
+}
+
+static int eseqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+
+       spin_lock_bh(&ctx->lock);
+       if (crypto_ablkcipher_crt(geniv)->givencrypt != eseqiv_givencrypt_first)
+               goto unlock;
+
+       crypto_ablkcipher_crt(geniv)->givencrypt = eseqiv_givencrypt;
+       get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv));
+
+unlock:
+       spin_unlock_bh(&ctx->lock);
+
+       return eseqiv_givencrypt(req);
+}
+
+static int eseqiv_init(struct crypto_tfm *tfm)
+{
+       struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
+       struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       unsigned long alignmask;
+       unsigned int reqsize;
+
+       spin_lock_init(&ctx->lock);
+
+       alignmask = crypto_tfm_ctx_alignment() - 1;
+       reqsize = sizeof(struct eseqiv_request_ctx);
+
+       if (alignmask & reqsize) {
+               alignmask &= reqsize;
+               alignmask--;
+       }
+
+       alignmask = ~alignmask;
+       alignmask &= crypto_ablkcipher_alignmask(geniv);
+
+       reqsize += alignmask;
+       reqsize += crypto_ablkcipher_ivsize(geniv);
+       reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
+
+       ctx->reqoff = reqsize - sizeof(struct eseqiv_request_ctx);
+
+       tfm->crt_ablkcipher.reqsize = reqsize +
+                                     sizeof(struct ablkcipher_request);
+
+       return skcipher_geniv_init(tfm);
+}
+
+static struct crypto_template eseqiv_tmpl;
+
+static struct crypto_instance *eseqiv_alloc(struct rtattr **tb)
+{
+       struct crypto_instance *inst;
+       int err;
+
+       inst = skcipher_geniv_alloc(&eseqiv_tmpl, tb, 0, 0);
+       if (IS_ERR(inst))
+               goto out;
+
+       err = -EINVAL;
+       if (inst->alg.cra_ablkcipher.ivsize != inst->alg.cra_blocksize)
+               goto free_inst;
+
+       inst->alg.cra_ablkcipher.givencrypt = eseqiv_givencrypt_first;
+
+       inst->alg.cra_init = eseqiv_init;
+       inst->alg.cra_exit = skcipher_geniv_exit;
+
+       inst->alg.cra_ctxsize = sizeof(struct eseqiv_ctx);
+       inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
+
+out:
+       return inst;
+
+free_inst:
+       skcipher_geniv_free(inst);
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static struct crypto_template eseqiv_tmpl = {
+       .name = "eseqiv",
+       .alloc = eseqiv_alloc,
+       .free = skcipher_geniv_free,
+       .module = THIS_MODULE,
+};
+
+static int __init eseqiv_module_init(void)
+{
+       return crypto_register_template(&eseqiv_tmpl);
+}
+
+static void __exit eseqiv_module_exit(void)
+{
+       crypto_unregister_template(&eseqiv_tmpl);
+}
+
+module_init(eseqiv_module_init);
+module_exit(eseqiv_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Encrypted Sequence Number IV Generator");
diff --git a/crypto/gcm.c b/crypto/gcm.c
new file mode 100644 (file)
index 0000000..e70afd0
--- /dev/null
@@ -0,0 +1,823 @@
+/*
+ * GCM: Galois/Counter Mode.
+ *
+ * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published
+ * by the Free Software Foundation.
+ */
+
+#include <crypto/gf128mul.h>
+#include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
+#include <linux/completion.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+
+struct gcm_instance_ctx {
+       struct crypto_skcipher_spawn ctr;
+};
+
+struct crypto_gcm_ctx {
+       struct crypto_ablkcipher *ctr;
+       struct gf128mul_4k *gf128;
+};
+
+struct crypto_rfc4106_ctx {
+       struct crypto_aead *child;
+       u8 nonce[4];
+};
+
+struct crypto_gcm_ghash_ctx {
+       u32 bytes;
+       u32 flags;
+       struct gf128mul_4k *gf128;
+       u8 buffer[16];
+};
+
+struct crypto_gcm_req_priv_ctx {
+       u8 auth_tag[16];
+       u8 iauth_tag[16];
+       struct scatterlist src[2];
+       struct scatterlist dst[2];
+       struct crypto_gcm_ghash_ctx ghash;
+       struct ablkcipher_request abreq;
+};
+
+struct crypto_gcm_setkey_result {
+       int err;
+       struct completion completion;
+};
+
+static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
+       struct aead_request *req)
+{
+       unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
+
+       return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
+}
+
+static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
+                                 struct gf128mul_4k *gf128)
+{
+       ctx->bytes = 0;
+       ctx->flags = flags;
+       ctx->gf128 = gf128;
+       memset(ctx->buffer, 0, 16);
+}
+
+static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
+                                   const u8 *src, unsigned int srclen)
+{
+       u8 *dst = ctx->buffer;
+
+       if (ctx->bytes) {
+               int n = min(srclen, ctx->bytes);
+               u8 *pos = dst + (16 - ctx->bytes);
+
+               ctx->bytes -= n;
+               srclen -= n;
+
+               while (n--)
+                       *pos++ ^= *src++;
+
+               if (!ctx->bytes)
+                       gf128mul_4k_lle((be128 *)dst, ctx->gf128);
+       }
+
+       while (srclen >= 16) {
+               crypto_xor(dst, src, 16);
+               gf128mul_4k_lle((be128 *)dst, ctx->gf128);
+               src += 16;
+               srclen -= 16;
+       }
+
+       if (srclen) {
+               ctx->bytes = 16 - srclen;
+               while (srclen--)
+                       *dst++ ^= *src++;
+       }
+}
+
+static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
+                                      struct scatterlist *sg, int len)
+{
+       struct scatter_walk walk;
+       u8 *src;
+       int n;
+
+       if (!len)
+               return;
+
+       scatterwalk_start(&walk, sg);
+
+       while (len) {
+               n = scatterwalk_clamp(&walk, len);
+
+               if (!n) {
+                       scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
+                       n = scatterwalk_clamp(&walk, len);
+               }
+
+               src = scatterwalk_map(&walk, 0);
+
+               crypto_gcm_ghash_update(ctx, src, n);
+               len -= n;
+
+               scatterwalk_unmap(src, 0);
+               scatterwalk_advance(&walk, n);
+               scatterwalk_done(&walk, 0, len);
+               if (len)
+                       crypto_yield(ctx->flags);
+       }
+}
+
+static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
+{
+       u8 *dst = ctx->buffer;
+
+       if (ctx->bytes) {
+               u8 *tmp = dst + (16 - ctx->bytes);
+
+               while (ctx->bytes--)
+                       *tmp++ ^= 0;
+
+               gf128mul_4k_lle((be128 *)dst, ctx->gf128);
+       }
+
+       ctx->bytes = 0;
+}
+
+static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
+                                      unsigned int authlen,
+                                      unsigned int cryptlen, u8 *dst)
+{
+       u8 *buf = ctx->buffer;
+       u128 lengths;
+
+       lengths.a = cpu_to_be64(authlen * 8);
+       lengths.b = cpu_to_be64(cryptlen * 8);
+
+       crypto_gcm_ghash_flush(ctx);
+       crypto_xor(buf, (u8 *)&lengths, 16);
+       gf128mul_4k_lle((be128 *)buf, ctx->gf128);
+       crypto_xor(dst, buf, 16);
+}
+
+static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
+{
+       struct crypto_gcm_setkey_result *result = req->data;
+
+       if (err == -EINPROGRESS)
+               return;
+
+       result->err = err;
+       complete(&result->completion);
+}
+
+static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
+                            unsigned int keylen)
+{
+       struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_ablkcipher *ctr = ctx->ctr;
+       struct {
+               be128 hash;
+               u8 iv[8];
+
+               struct crypto_gcm_setkey_result result;
+
+               struct scatterlist sg[1];
+               struct ablkcipher_request req;
+       } *data;
+       int err;
+
+       crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
+       crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
+                                  CRYPTO_TFM_REQ_MASK);
+
+       err = crypto_ablkcipher_setkey(ctr, key, keylen);
+       if (err)
+               return err;
+
+       crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
+                                      CRYPTO_TFM_RES_MASK);
+
+       data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
+                      GFP_KERNEL);
+       if (!data)
+               return -ENOMEM;
+
+       init_completion(&data->result.completion);
+       sg_init_one(data->sg, &data->hash, sizeof(data->hash));
+       ablkcipher_request_set_tfm(&data->req, ctr);
+       ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
+                                                   CRYPTO_TFM_REQ_MAY_BACKLOG,
+                                       crypto_gcm_setkey_done,
+                                       &data->result);
+       ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
+                                    sizeof(data->hash), data->iv);
+
+       err = crypto_ablkcipher_encrypt(&data->req);
+       if (err == -EINPROGRESS || err == -EBUSY) {
+               err = wait_for_completion_interruptible(
+                       &data->result.completion);
+               if (!err)
+                       err = data->result.err;
+       }
+
+       if (err)
+               goto out;
+
+       if (ctx->gf128 != NULL)
+               gf128mul_free_4k(ctx->gf128);
+
+       ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
+
+       if (ctx->gf128 == NULL)
+               err = -ENOMEM;
+
+out:
+       kfree(data);
+       return err;
+}
+
+static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
+                                 unsigned int authsize)
+{
+       switch (authsize) {
+       case 4:
+       case 8:
+       case 12:
+       case 13:
+       case 14:
+       case 15:
+       case 16:
+               break;
+       default:
+               return -EINVAL;
+       }
+
+       return 0;
+}
+
+static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
+                                 struct aead_request *req,
+                                 unsigned int cryptlen)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+       u32 flags = req->base.tfm->crt_flags;
+       struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+       struct scatterlist *dst;
+       __be32 counter = cpu_to_be32(1);
+
+       memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
+       memcpy(req->iv + 12, &counter, 4);
+
+       sg_init_table(pctx->src, 2);
+       sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
+       scatterwalk_sg_chain(pctx->src, 2, req->src);
+
+       dst = pctx->src;
+       if (req->src != req->dst) {
+               sg_init_table(pctx->dst, 2);
+               sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
+               scatterwalk_sg_chain(pctx->dst, 2, req->dst);
+               dst = pctx->dst;
+       }
+
+       ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
+       ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
+                                    cryptlen + sizeof(pctx->auth_tag),
+                                    req->iv);
+
+       crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
+
+       crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
+       crypto_gcm_ghash_flush(ghash);
+}
+
+static int crypto_gcm_hash(struct aead_request *req)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+       u8 *auth_tag = pctx->auth_tag;
+       struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+
+       crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
+       crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
+                                  auth_tag);
+
+       scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
+                                crypto_aead_authsize(aead), 1);
+       return 0;
+}
+
+static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
+{
+       struct aead_request *req = areq->data;
+
+       if (!err)
+               err = crypto_gcm_hash(req);
+
+       aead_request_complete(req, err);
+}
+
+static int crypto_gcm_encrypt(struct aead_request *req)
+{
+       struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+       struct ablkcipher_request *abreq = &pctx->abreq;
+       int err;
+
+       crypto_gcm_init_crypt(abreq, req, req->cryptlen);
+       ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+                                       crypto_gcm_encrypt_done, req);
+
+       err = crypto_ablkcipher_encrypt(abreq);
+       if (err)
+               return err;
+
+       return crypto_gcm_hash(req);
+}
+
+static int crypto_gcm_verify(struct aead_request *req)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+       struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+       u8 *auth_tag = pctx->auth_tag;
+       u8 *iauth_tag = pctx->iauth_tag;
+       unsigned int authsize = crypto_aead_authsize(aead);
+       unsigned int cryptlen = req->cryptlen - authsize;
+
+       crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
+
+       authsize = crypto_aead_authsize(aead);
+       scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
+       return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
+}
+
+static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
+{
+       struct aead_request *req = areq->data;
+
+       if (!err)
+               err = crypto_gcm_verify(req);
+
+       aead_request_complete(req, err);
+}
+
+static int crypto_gcm_decrypt(struct aead_request *req)
+{
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+       struct ablkcipher_request *abreq = &pctx->abreq;
+       struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+       unsigned int cryptlen = req->cryptlen;
+       unsigned int authsize = crypto_aead_authsize(aead);
+       int err;
+
+       if (cryptlen < authsize)
+               return -EINVAL;
+       cryptlen -= authsize;
+
+       crypto_gcm_init_crypt(abreq, req, cryptlen);
+       ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+                                       crypto_gcm_decrypt_done, req);
+
+       crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
+
+       err = crypto_ablkcipher_decrypt(abreq);
+       if (err)
+               return err;
+
+       return crypto_gcm_verify(req);
+}
+
+static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
+       struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_ablkcipher *ctr;
+       unsigned long align;
+       int err;
+
+       ctr = crypto_spawn_skcipher(&ictx->ctr);
+       err = PTR_ERR(ctr);
+       if (IS_ERR(ctr))
+               return err;
+
+       ctx->ctr = ctr;
+       ctx->gf128 = NULL;
+
+       align = crypto_tfm_alg_alignmask(tfm);
+       align &= ~(crypto_tfm_ctx_alignment() - 1);
+       tfm->crt_aead.reqsize = align +
+                               sizeof(struct crypto_gcm_req_priv_ctx) +
+                               crypto_ablkcipher_reqsize(ctr);
+
+       return 0;
+}
+
+static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       if (ctx->gf128 != NULL)
+               gf128mul_free_4k(ctx->gf128);
+
+       crypto_free_ablkcipher(ctx->ctr);
+}
+
+static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
+                                                      const char *full_name,
+                                                      const char *ctr_name)
+{
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       struct crypto_alg *ctr;
+       struct gcm_instance_ctx *ctx;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+               return ERR_PTR(-EINVAL);
+
+       inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+       if (!inst)
+               return ERR_PTR(-ENOMEM);
+
+       ctx = crypto_instance_ctx(inst);
+       crypto_set_skcipher_spawn(&ctx->ctr, inst);
+       err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
+                                  crypto_requires_sync(algt->type,
+                                                       algt->mask));
+       if (err)
+               goto err_free_inst;
+
+       ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
+
+       /* We only support 16-byte blocks. */
+       err = -EINVAL;
+       if (ctr->cra_ablkcipher.ivsize != 16)
+               goto out_put_ctr;
+
+       /* Not a stream cipher? */
+       if (ctr->cra_blocksize != 1)
+               goto out_put_ctr;
+
+       err = -ENAMETOOLONG;
+       if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                    "gcm_base(%s)", ctr->cra_driver_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               goto out_put_ctr;
+
+       memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+       inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
+       inst->alg.cra_priority = ctr->cra_priority;
+       inst->alg.cra_blocksize = 1;
+       inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
+       inst->alg.cra_type = &crypto_aead_type;
+       inst->alg.cra_aead.ivsize = 16;
+       inst->alg.cra_aead.maxauthsize = 16;
+       inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
+       inst->alg.cra_init = crypto_gcm_init_tfm;
+       inst->alg.cra_exit = crypto_gcm_exit_tfm;
+       inst->alg.cra_aead.setkey = crypto_gcm_setkey;
+       inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize;
+       inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
+       inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
+
+out:
+       return inst;
+
+out_put_ctr:
+       crypto_drop_skcipher(&ctx->ctr);
+err_free_inst:
+       kfree(inst);
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
+{
+       int err;
+       const char *cipher_name;
+       char ctr_name[CRYPTO_MAX_ALG_NAME];
+       char full_name[CRYPTO_MAX_ALG_NAME];
+
+       cipher_name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(cipher_name);
+       if (IS_ERR(cipher_name))
+               return ERR_PTR(err);
+
+       if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-ENAMETOOLONG);
+
+       if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-ENAMETOOLONG);
+
+       return crypto_gcm_alloc_common(tb, full_name, ctr_name);
+}
+
+static void crypto_gcm_free(struct crypto_instance *inst)
+{
+       struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
+
+       crypto_drop_skcipher(&ctx->ctr);
+       kfree(inst);
+}
+
+static struct crypto_template crypto_gcm_tmpl = {
+       .name = "gcm",
+       .alloc = crypto_gcm_alloc,
+       .free = crypto_gcm_free,
+       .module = THIS_MODULE,
+};
+
+static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
+{
+       int err;
+       const char *ctr_name;
+       char full_name[CRYPTO_MAX_ALG_NAME];
+
+       ctr_name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(ctr_name);
+       if (IS_ERR(ctr_name))
+               return ERR_PTR(err);
+
+       if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
+                    ctr_name) >= CRYPTO_MAX_ALG_NAME)
+               return ERR_PTR(-ENAMETOOLONG);
+
+       return crypto_gcm_alloc_common(tb, full_name, ctr_name);
+}
+
+static struct crypto_template crypto_gcm_base_tmpl = {
+       .name = "gcm_base",
+       .alloc = crypto_gcm_base_alloc,
+       .free = crypto_gcm_free,
+       .module = THIS_MODULE,
+};
+
+static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
+                                unsigned int keylen)
+{
+       struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
+       struct crypto_aead *child = ctx->child;
+       int err;
+
+       if (keylen < 4)
+               return -EINVAL;
+
+       keylen -= 4;
+       memcpy(ctx->nonce, key + keylen, 4);
+
+       crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+       crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
+                                    CRYPTO_TFM_REQ_MASK);
+       err = crypto_aead_setkey(child, key, keylen);
+       crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
+                                     CRYPTO_TFM_RES_MASK);
+
+       return err;
+}
+
+static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
+                                     unsigned int authsize)
+{
+       struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
+
+       switch (authsize) {
+       case 8:
+       case 12:
+       case 16:
+               break;
+       default:
+               return -EINVAL;
+       }
+
+       return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
+static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
+{
+       struct aead_request *subreq = aead_request_ctx(req);
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
+       struct crypto_aead *child = ctx->child;
+       u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
+                          crypto_aead_alignmask(child) + 1);
+
+       memcpy(iv, ctx->nonce, 4);
+       memcpy(iv + 4, req->iv, 8);
+
+       aead_request_set_tfm(subreq, child);
+       aead_request_set_callback(subreq, req->base.flags, req->base.complete,
+                                 req->base.data);
+       aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
+       aead_request_set_assoc(subreq, req->assoc, req->assoclen);
+
+       return subreq;
+}
+
+static int crypto_rfc4106_encrypt(struct aead_request *req)
+{
+       req = crypto_rfc4106_crypt(req);
+
+       return crypto_aead_encrypt(req);
+}
+
+static int crypto_rfc4106_decrypt(struct aead_request *req)
+{
+       req = crypto_rfc4106_crypt(req);
+
+       return crypto_aead_decrypt(req);
+}
+
+static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_instance *inst = (void *)tfm->__crt_alg;
+       struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
+       struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct crypto_aead *aead;
+       unsigned long align;
+
+       aead = crypto_spawn_aead(spawn);
+       if (IS_ERR(aead))
+               return PTR_ERR(aead);
+
+       ctx->child = aead;
+
+       align = crypto_aead_alignmask(aead);
+       align &= ~(crypto_tfm_ctx_alignment() - 1);
+       tfm->crt_aead.reqsize = sizeof(struct aead_request) +
+                               ALIGN(crypto_aead_reqsize(aead),
+                                     crypto_tfm_ctx_alignment()) +
+                               align + 16;
+
+       return 0;
+}
+
+static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm)
+{
+       struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_aead(ctx->child);
+}
+
+static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
+{
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       struct crypto_aead_spawn *spawn;
+       struct crypto_alg *alg;
+       const char *ccm_name;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+               return ERR_PTR(-EINVAL);
+
+       ccm_name = crypto_attr_alg_name(tb[1]);
+       err = PTR_ERR(ccm_name);
+       if (IS_ERR(ccm_name))
+               return ERR_PTR(err);
+
+       inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+       if (!inst)
+               return ERR_PTR(-ENOMEM);
+
+       spawn = crypto_instance_ctx(inst);
+       crypto_set_aead_spawn(spawn, inst);
+       err = crypto_grab_aead(spawn, ccm_name, 0,
+                              crypto_requires_sync(algt->type, algt->mask));
+       if (err)
+               goto out_free_inst;
+
+       alg = crypto_aead_spawn_alg(spawn);
+
+       err = -EINVAL;
+
+       /* We only support 16-byte blocks. */
+       if (alg->cra_aead.ivsize != 16)
+               goto out_drop_alg;
+
+       /* Not a stream cipher? */
+       if (alg->cra_blocksize != 1)
+               goto out_drop_alg;
+
+       err = -ENAMETOOLONG;
+       if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+                    "rfc4106(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
+           snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+                    "rfc4106(%s)", alg->cra_driver_name) >=
+           CRYPTO_MAX_ALG_NAME)
+               goto out_drop_alg;
+
+       inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+       inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
+       inst->alg.cra_priority = alg->cra_priority;
+       inst->alg.cra_blocksize = 1;
+       inst->alg.cra_alignmask = alg->cra_alignmask;
+       inst->alg.cra_type = &crypto_nivaead_type;
+
+       inst->alg.cra_aead.ivsize = 8;
+       inst->alg.cra_aead.maxauthsize = 16;
+
+       inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);
+
+       inst->alg.cra_init = crypto_rfc4106_init_tfm;
+       inst->alg.cra_exit = crypto_rfc4106_exit_tfm;
+
+       inst->alg.cra_aead.setkey = crypto_rfc4106_setkey;
+       inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize;
+       inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt;
+       inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt;
+
+       inst->alg.cra_aead.geniv = "seqiv";
+
+out:
+       return inst;
+
+out_drop_alg:
+       crypto_drop_aead(spawn);
+out_free_inst:
+       kfree(inst);
+       inst = ERR_PTR(err);
+       goto out;
+}
+
+static void crypto_rfc4106_free(struct crypto_instance *inst)
+{
+       crypto_drop_spawn(crypto_instance_ctx(inst));
+       kfree(inst);
+}
+
+static struct crypto_template crypto_rfc4106_tmpl = {
+       .name = "rfc4106",
+       .alloc = crypto_rfc4106_alloc,
+       .free = crypto_rfc4106_free,
+       .module = THIS_MODULE,
+};
+
+static int __init crypto_gcm_module_init(void)
+{
+       int err;
+
+       err = crypto_register_template(&crypto_gcm_base_tmpl);
+       if (err)
+               goto out;
+
+       err = crypto_register_template(&crypto_gcm_tmpl);
+       if (err)
+               goto out_undo_base;
+
+       err = crypto_register_template(&crypto_rfc4106_tmpl);
+       if (err)
+               goto out_undo_gcm;
+
+out:
+       return err;
+
+out_undo_gcm:
+       crypto_unregister_template(&crypto_gcm_tmpl);
+out_undo_base:
+       crypto_unregister_template(&crypto_gcm_base_tmpl);
+       goto out;
+}
+
+static void __exit crypto_gcm_module_exit(void)
+{
+       crypto_unregister_template(&crypto_rfc4106_tmpl);
+       crypto_unregister_template(&crypto_gcm_tmpl);
+       crypto_unregister_template(&crypto_gcm_base_tmpl);
+}
+
+module_init(crypto_gcm_module_init);
+module_exit(crypto_gcm_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Galois/Counter Mode");
+MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
+MODULE_ALIAS("gcm_base");
+MODULE_ALIAS("rfc4106");
index 0f05be7..a1d016a 100644 (file)
@@ -17,6 +17,7 @@
  */
 
 #include <crypto/algapi.h>
+#include <crypto/scatterwalk.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -160,7 +161,7 @@ static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg,
 
        sg_init_table(sg1, 2);
        sg_set_buf(sg1, ipad, bs);
-       sg_set_page(&sg1[1], (void *) sg, 0, 0);
+       scatterwalk_sg_chain(sg1, 2, sg);
 
        sg_init_table(sg2, 1);
        sg_set_buf(sg2, opad, bs + ds);
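
The hunk above switches hmac_digest() from poking sg_set_page() internals to the new scatterwalk_sg_chain() helper. As an illustration only (a hypothetical helper, not part of the patch), the general idiom for prepending a private buffer to a caller's scatterlist is:

/* Chain a one-entry header in front of an existing scatterlist.  The second
 * entry of hdr_sg[] becomes a chain pointer rather than a data segment. */
static void sg_prepend_example(struct scatterlist hdr_sg[2],
			       void *hdr, unsigned int hdr_len,
			       struct scatterlist *payload)
{
	sg_init_table(hdr_sg, 2);
	sg_set_buf(hdr_sg, hdr, hdr_len);
	scatterwalk_sg_chain(hdr_sg, 2, payload);
}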
index abb01f7..32f4c21 100644 (file)
@@ -25,7 +25,6 @@
 #include <linux/notifier.h>
 #include <linux/rwsem.h>
 #include <linux/slab.h>
-#include <asm/kmap_types.h>
 
 /* Crypto notification events. */
 enum {
@@ -50,34 +49,6 @@ extern struct list_head crypto_alg_list;
 extern struct rw_semaphore crypto_alg_sem;
 extern struct blocking_notifier_head crypto_chain;
 
-static inline enum km_type crypto_kmap_type(int out)
-{
-       enum km_type type;
-
-       if (in_softirq())
-               type = out * (KM_SOFTIRQ1 - KM_SOFTIRQ0) + KM_SOFTIRQ0;
-       else
-               type = out * (KM_USER1 - KM_USER0) + KM_USER0;
-
-       return type;
-}
-
-static inline void *crypto_kmap(struct page *page, int out)
-{
-       return kmap_atomic(page, crypto_kmap_type(out));
-}
-
-static inline void crypto_kunmap(void *vaddr, int out)
-{
-       kunmap_atomic(vaddr, crypto_kmap_type(out));
-}
-
-static inline void crypto_yield(u32 flags)
-{
-       if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
-               cond_resched();
-}
-
 #ifdef CONFIG_PROC_FS
 void __init crypto_init_proc(void);
 void __exit crypto_exit_proc(void);
@@ -122,6 +93,8 @@ void crypto_exit_digest_ops(struct crypto_tfm *tfm);
 void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
 void crypto_exit_compress_ops(struct crypto_tfm *tfm);
 
+void crypto_larval_kill(struct crypto_alg *alg);
+struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask);
 void crypto_larval_error(const char *name, u32 type, u32 mask);
 
 void crypto_shoot_alg(struct crypto_alg *alg);
diff --git a/crypto/lzo.c b/crypto/lzo.c
new file mode 100644 (file)
index 0000000..48c3288
--- /dev/null
@@ -0,0 +1,106 @@
+/*
+ * Cryptographic API.
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published by
+ * the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/vmalloc.h>
+#include <linux/lzo.h>
+
+struct lzo_ctx {
+       void *lzo_comp_mem;
+};
+
+static int lzo_init(struct crypto_tfm *tfm)
+{
+       struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       ctx->lzo_comp_mem = vmalloc(LZO1X_MEM_COMPRESS);
+       if (!ctx->lzo_comp_mem)
+               return -ENOMEM;
+
+       return 0;
+}
+
+static void lzo_exit(struct crypto_tfm *tfm)
+{
+       struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       vfree(ctx->lzo_comp_mem);
+}
+
+static int lzo_compress(struct crypto_tfm *tfm, const u8 *src,
+                           unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+       struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
+       size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */
+       int err;
+
+       err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx->lzo_comp_mem);
+
+       if (err != LZO_E_OK)
+               return -EINVAL;
+
+       *dlen = tmp_len;
+       return 0;
+}
+
+static int lzo_decompress(struct crypto_tfm *tfm, const u8 *src,
+                             unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+       int err;
+       size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */
+
+       err = lzo1x_decompress_safe(src, slen, dst, &tmp_len);
+
+       if (err != LZO_E_OK)
+               return -EINVAL;
+
+       *dlen = tmp_len;
+       return 0;
+
+}
+
+static struct crypto_alg alg = {
+       .cra_name               = "lzo",
+       .cra_flags              = CRYPTO_ALG_TYPE_COMPRESS,
+       .cra_ctxsize            = sizeof(struct lzo_ctx),
+       .cra_module             = THIS_MODULE,
+       .cra_list               = LIST_HEAD_INIT(alg.cra_list),
+       .cra_init               = lzo_init,
+       .cra_exit               = lzo_exit,
+       .cra_u                  = { .compress = {
+       .coa_compress           = lzo_compress,
+       .coa_decompress         = lzo_decompress } }
+};
+
+static int __init init(void)
+{
+       return crypto_register_alg(&alg);
+}
+
+static void __exit fini(void)
+{
+       crypto_unregister_alg(&alg);
+}
+
+module_init(init);
+module_exit(fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("LZO Compression Algorithm");
index c3ed8a1..fe70477 100644 (file)
@@ -24,7 +24,6 @@
 
 struct crypto_pcbc_ctx {
        struct crypto_cipher *child;
-       void (*xor)(u8 *dst, const u8 *src, unsigned int bs);
 };
 
 static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
@@ -45,9 +44,7 @@ static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
 
 static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
                                       struct blkcipher_walk *walk,
-                                      struct crypto_cipher *tfm,
-                                      void (*xor)(u8 *, const u8 *,
-                                                  unsigned int))
+                                      struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_encrypt;
@@ -58,10 +55,10 @@ static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
        u8 *iv = walk->iv;
 
        do {
-               xor(iv, src, bsize);
+               crypto_xor(iv, src, bsize);
                fn(crypto_cipher_tfm(tfm), dst, iv);
                memcpy(iv, dst, bsize);
-               xor(iv, src, bsize);
+               crypto_xor(iv, src, bsize);
 
                src += bsize;
                dst += bsize;
@@ -72,9 +69,7 @@ static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
 
 static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
                                       struct blkcipher_walk *walk,
-                                      struct crypto_cipher *tfm,
-                                      void (*xor)(u8 *, const u8 *,
-                                                  unsigned int))
+                                      struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_encrypt;
@@ -86,10 +81,10 @@ static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
 
        do {
                memcpy(tmpbuf, src, bsize);
-               xor(iv, tmpbuf, bsize);
+               crypto_xor(iv, src, bsize);
                fn(crypto_cipher_tfm(tfm), src, iv);
-               memcpy(iv, src, bsize);
-               xor(iv, tmpbuf, bsize);
+               memcpy(iv, tmpbuf, bsize);
+               crypto_xor(iv, src, bsize);
 
                src += bsize;
        } while ((nbytes -= bsize) >= bsize);
@@ -107,7 +102,6 @@ static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
-       void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
        int err;
 
        blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -115,11 +109,11 @@ static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
 
        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
-                       nbytes = crypto_pcbc_encrypt_inplace(desc, &walk, child,
-                                                            xor);
+                       nbytes = crypto_pcbc_encrypt_inplace(desc, &walk,
+                                                            child);
                else
-                       nbytes = crypto_pcbc_encrypt_segment(desc, &walk, child,
-                                                            xor);
+                       nbytes = crypto_pcbc_encrypt_segment(desc, &walk,
+                                                            child);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
 
@@ -128,9 +122,7 @@ static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
 
 static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
                                       struct blkcipher_walk *walk,
-                                      struct crypto_cipher *tfm,
-                                      void (*xor)(u8 *, const u8 *,
-                                                  unsigned int))
+                                      struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_decrypt;
@@ -142,9 +134,9 @@ static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
 
        do {
                fn(crypto_cipher_tfm(tfm), dst, src);
-               xor(dst, iv, bsize);
+               crypto_xor(dst, iv, bsize);
                memcpy(iv, src, bsize);
-               xor(iv, dst, bsize);
+               crypto_xor(iv, dst, bsize);
 
                src += bsize;
                dst += bsize;
@@ -157,9 +149,7 @@ static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
 
 static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
                                       struct blkcipher_walk *walk,
-                                      struct crypto_cipher *tfm,
-                                      void (*xor)(u8 *, const u8 *,
-                                                  unsigned int))
+                                      struct crypto_cipher *tfm)
 {
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_decrypt;
@@ -172,9 +162,9 @@ static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
        do {
                memcpy(tmpbuf, src, bsize);
                fn(crypto_cipher_tfm(tfm), src, src);
-               xor(src, iv, bsize);
+               crypto_xor(src, iv, bsize);
                memcpy(iv, tmpbuf, bsize);
-               xor(iv, src, bsize);
+               crypto_xor(iv, src, bsize);
 
                src += bsize;
        } while ((nbytes -= bsize) >= bsize);
@@ -192,7 +182,6 @@ static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
-       void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
        int err;
 
        blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -200,48 +189,17 @@ static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
 
        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
-                       nbytes = crypto_pcbc_decrypt_inplace(desc, &walk, child,
-                                                            xor);
+                       nbytes = crypto_pcbc_decrypt_inplace(desc, &walk,
+                                                            child);
                else
-                       nbytes = crypto_pcbc_decrypt_segment(desc, &walk, child,
-                                                            xor);
+                       nbytes = crypto_pcbc_decrypt_segment(desc, &walk,
+                                                            child);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
 
        return err;
 }
 
-static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
-{
-       do {
-               *a++ ^= *b++;
-       } while (--bs);
-}
-
-static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
-{
-       u32 *a = (u32 *)dst;
-       u32 *b = (u32 *)src;
-
-       do {
-               *a++ ^= *b++;
-       } while ((bs -= 4));
-}
-
-static void xor_64(u8 *a, const u8 *b, unsigned int bs)
-{
-       ((u32 *)a)[0] ^= ((u32 *)b)[0];
-       ((u32 *)a)[1] ^= ((u32 *)b)[1];
-}
-
-static void xor_128(u8 *a, const u8 *b, unsigned int bs)
-{
-       ((u32 *)a)[0] ^= ((u32 *)b)[0];
-       ((u32 *)a)[1] ^= ((u32 *)b)[1];
-       ((u32 *)a)[2] ^= ((u32 *)b)[2];
-       ((u32 *)a)[3] ^= ((u32 *)b)[3];
-}
-
 static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
 {
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
@@ -249,22 +207,6 @@ static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
        struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_cipher *cipher;
 
-       switch (crypto_tfm_alg_blocksize(tfm)) {
-       case 8:
-               ctx->xor = xor_64;
-               break;
-
-       case 16:
-               ctx->xor = xor_128;
-               break;
-
-       default:
-               if (crypto_tfm_alg_blocksize(tfm) % 4)
-                       ctx->xor = xor_byte;
-               else
-                       ctx->xor = xor_quad;
-       }
-
        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);
@@ -304,8 +246,9 @@ static struct crypto_instance *crypto_pcbc_alloc(struct rtattr **tb)
        inst->alg.cra_alignmask = alg->cra_alignmask;
        inst->alg.cra_type = &crypto_blkcipher_type;
 
-       if (!(alg->cra_blocksize % 4))
-               inst->alg.cra_alignmask |= 3;
+       /* We access the data as u32s when xoring. */
+       inst->alg.cra_alignmask |= __alignof__(u32) - 1;
+
        inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
        inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
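
All four private xor helpers are replaced by the shared crypto_xor() from crypto/algapi.c, which the raised alignmask makes safe. Roughly, as a sketch only (the real helper also mops up the trailing bytes of odd-sized blocks):

static void crypto_xor_sketch(u8 *dst, const u8 *src, unsigned int size)
{
	u32 *a = (u32 *)dst;
	const u32 *b = (const u32 *)src;

	/* Word-at-a-time XOR; valid here because the template ORs
	 * __alignof__(u32) - 1 into cra_alignmask. */
	for (; size >= 4; size -= 4)
		*a++ ^= *b++;
}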
diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c
new file mode 100644 (file)
index 0000000..1fa4e4d
--- /dev/null
@@ -0,0 +1,255 @@
+/*
+ * Salsa20: Salsa20 stream cipher algorithm
+ *
+ * Copyright (c) 2007 Tan Swee Heng <thesweeheng@gmail.com>
+ *
+ * Derived from:
+ * - salsa20.c: Public domain C code by Daniel J. Bernstein <djb@cr.yp.to>
+ *
+ * Salsa20 is a stream cipher candidate in eSTREAM, the ECRYPT Stream
+ * Cipher Project. It is designed by Daniel J. Bernstein <djb@cr.yp.to>.
+ * More information about eSTREAM and Salsa20 can be found here:
+ *   http://www.ecrypt.eu.org/stream/
+ *   http://cr.yp.to/snuffle.html
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <linux/crypto.h>
+#include <linux/types.h>
+#include <crypto/algapi.h>
+#include <asm/byteorder.h>
+
+#define SALSA20_IV_SIZE        8U
+#define SALSA20_MIN_KEY_SIZE  16U
+#define SALSA20_MAX_KEY_SIZE  32U
+
+/*
+ * Start of code taken from D. J. Bernstein's reference implementation.
+ * With some modifications and optimizations made to suit our needs.
+ */
+
+/*
+salsa20-ref.c version 20051118
+D. J. Bernstein
+Public domain.
+*/
+
+#define ROTATE(v,n) (((v) << (n)) | ((v) >> (32 - (n))))
+#define XOR(v,w) ((v) ^ (w))
+#define PLUS(v,w) (((v) + (w)))
+#define PLUSONE(v) (PLUS((v),1))
+#define U32TO8_LITTLE(p, v) \
+       { (p)[0] = (v >>  0) & 0xff; (p)[1] = (v >>  8) & 0xff; \
+         (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; }
+#define U8TO32_LITTLE(p)   \
+       (((u32)((p)[0])      ) | ((u32)((p)[1]) <<  8) | \
+        ((u32)((p)[2]) << 16) | ((u32)((p)[3]) << 24)   )
+
+struct salsa20_ctx
+{
+       u32 input[16];
+};
+
+static void salsa20_wordtobyte(u8 output[64], const u32 input[16])
+{
+       u32 x[16];
+       int i;
+
+       memcpy(x, input, sizeof(x));
+       for (i = 20; i > 0; i -= 2) {
+               x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 0],x[12]), 7));
+               x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[ 4],x[ 0]), 9));
+               x[12] = XOR(x[12],ROTATE(PLUS(x[ 8],x[ 4]),13));
+               x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[12],x[ 8]),18));
+               x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 5],x[ 1]), 7));
+               x[13] = XOR(x[13],ROTATE(PLUS(x[ 9],x[ 5]), 9));
+               x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[13],x[ 9]),13));
+               x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 1],x[13]),18));
+               x[14] = XOR(x[14],ROTATE(PLUS(x[10],x[ 6]), 7));
+               x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[14],x[10]), 9));
+               x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 2],x[14]),13));
+               x[10] = XOR(x[10],ROTATE(PLUS(x[ 6],x[ 2]),18));
+               x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[15],x[11]), 7));
+               x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 3],x[15]), 9));
+               x[11] = XOR(x[11],ROTATE(PLUS(x[ 7],x[ 3]),13));
+               x[15] = XOR(x[15],ROTATE(PLUS(x[11],x[ 7]),18));
+               x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[ 0],x[ 3]), 7));
+               x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[ 1],x[ 0]), 9));
+               x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[ 2],x[ 1]),13));
+               x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[ 3],x[ 2]),18));
+               x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 5],x[ 4]), 7));
+               x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 6],x[ 5]), 9));
+               x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 7],x[ 6]),13));
+               x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 4],x[ 7]),18));
+               x[11] = XOR(x[11],ROTATE(PLUS(x[10],x[ 9]), 7));
+               x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[11],x[10]), 9));
+               x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 8],x[11]),13));
+               x[10] = XOR(x[10],ROTATE(PLUS(x[ 9],x[ 8]),18));
+               x[12] = XOR(x[12],ROTATE(PLUS(x[15],x[14]), 7));
+               x[13] = XOR(x[13],ROTATE(PLUS(x[12],x[15]), 9));
+               x[14] = XOR(x[14],ROTATE(PLUS(x[13],x[12]),13));
+               x[15] = XOR(x[15],ROTATE(PLUS(x[14],x[13]),18));
+       }
+       for (i = 0; i < 16; ++i)
+               x[i] = PLUS(x[i],input[i]);
+       for (i = 0; i < 16; ++i)
+               U32TO8_LITTLE(output + 4 * i,x[i]);
+}
+
+static const char sigma[16] = "expand 32-byte k";
+static const char tau[16] = "expand 16-byte k";
+
+static void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k, u32 kbytes)
+{
+       const char *constants;
+
+       ctx->input[1] = U8TO32_LITTLE(k + 0);
+       ctx->input[2] = U8TO32_LITTLE(k + 4);
+       ctx->input[3] = U8TO32_LITTLE(k + 8);
+       ctx->input[4] = U8TO32_LITTLE(k + 12);
+       if (kbytes == 32) { /* recommended */
+               k += 16;
+               constants = sigma;
+       } else { /* kbytes == 16 */
+               constants = tau;
+       }
+       ctx->input[11] = U8TO32_LITTLE(k + 0);
+       ctx->input[12] = U8TO32_LITTLE(k + 4);
+       ctx->input[13] = U8TO32_LITTLE(k + 8);
+       ctx->input[14] = U8TO32_LITTLE(k + 12);
+       ctx->input[0] = U8TO32_LITTLE(constants + 0);
+       ctx->input[5] = U8TO32_LITTLE(constants + 4);
+       ctx->input[10] = U8TO32_LITTLE(constants + 8);
+       ctx->input[15] = U8TO32_LITTLE(constants + 12);
+}
+
+static void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv)
+{
+       ctx->input[6] = U8TO32_LITTLE(iv + 0);
+       ctx->input[7] = U8TO32_LITTLE(iv + 4);
+       ctx->input[8] = 0;
+       ctx->input[9] = 0;
+}
+
+static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst,
+                                 const u8 *src, unsigned int bytes)
+{
+       u8 buf[64];
+
+       if (dst != src)
+               memcpy(dst, src, bytes);
+
+       while (bytes) {
+               salsa20_wordtobyte(buf, ctx->input);
+
+               ctx->input[8] = PLUSONE(ctx->input[8]);
+               if (!ctx->input[8])
+                       ctx->input[9] = PLUSONE(ctx->input[9]);
+
+               if (bytes <= 64) {
+                       crypto_xor(dst, buf, bytes);
+                       return;
+               }
+
+               crypto_xor(dst, buf, 64);
+               bytes -= 64;
+               dst += 64;
+       }
+}
+
+/*
+ * End of code taken from D. J. Bernstein's reference implementation.
+ */
+
+static int setkey(struct crypto_tfm *tfm, const u8 *key,
+                 unsigned int keysize)
+{
+       struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm);
+       salsa20_keysetup(ctx, key, keysize);
+       return 0;
+}
+
+static int encrypt(struct blkcipher_desc *desc,
+                  struct scatterlist *dst, struct scatterlist *src,
+                  unsigned int nbytes)
+{
+       struct blkcipher_walk walk;
+       struct crypto_blkcipher *tfm = desc->tfm;
+       struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm);
+       int err;
+
+       blkcipher_walk_init(&walk, dst, src, nbytes);
+       err = blkcipher_walk_virt_block(desc, &walk, 64);
+
+       salsa20_ivsetup(ctx, walk.iv);
+
+       if (likely(walk.nbytes == nbytes))
+       {
+               salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
+                                     walk.src.virt.addr, nbytes);
+               return blkcipher_walk_done(desc, &walk, 0);
+       }
+
+       while (walk.nbytes >= 64) {
+               salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
+                                     walk.src.virt.addr,
+                                     walk.nbytes - (walk.nbytes % 64));
+               err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64);
+       }
+
+       if (walk.nbytes) {
+               salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
+                                     walk.src.virt.addr, walk.nbytes);
+               err = blkcipher_walk_done(desc, &walk, 0);
+       }
+
+       return err;
+}
+
+static struct crypto_alg alg = {
+       .cra_name           =   "salsa20",
+       .cra_driver_name    =   "salsa20-generic",
+       .cra_priority       =   100,
+       .cra_flags          =   CRYPTO_ALG_TYPE_BLKCIPHER,
+       .cra_type           =   &crypto_blkcipher_type,
+       .cra_blocksize      =   1,
+       .cra_ctxsize        =   sizeof(struct salsa20_ctx),
+       .cra_alignmask      =   3,
+       .cra_module         =   THIS_MODULE,
+       .cra_list           =   LIST_HEAD_INIT(alg.cra_list),
+       .cra_u              =   {
+               .blkcipher = {
+                       .setkey         =   setkey,
+                       .encrypt        =   encrypt,
+                       .decrypt        =   encrypt,
+                       .min_keysize    =   SALSA20_MIN_KEY_SIZE,
+                       .max_keysize    =   SALSA20_MAX_KEY_SIZE,
+                       .ivsize         =   SALSA20_IV_SIZE,
+               }
+       }
+};
+
+static int __init init(void)
+{
+       return crypto_register_alg(&alg);
+}
+
+static void __exit fini(void)
+{
+       crypto_unregister_alg(&alg);
+}
+
+module_init(init);
+module_exit(fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION ("Salsa20 stream cipher algorithm");
+MODULE_ALIAS("salsa20");
index b9bbda0..9aeeb52 100644 (file)
@@ -13,6 +13,8 @@
  * any later version.
  *
  */
+
+#include <crypto/scatterwalk.h>
 #include <linux/kernel.h>
 #include <linux/mm.h>
 #include <linux/module.h>
@@ -20,9 +22,6 @@
 #include <linux/highmem.h>
 #include <linux/scatterlist.h>
 
-#include "internal.h"
-#include "scatterwalk.h"
-
 static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out)
 {
        void *src = out ? buf : sgdata;
@@ -106,6 +105,9 @@ void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
        struct scatter_walk walk;
        unsigned int offset = 0;
 
+       if (!nbytes)
+               return;
+
        for (;;) {
                scatterwalk_start(&walk, sg);
 
@@ -113,7 +115,7 @@ void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
                        break;
 
                offset += sg->length;
-               sg = sg_next(sg);
+               sg = scatterwalk_sg_next(sg);
        }
 
        scatterwalk_advance(&walk, start - offset);
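
scatterwalk_map_and_copy() is the helper gcm.c above leans on to move the authentication tag between a linear buffer and the request scatterlist; the new early return makes a zero-length copy a no-op. Its use, shown as a hypothetical wrapper for reference:

static void copy_tag_example(u8 *tag, struct scatterlist *sg,
			     unsigned int cryptlen, unsigned int authsize)
{
	/* out = 0: copy authsize bytes at offset cryptlen from sg into tag. */
	scatterwalk_map_and_copy(tag, sg, cryptlen, authsize, 0);
}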
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
new file mode 100644 (file)
index 0000000..b903aab
--- /dev/null
@@ -0,0 +1,345 @@
+/*
+ * seqiv: Sequence Number IV Generator
+ *
+ * This generator generates an IV based on a sequence number by xoring it
+ * with a salt.  This algorithm is mainly useful for CTR and similar modes.
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/random.h>
+#include <linux/spinlock.h>
+#include <linux/string.h>
+
+struct seqiv_ctx {
+       spinlock_t lock;
+       u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
+};
+
+static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
+{
+       struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+       struct crypto_ablkcipher *geniv;
+
+       if (err == -EINPROGRESS)
+               return;
+
+       if (err)
+               goto out;
+
+       geniv = skcipher_givcrypt_reqtfm(req);
+       memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));
+
+out:
+       kfree(subreq->info);
+}
+
+static void seqiv_complete(struct crypto_async_request *base, int err)
+{
+       struct skcipher_givcrypt_request *req = base->data;
+
+       seqiv_complete2(req, err);
+       skcipher_givcrypt_complete(req, err);
+}
+
+static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
+{
+       struct aead_request *subreq = aead_givcrypt_reqctx(req);
+       struct crypto_aead *geniv;
+
+       if (err == -EINPROGRESS)
+               return;
+
+       if (err)
+               goto out;
+
+       geniv = aead_givcrypt_reqtfm(req);
+       memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));
+
+out:
+       kfree(subreq->iv);
+}
+
+static void seqiv_aead_complete(struct crypto_async_request *base, int err)
+{
+       struct aead_givcrypt_request *req = base->data;
+
+       seqiv_aead_complete2(req, err);
+       aead_givcrypt_complete(req, err);
+}
+
+static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
+                       unsigned int ivsize)
+{
+       unsigned int len = ivsize;
+
+       if (ivsize > sizeof(u64)) {
+               memset(info, 0, ivsize - sizeof(u64));
+               len = sizeof(u64);
+       }
+       seq = cpu_to_be64(seq);
+       memcpy(info + ivsize - len, &seq, len);
+       crypto_xor(info, ctx->salt, ivsize);
+}
+
+static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+       struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+       crypto_completion_t complete;
+       void *data;
+       u8 *info;
+       unsigned int ivsize;
+       int err;
+
+       ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
+
+       complete = req->creq.base.complete;
+       data = req->creq.base.data;
+       info = req->creq.info;
+
+       ivsize = crypto_ablkcipher_ivsize(geniv);
+
+       if (unlikely(!IS_ALIGNED((unsigned long)info,
+                                crypto_ablkcipher_alignmask(geniv) + 1))) {
+               info = kmalloc(ivsize, req->creq.base.flags &
+                                      CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+                                                                 GFP_ATOMIC);
+               if (!info)
+                       return -ENOMEM;
+
+               complete = seqiv_complete;
+               data = req;
+       }
+
+       ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete,
+                                       data);
+       ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
+                                    req->creq.nbytes, info);
+
+       seqiv_geniv(ctx, info, req->seq, ivsize);
+       memcpy(req->giv, info, ivsize);
+
+       err = crypto_ablkcipher_encrypt(subreq);
+       if (unlikely(info != req->creq.info))
+               seqiv_complete2(req, err);
+       return err;
+}
+
+static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
+{
+       struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
+       struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
+       struct aead_request *areq = &req->areq;
+       struct aead_request *subreq = aead_givcrypt_reqctx(req);
+       crypto_completion_t complete;
+       void *data;
+       u8 *info;
+       unsigned int ivsize;
+       int err;
+
+       aead_request_set_tfm(subreq, aead_geniv_base(geniv));
+
+       complete = areq->base.complete;
+       data = areq->base.data;
+       info = areq->iv;
+
+       ivsize = crypto_aead_ivsize(geniv);
+
+       if (unlikely(!IS_ALIGNED((unsigned long)info,
+                                crypto_aead_alignmask(geniv) + 1))) {
+               info = kmalloc(ivsize, areq->base.flags &
+                                      CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+                                                                 GFP_ATOMIC);
+               if (!info)
+                       return -ENOMEM;
+
+               complete = seqiv_aead_complete;
+               data = req;
+       }
+
+       aead_request_set_callback(subreq, areq->base.flags, complete, data);
+       aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
+                              info);
+       aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);
+
+       seqiv_geniv(ctx, info, req->seq, ivsize);
+       memcpy(req->giv, info, ivsize);
+
+       err = crypto_aead_encrypt(subreq);
+       if (unlikely(info != areq->iv))
+               seqiv_aead_complete2(req, err);
+       return err;
+}
+
+static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
+{
+       struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+       struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+
+       spin_lock_bh(&ctx->lock);
+       if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first)
+               goto unlock;
+
+       crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
+       get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv));
+
+unlock:
+       spin_unlock_bh(&ctx->lock);
+
+       return seqiv_givencrypt(req);
+}
+
+static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req)
+{
+       struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
+       struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
+
+       spin_lock_bh(&ctx->lock);
+       if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first)
+               goto unlock;
+
+       crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt;
+       get_random_bytes(ctx->salt, crypto_aead_ivsize(geniv));
+
+unlock:
+       spin_unlock_bh(&ctx->lock);
+
+       return seqiv_aead_givencrypt(req);
+}
+
+static int seqiv_init(struct crypto_tfm *tfm)
+{
+       struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
+       struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+
+       spin_lock_init(&ctx->lock);
+
+       tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
+
+       return skcipher_geniv_init(tfm);
+}
+
+static int seqiv_aead_init(struct crypto_tfm *tfm)
+{
+       struct crypto_aead *geniv = __crypto_aead_cast(tfm);
+       struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
+
+       spin_lock_init(&ctx->lock);
+
+       tfm->crt_aead.reqsize = sizeof(struct aead_request);
+
+       return aead_geniv_init(tfm);
+}
+
+static struct crypto_template seqiv_tmpl;
+
+static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
+{
+       struct crypto_instance *inst;
+
+       inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
+
+       if (IS_ERR(inst))
+               goto out;
+
+       inst->alg.cra_ablkcipher.givencrypt = seqiv_givencrypt_first;
+
+       inst->alg.cra_init = seqiv_init;
+       inst->alg.cra_exit = skcipher_geniv_exit;
+
+       inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
+
+out:
+       return inst;
+}
+
+static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb)
+{
+       struct crypto_instance *inst;
+
+       inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
+
+       if (IS_ERR(inst))
+               goto out;
+
+       inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
+
+       inst->alg.cra_init = seqiv_aead_init;
+       inst->alg.cra_exit = aead_geniv_exit;
+
+       inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
+
+out:
+       return inst;
+}
+
+static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
+{
+       struct crypto_attr_type *algt;
+       struct crypto_instance *inst;
+       int err;
+
+       algt = crypto_get_attr_type(tb);
+       err = PTR_ERR(algt);
+       if (IS_ERR(algt))
+               return ERR_PTR(err);
+
+       if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
+               inst = seqiv_ablkcipher_alloc(tb);
+       else
+               inst = seqiv_aead_alloc(tb);
+
+       if (IS_ERR(inst))
+               goto out;
+
+       inst->alg.cra_alignmask |= __alignof__(u32) - 1;
+       inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
+
+out:
+       return inst;
+}
+
+static void seqiv_free(struct crypto_instance *inst)
+{
+       if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
+               skcipher_geniv_free(inst);
+       else
+               aead_geniv_free(inst);
+}
+
+static struct crypto_template seqiv_tmpl = {
+       .name = "seqiv",
+       .alloc = seqiv_alloc,
+       .free = seqiv_free,
+       .module = THIS_MODULE,
+};
+
+static int __init seqiv_module_init(void)
+{
+       return crypto_register_template(&seqiv_tmpl);
+}
+
+static void __exit seqiv_module_exit(void)
+{
+       crypto_unregister_template(&seqiv_tmpl);
+}
+
+module_init(seqiv_module_init);
+module_exit(seqiv_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Sequence Number IV Generator");
index fd3918b..3cc93fd 100644 (file)
@@ -9,6 +9,7 @@
  * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
  * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
@@ -218,6 +219,22 @@ static void sha256_transform(u32 *state, const u8 *input)
        memset(W, 0, 64 * sizeof(u32));
 }
 
+
+static void sha224_init(struct crypto_tfm *tfm)
+{
+       struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+       sctx->state[0] = SHA224_H0;
+       sctx->state[1] = SHA224_H1;
+       sctx->state[2] = SHA224_H2;
+       sctx->state[3] = SHA224_H3;
+       sctx->state[4] = SHA224_H4;
+       sctx->state[5] = SHA224_H5;
+       sctx->state[6] = SHA224_H6;
+       sctx->state[7] = SHA224_H7;
+       sctx->count[0] = 0;
+       sctx->count[1] = 0;
+}
+
 static void sha256_init(struct crypto_tfm *tfm)
 {
        struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
@@ -294,8 +311,17 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
        memset(sctx, 0, sizeof(*sctx));
 }
 
+static void sha224_final(struct crypto_tfm *tfm, u8 *hash)
+{
+       u8 D[SHA256_DIGEST_SIZE];
+
+       sha256_final(tfm, D);
+
+       memcpy(hash, D, SHA224_DIGEST_SIZE);
+       memset(D, 0, SHA256_DIGEST_SIZE);
+}
 
-static struct crypto_alg alg = {
+static struct crypto_alg sha256 = {
        .cra_name       =       "sha256",
        .cra_driver_name=       "sha256-generic",
        .cra_flags      =       CRYPTO_ALG_TYPE_DIGEST,
@@ -303,28 +329,58 @@ static struct crypto_alg alg = {
        .cra_ctxsize    =       sizeof(struct sha256_ctx),
        .cra_module     =       THIS_MODULE,
        .cra_alignmask  =       3,
-       .cra_list       =       LIST_HEAD_INIT(alg.cra_list),
+       .cra_list       =       LIST_HEAD_INIT(sha256.cra_list),
        .cra_u          =       { .digest = {
        .dia_digestsize =       SHA256_DIGEST_SIZE,
-       .dia_init       =       sha256_init,
-       .dia_update     =       sha256_update,
-       .dia_final      =       sha256_final } }
+       .dia_init       =       sha256_init,
+       .dia_update     =       sha256_update,
+       .dia_final      =       sha256_final } }
+};
+
+static struct crypto_alg sha224 = {
+       .cra_name       = "sha224",
+       .cra_driver_name = "sha224-generic",
+       .cra_flags      = CRYPTO_ALG_TYPE_DIGEST,
+       .cra_blocksize  = SHA224_BLOCK_SIZE,
+       .cra_ctxsize    = sizeof(struct sha256_ctx),
+       .cra_module     = THIS_MODULE,
+       .cra_alignmask  = 3,
+       .cra_list       = LIST_HEAD_INIT(sha224.cra_list),
+       .cra_u          = { .digest = {
+       .dia_digestsize = SHA224_DIGEST_SIZE,
+       .dia_init       = sha224_init,
+       .dia_update     = sha256_update,
+       .dia_final      = sha224_final } }
 };
 
 static int __init init(void)
 {
-       return crypto_register_alg(&alg);
+       int ret = 0;
+
+       ret = crypto_register_alg(&sha224);
+
+       if (ret < 0)
+               return ret;
+
+       ret = crypto_register_alg(&sha256);
+
+       if (ret < 0)
+               crypto_unregister_alg(&sha224);
+
+       return ret;
 }
 
 static void __exit fini(void)
 {
-       crypto_unregister_alg(&alg);
+       crypto_unregister_alg(&sha224);
+       crypto_unregister_alg(&sha256);
 }
 
 module_init(init);
 module_exit(fini);
 
 MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm");
+MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm");
 
+MODULE_ALIAS("sha224");
 MODULE_ALIAS("sha256");
index 24141fb..1ab8c01 100644 (file)
@@ -6,12 +6,16 @@
  *
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
+ * Copyright (c) 2007 Nokia Siemens Networks
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
  * Software Foundation; either version 2 of the License, or (at your option)
  * any later version.
  *
+ * 2007-11-13 Added GCM tests
+ * 2007-11-13 Added AEAD support
+ * 2007-11-06 Added SHA-224 and SHA-224-HMAC tests
  * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests
  * 2004-08-09 Added cipher speed tests (Reyk Floeter <reyk@vantronix.net>)
  * 2003-09-14 Rewritten by Kartikey Mahendra Bhatt
@@ -71,22 +75,23 @@ static unsigned int sec;
 
 static int mode;
 static char *xbuf;
+static char *axbuf;
 static char *tvmem;
 
 static char *check[] = {
-       "des", "md5", "des3_ede", "rot13", "sha1", "sha256", "blowfish",
-       "twofish", "serpent", "sha384", "sha512", "md4", "aes", "cast6",
+       "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
+       "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
+       "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
        "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
        "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta",  "fcrypt",
-       "camellia", "seed", NULL
+       "camellia", "seed", "salsa20", "lzo", NULL
 };
 
 static void hexdump(unsigned char *buf, unsigned int len)
 {
-       while (len--)
-               printk("%02x", *buf++);
-
-       printk("\n");
+       print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
+                       16, 1,
+                       buf, len, false);
 }
 
 static void tcrypt_complete(struct crypto_async_request *req, int err)
@@ -215,6 +220,238 @@ out:
        crypto_free_hash(tfm);
 }
 
+static void test_aead(char *algo, int enc, struct aead_testvec *template,
+                     unsigned int tcount)
+{
+       unsigned int ret, i, j, k, temp;
+       unsigned int tsize;
+       char *q;
+       struct crypto_aead *tfm;
+       char *key;
+       struct aead_testvec *aead_tv;
+       struct aead_request *req;
+       struct scatterlist sg[8];
+       struct scatterlist asg[8];
+       const char *e;
+       struct tcrypt_result result;
+       unsigned int authsize;
+
+       if (enc == ENCRYPT)
+               e = "encryption";
+       else
+               e = "decryption";
+
+       printk(KERN_INFO "\ntesting %s %s\n", algo, e);
+
+       tsize = sizeof(struct aead_testvec);
+       tsize *= tcount;
+
+       if (tsize > TVMEMSIZE) {
+               printk(KERN_INFO "template (%u) too big for tvmem (%u)\n",
+                      tsize, TVMEMSIZE);
+               return;
+       }
+
+       memcpy(tvmem, template, tsize);
+       aead_tv = (void *)tvmem;
+
+       init_completion(&result.completion);
+
+       tfm = crypto_alloc_aead(algo, 0, 0);
+
+       if (IS_ERR(tfm)) {
+               printk(KERN_INFO "failed to load transform for %s: %ld\n",
+                      algo, PTR_ERR(tfm));
+               return;
+       }
+
+       req = aead_request_alloc(tfm, GFP_KERNEL);
+       if (!req) {
+               printk(KERN_INFO "failed to allocate request for %s\n", algo);
+               goto out;
+       }
+
+       aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+                                 tcrypt_complete, &result);
+
+       for (i = 0, j = 0; i < tcount; i++) {
+               if (!aead_tv[i].np) {
+                       printk(KERN_INFO "test %u (%d bit key):\n",
+                              ++j, aead_tv[i].klen * 8);
+
+                       crypto_aead_clear_flags(tfm, ~0);
+                       if (aead_tv[i].wk)
+                               crypto_aead_set_flags(
+                                       tfm, CRYPTO_TFM_REQ_WEAK_KEY);
+                       key = aead_tv[i].key;
+
+                       ret = crypto_aead_setkey(tfm, key,
+                                                aead_tv[i].klen);
+                       if (ret) {
+                               printk(KERN_INFO "setkey() failed flags=%x\n",
+                                      crypto_aead_get_flags(tfm));
+
+                               if (!aead_tv[i].fail)
+                                       goto out;
+                       }
+
+                       authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen);
+                       ret = crypto_aead_setauthsize(tfm, authsize);
+                       if (ret) {
+                               printk(KERN_INFO
+                                      "failed to set authsize = %u\n",
+                                      authsize);
+                               goto out;
+                       }
+
+                       sg_init_one(&sg[0], aead_tv[i].input,
+                                   aead_tv[i].ilen + (enc ? authsize : 0));
+
+                       sg_init_one(&asg[0], aead_tv[i].assoc,
+                                   aead_tv[i].alen);
+
+                       aead_request_set_crypt(req, sg, sg,
+                                              aead_tv[i].ilen,
+                                              aead_tv[i].iv);
+
+                       aead_request_set_assoc(req, asg, aead_tv[i].alen);
+
+                       ret = enc ?
+                               crypto_aead_encrypt(req) :
+                               crypto_aead_decrypt(req);
+
+                       switch (ret) {
+                       case 0:
+                               break;
+                       case -EINPROGRESS:
+                       case -EBUSY:
+                               ret = wait_for_completion_interruptible(
+                                       &result.completion);
+                               if (!ret && !(ret = result.err)) {
+                                       INIT_COMPLETION(result.completion);
+                                       break;
+                               }
+                               /* fall through */
+                       default:
+                               printk(KERN_INFO "%s () failed err=%d\n",
+                                      e, -ret);
+                               goto out;
+                       }
+
+                       q = kmap(sg_page(&sg[0])) + sg[0].offset;
+                       hexdump(q, aead_tv[i].rlen);
+
+                       printk(KERN_INFO "enc/dec: %s\n",
+                              memcmp(q, aead_tv[i].result,
+                                     aead_tv[i].rlen) ? "fail" : "pass");
+               }
+       }
+
+       printk(KERN_INFO "\ntesting %s %s across pages (chunking)\n", algo, e);
+       memset(xbuf, 0, XBUFSIZE);
+       memset(axbuf, 0, XBUFSIZE);
+
+       for (i = 0, j = 0; i < tcount; i++) {
+               if (aead_tv[i].np) {
+                       printk(KERN_INFO "test %u (%d bit key):\n",
+                              ++j, aead_tv[i].klen * 8);
+
+                       crypto_aead_clear_flags(tfm, ~0);
+                       if (aead_tv[i].wk)
+                               crypto_aead_set_flags(
+                                       tfm, CRYPTO_TFM_REQ_WEAK_KEY);
+                       key = aead_tv[i].key;
+
+                       ret = crypto_aead_setkey(tfm, key, aead_tv[i].klen);
+                       if (ret) {
+                               printk(KERN_INFO "setkey() failed flags=%x\n",
+                                      crypto_aead_get_flags(tfm));
+
+                               if (!aead_tv[i].fail)
+                                       goto out;
+                       }
+
+                       sg_init_table(sg, aead_tv[i].np);
+                       for (k = 0, temp = 0; k < aead_tv[i].np; k++) {
+                               memcpy(&xbuf[IDX[k]],
+                                      aead_tv[i].input + temp,
+                                      aead_tv[i].tap[k]);
+                               temp += aead_tv[i].tap[k];
+                               sg_set_buf(&sg[k], &xbuf[IDX[k]],
+                                          aead_tv[i].tap[k]);
+                       }
+
+                       authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen);
+                       ret = crypto_aead_setauthsize(tfm, authsize);
+                       if (ret) {
+                               printk(KERN_INFO
+                                      "failed to set authsize = %u\n",
+                                      authsize);
+                               goto out;
+                       }
+
+                       if (enc)
+                               sg[k - 1].length += authsize;
+
+                       sg_init_table(asg, aead_tv[i].anp);
+                       for (k = 0, temp = 0; k < aead_tv[i].anp; k++) {
+                               memcpy(&axbuf[IDX[k]],
+                                      aead_tv[i].assoc + temp,
+                                      aead_tv[i].atap[k]);
+                               temp += aead_tv[i].atap[k];
+                               sg_set_buf(&asg[k], &axbuf[IDX[k]],
+                                          aead_tv[i].atap[k]);
+                       }
+
+                       aead_request_set_crypt(req, sg, sg,
+                                              aead_tv[i].ilen,
+                                              aead_tv[i].iv);
+
+                       aead_request_set_assoc(req, asg, aead_tv[i].alen);
+
+                       ret = enc ?
+                               crypto_aead_encrypt(req) :
+                               crypto_aead_decrypt(req);
+
+                       switch (ret) {
+                       case 0:
+                               break;
+                       case -EINPROGRESS:
+                       case -EBUSY:
+                               ret = wait_for_completion_interruptible(
+                                       &result.completion);
+                               if (!ret && !(ret = result.err)) {
+                                       INIT_COMPLETION(result.completion);
+                                       break;
+                               }
+                               /* fall through */
+                       default:
+                               printk(KERN_INFO "%s () failed err=%d\n",
+                                      e, -ret);
+                               goto out;
+                       }
+
+                       for (k = 0, temp = 0; k < aead_tv[i].np; k++) {
+                               printk(KERN_INFO "page %u\n", k);
+                               q = kmap(sg_page(&sg[k])) + sg[k].offset;
+                               hexdump(q, aead_tv[i].tap[k]);
+                               printk(KERN_INFO "%s\n",
+                                      memcmp(q, aead_tv[i].result + temp,
+                                             aead_tv[i].tap[k] -
+                                             (k < aead_tv[i].np - 1 || enc ?
+                                              0 : authsize)) ?
+                                      "fail" : "pass");
+
+                               temp += aead_tv[i].tap[k];
+                       }
+               }
+       }
+
+out:
+       crypto_free_aead(tfm);
+       aead_request_free(req);
+}
+
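+
The test_aead() harness above walks the new AEAD interface end to end: allocate the transform, set the key and authentication tag size, attach the data and associated data, then issue the request. A condensed sketch of the same flow for one in-place gcm(aes) encryption, using only calls that appear in the harness (and the same headers as the SHA-224 sketch earlier); the wrapper name and buffer layout are assumptions for illustration, and a real caller would wait on -EINPROGRESS/-EBUSY the way the harness does:

/*
 * hypothetical helper: encrypt ptlen bytes of buf in place with gcm(aes);
 * buf must leave room for the 16-byte tag appended after the plaintext
 */
static int gcm_encrypt_buf(u8 *buf, unsigned int ptlen,
			   u8 *assoc, unsigned int assoclen,
			   u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg, asg;
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (!err)
		err = crypto_aead_setauthsize(tfm, 16);	/* 128-bit tag */
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, ptlen + 16);	/* plaintext plus room for tag */
	sg_init_one(&asg, assoc, assoclen);

	/* no completion callback here; see tcrypt_complete() for the async case */
	aead_request_set_callback(req, 0, NULL, NULL);
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
	aead_request_set_assoc(req, &asg, assoclen);

	err = crypto_aead_encrypt(req);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}
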
 static void test_cipher(char *algo, int enc,
                        struct cipher_testvec *template, unsigned int tcount)
 {
@@ -237,15 +474,11 @@ static void test_cipher(char *algo, int enc,
        printk("\ntesting %s %s\n", algo, e);
 
        tsize = sizeof (struct cipher_testvec);
-       tsize *= tcount;
-
        if (tsize > TVMEMSIZE) {
                printk("template (%u) too big for tvmem (%u)\n", tsize,
                       TVMEMSIZE);
                return;
        }
-
-       memcpy(tvmem, template, tsize);
        cipher_tv = (void *)tvmem;
 
        init_completion(&result.completion);
@@ -269,33 +502,34 @@ static void test_cipher(char *algo, int enc,
 
        j = 0;
        for (i = 0; i < tcount; i++) {
-               if (!(cipher_tv[i].np)) {
+               memcpy(cipher_tv, &template[i], tsize);
+               if (!(cipher_tv->np)) {
                        j++;
                        printk("test %u (%d bit key):\n",
-                       j, cipher_tv[i].klen * 8);
+                       j, cipher_tv->klen * 8);
 
                        crypto_ablkcipher_clear_flags(tfm, ~0);
-                       if (cipher_tv[i].wk)
+                       if (cipher_tv->wk)
                                crypto_ablkcipher_set_flags(
                                        tfm, CRYPTO_TFM_REQ_WEAK_KEY);
-                       key = cipher_tv[i].key;
+                       key = cipher_tv->key;
 
                        ret = crypto_ablkcipher_setkey(tfm, key,
-                                                      cipher_tv[i].klen);
+                                                      cipher_tv->klen);
                        if (ret) {
                                printk("setkey() failed flags=%x\n",
                                       crypto_ablkcipher_get_flags(tfm));
 
-                               if (!cipher_tv[i].fail)
+                               if (!cipher_tv->fail)
                                        goto out;
                        }
 
-                       sg_init_one(&sg[0], cipher_tv[i].input,
-                                   cipher_tv[i].ilen);
+                       sg_init_one(&sg[0], cipher_tv->input,
+                                   cipher_tv->ilen);
 
                        ablkcipher_request_set_crypt(req, sg, sg,
-                                                    cipher_tv[i].ilen,
-                                                    cipher_tv[i].iv);
+                                                    cipher_tv->ilen,
+                                                    cipher_tv->iv);
 
                        ret = enc ?
                                crypto_ablkcipher_encrypt(req) :
@@ -319,11 +553,11 @@ static void test_cipher(char *algo, int enc,
                        }
 
                        q = kmap(sg_page(&sg[0])) + sg[0].offset;
-                       hexdump(q, cipher_tv[i].rlen);
+                       hexdump(q, cipher_tv->rlen);
 
                        printk("%s\n",
-                              memcmp(q, cipher_tv[i].result,
-                                     cipher_tv[i].rlen) ? "fail" : "pass");
+                              memcmp(q, cipher_tv->result,
+                                     cipher_tv->rlen) ? "fail" : "pass");
                }
        }
 
@@ -332,41 +566,42 @@ static void test_cipher(char *algo, int enc,
 
        j = 0;
        for (i = 0; i < tcount; i++) {
-               if (cipher_tv[i].np) {
+               memcpy(cipher_tv, &template[i], tsize);
+               if (cipher_tv->np) {
                        j++;
                        printk("test %u (%d bit key):\n",
-                       j, cipher_tv[i].klen * 8);
+                       j, cipher_tv->klen * 8);
 
                        crypto_ablkcipher_clear_flags(tfm, ~0);
-                       if (cipher_tv[i].wk)
+                       if (cipher_tv->wk)
                                crypto_ablkcipher_set_flags(
                                        tfm, CRYPTO_TFM_REQ_WEAK_KEY);
-                       key = cipher_tv[i].key;
+                       key = cipher_tv->key;
 
                        ret = crypto_ablkcipher_setkey(tfm, key,
-                                                      cipher_tv[i].klen);
+                                                      cipher_tv->klen);
                        if (ret) {
                                printk("setkey() failed flags=%x\n",
                                       crypto_ablkcipher_get_flags(tfm));
 
-                               if (!cipher_tv[i].fail)
+                               if (!cipher_tv->fail)
                                        goto out;
                        }
 
                        temp = 0;
-                       sg_init_table(sg, cipher_tv[i].np);
-                       for (k = 0; k < cipher_tv[i].np; k++) {
+                       sg_init_table(sg, cipher_tv->np);
+                       for (k = 0; k < cipher_tv->np; k++) {
                                memcpy(&xbuf[IDX[k]],
-                                      cipher_tv[i].input + temp,
-                                      cipher_tv[i].tap[k]);
-                               temp += cipher_tv[i].tap[k];
+                                      cipher_tv->input + temp,
+                                      cipher_tv->tap[k]);
+                               temp += cipher_tv->tap[k];
                                sg_set_buf(&sg[k], &xbuf[IDX[k]],
-                                          cipher_tv[i].tap[k]);
+                                          cipher_tv->tap[k]);
                        }
 
                        ablkcipher_request_set_crypt(req, sg, sg,
-                                                    cipher_tv[i].ilen,
-                                                    cipher_tv[i].iv);
+                                                    cipher_tv->ilen,
+                                                    cipher_tv->iv);
 
                        ret = enc ?
                                crypto_ablkcipher_encrypt(req) :
@@ -390,15 +625,15 @@ static void test_cipher(char *algo, int enc,
                        }
 
                        temp = 0;
-                       for (k = 0; k < cipher_tv[i].np; k++) {
+                       for (k = 0; k < cipher_tv->np; k++) {
                                printk("page %u\n", k);
                                q = kmap(sg_page(&sg[k])) + sg[k].offset;
-                               hexdump(q, cipher_tv[i].tap[k]);
+                               hexdump(q, cipher_tv->tap[k]);
                                printk("%s\n",
-                                       memcmp(q, cipher_tv[i].result + temp,
-                                               cipher_tv[i].tap[k]) ? "fail" :
+                                       memcmp(q, cipher_tv->result + temp,
+                                               cipher_tv->tap[k]) ? "fail" :
                                        "pass");
-                               temp += cipher_tv[i].tap[k];
+                               temp += cipher_tv->tap[k];
                        }
                }
        }
@@ -800,7 +1035,8 @@ out:
        crypto_free_hash(tfm);
 }
 
-static void test_deflate(void)
+static void test_comp(char *algo, struct comp_testvec *ctemplate,
+                      struct comp_testvec *dtemplate, int ctcount, int dtcount)
 {
        unsigned int i;
        char result[COMP_BUF_SIZE];
@@ -808,25 +1044,26 @@ static void test_deflate(void)
        struct comp_testvec *tv;
        unsigned int tsize;
 
-       printk("\ntesting deflate compression\n");
+       printk("\ntesting %s compression\n", algo);
 
-       tsize = sizeof (deflate_comp_tv_template);
+       tsize = sizeof(struct comp_testvec);
+       tsize *= ctcount;
        if (tsize > TVMEMSIZE) {
                printk("template (%u) too big for tvmem (%u)\n", tsize,
                       TVMEMSIZE);
                return;
        }
 
-       memcpy(tvmem, deflate_comp_tv_template, tsize);
+       memcpy(tvmem, ctemplate, tsize);
        tv = (void *)tvmem;
 
-       tfm = crypto_alloc_comp("deflate", 0, CRYPTO_ALG_ASYNC);
+       tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm)) {
-               printk("failed to load transform for deflate\n");
+               printk("failed to load transform for %s\n", algo);
                return;
        }
 
-       for (i = 0; i < DEFLATE_COMP_TEST_VECTORS; i++) {
+       for (i = 0; i < ctcount; i++) {
                int ilen, ret, dlen = COMP_BUF_SIZE;
 
                printk("test %u:\n", i + 1);
@@ -845,19 +1082,20 @@ static void test_deflate(void)
                       ilen, dlen);
        }
 
-       printk("\ntesting deflate decompression\n");
+       printk("\ntesting %s decompression\n", algo);
 
-       tsize = sizeof (deflate_decomp_tv_template);
+       tsize = sizeof(struct comp_testvec);
+       tsize *= dtcount;
        if (tsize > TVMEMSIZE) {
                printk("template (%u) too big for tvmem (%u)\n", tsize,
                       TVMEMSIZE);
                goto out;
        }
 
-       memcpy(tvmem, deflate_decomp_tv_template, tsize);
+       memcpy(tvmem, dtemplate, tsize);
        tv = (void *)tvmem;
 
-       for (i = 0; i < DEFLATE_DECOMP_TEST_VECTORS; i++) {
+       for (i = 0; i < dtcount; i++) {
                int ilen, ret, dlen = COMP_BUF_SIZE;
 
                printk("test %u:\n", i + 1);
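
The conversion of test_deflate() into the generic test_comp() above lets the same loop drive any compressor through its vector tables; underneath it is the synchronous crypto_comp interface. A minimal round-trip sketch, assuming an "lzo" transform is available, that the destination buffers are large enough, and that the helper name comp_roundtrip is hypothetical:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/string.h>

/* hypothetical helper: compress src, decompress the result, verify it matches */
static int comp_roundtrip(const u8 *src, unsigned int slen,
			  u8 *comp, u8 *decomp, unsigned int buflen)
{
	struct crypto_comp *tfm;
	unsigned int clen = buflen, dlen = buflen;
	int err;

	tfm = crypto_alloc_comp("lzo", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_comp_compress(tfm, src, slen, comp, &clen);
	if (!err)
		err = crypto_comp_decompress(tfm, comp, clen, decomp, &dlen);
	if (!err && (dlen != slen || memcmp(src, decomp, slen)))
		err = -EINVAL;	/* round trip did not reproduce the input */

	crypto_free_comp(tfm);
	return err;
}
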
@@ -918,6 +1156,8 @@ static void do_test(void)
 
                test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
 
+               test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
+
                test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS);
 
                //BLOWFISH
@@ -969,6 +1209,18 @@ static void do_test(void)
                            AES_XTS_ENC_TEST_VECTORS);
                test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
                            AES_XTS_DEC_TEST_VECTORS);
+               test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
+                           AES_CTR_ENC_TEST_VECTORS);
+               test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
+                           AES_CTR_DEC_TEST_VECTORS);
+               test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
+                         AES_GCM_ENC_TEST_VECTORS);
+               test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
+                         AES_GCM_DEC_TEST_VECTORS);
+               test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
+                         AES_CCM_ENC_TEST_VECTORS);
+               test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
+                         AES_CCM_DEC_TEST_VECTORS);
 
                //CAST5
                test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
@@ -1057,12 +1309,18 @@ static void do_test(void)
                test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS);
                test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS);
                test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
-               test_deflate();
+               test_comp("deflate", deflate_comp_tv_template,
+                         deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
+                         DEFLATE_DECOMP_TEST_VECTORS);
+               test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
+                         LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
                test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
                test_hash("hmac(md5)", hmac_md5_tv_template,
                          HMAC_MD5_TEST_VECTORS);
                test_hash("hmac(sha1)", hmac_sha1_tv_template,
                          HMAC_SHA1_TEST_VECTORS);
+               test_hash("hmac(sha224)", hmac_sha224_tv_template,
+                         HMAC_SHA224_TEST_VECTORS);
                test_hash("hmac(sha256)", hmac_sha256_tv_template,
                          HMAC_SHA256_TEST_VECTORS);
                test_hash("hmac(sha384)", hmac_sha384_tv_template,
@@ -1156,6 +1414,10 @@ static void do_test(void)
                            AES_XTS_ENC_TEST_VECTORS);
                test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
                            AES_XTS_DEC_TEST_VECTORS);
+               test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
+                           AES_CTR_ENC_TEST_VECTORS);
+               test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
+                           AES_CTR_DEC_TEST_VECTORS);
                break;
 
        case 11:
@@ -1167,7 +1429,9 @@ static void do_test(void)
                break;
 
        case 13:
-               test_deflate();
+               test_comp("deflate", deflate_comp_tv_template,
+                         deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
+                         DEFLATE_DECOMP_TEST_VECTORS);
                break;
 
        case 14:
@@ -1291,6 +1555,34 @@ static void do_test(void)
                            camellia_cbc_dec_tv_template,
                            CAMELLIA_CBC_DEC_TEST_VECTORS);
                break;
+       case 33:
+               test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
+               break;
+
+       case 34:
+               test_cipher("salsa20", ENCRYPT,
+                           salsa20_stream_enc_tv_template,
+                           SALSA20_STREAM_ENC_TEST_VECTORS);
+               break;
+
+       case 35:
+               test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
+                         AES_GCM_ENC_TEST_VECTORS);
+               test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
+                         AES_GCM_DEC_TEST_VECTORS);
+               break;
+
+       case 36:
+               test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
+                         LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
+               break;
+
+       case 37:
+               test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
+                         AES_CCM_ENC_TEST_VECTORS);
+               test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
+                         AES_CCM_DEC_TEST_VECTORS);
+               break;
 
        case 100:
                test_hash("hmac(md5)", hmac_md5_tv_template,
@@ -1317,6 +1609,15 @@ static void do_test(void)
                          HMAC_SHA512_TEST_VECTORS);
                break;
 
+       case 105:
+               test_hash("hmac(sha224)", hmac_sha224_tv_template,
+                         HMAC_SHA224_TEST_VECTORS);
+               break;
+
+       case 106:
+               test_hash("xcbc(aes)", aes_xcbc128_tv_template,
+                         XCBC_AES_TEST_VECTORS);
+               break;
 
        case 200:
                test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
@@ -1400,6 +1701,11 @@ static void do_test(void)
                                camellia_speed_template);
                break;
 
+       case 206:
+               test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
+                                 salsa20_speed_template);
+               break;
+
        case 300:
                /* fall through */
 
@@ -1451,6 +1757,10 @@ static void do_test(void)
                test_hash_speed("tgr192", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;
 
+       case 313:
+               test_hash_speed("sha224", sec, generic_hash_speed_template);
+               if (mode > 300 && mode < 400) break;
+
        case 399:
                break;
 
@@ -1467,20 +1777,21 @@ static void do_test(void)
 
 static int __init init(void)
 {
+       int err = -ENOMEM;
+
        tvmem = kmalloc(TVMEMSIZE, GFP_KERNEL);
        if (tvmem == NULL)
-               return -ENOMEM;
+               return err;
 
        xbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
-       if (xbuf == NULL) {
-               kfree(tvmem);
-               return -ENOMEM;
-       }
+       if (xbuf == NULL)
+               goto err_free_tv;
 
-       do_test();
+       axbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
+       if (axbuf == NULL)
+               goto err_free_xbuf;
 
-       kfree(xbuf);
-       kfree(tvmem);
+       do_test();
 
        /* We intentionally return -EAGAIN to prevent keeping
         * the module. It does all its work from init()
@@ -1488,7 +1799,15 @@ static int __init init(void)
         * => we don't need it in the memory, do we?
         *                                        -- mludvig
         */
-       return -EAGAIN;
+       err = -EAGAIN;
+
+       kfree(axbuf);
+ err_free_xbuf:
+       kfree(xbuf);
+ err_free_tv:
+       kfree(tvmem);
+
+       return err;
 }
 
 /*
index ec86138..f785e56 100644 (file)
@@ -6,12 +6,15 @@
  *
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
+ * Copyright (c) 2007 Nokia Siemens Networks
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
  * Software Foundation; either version 2 of the License, or (at your option)
  * any later version.
  *
+ * 2007-11-13 Added GCM tests
+ * 2007-11-13 Added AEAD support
  * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests
  * 2004-08-09 Cipher speed tests by Reyk Floeter <reyk@vantronix.net>
  * 2003-09-14 Changes by Kartikey Mahendra Bhatt
@@ -38,16 +41,34 @@ struct hash_testvec {
 };
 
 struct cipher_testvec {
+       char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
+       char iv[MAX_IVLEN];
+       char input[4100];
+       char result[4100];
+       unsigned char tap[MAX_TAP];
+       int np;
+       unsigned char fail;
+       unsigned char wk; /* weak key flag */
+       unsigned char klen;
+       unsigned short ilen;
+       unsigned short rlen;
+};
+
+struct aead_testvec {
        char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
        char iv[MAX_IVLEN];
        char input[512];
+       char assoc[512];
        char result[512];
        unsigned char tap[MAX_TAP];
+       unsigned char atap[MAX_TAP];
        int np;
+       int anp;
        unsigned char fail;
        unsigned char wk; /* weak key flag */
        unsigned char klen;
        unsigned short ilen;
+       unsigned short alen;
        unsigned short rlen;
 };
 
@@ -173,6 +194,33 @@ static struct hash_testvec sha1_tv_template[] = {
        }
 };
 
+
+/*
+ * SHA224 test vectors from FIPS PUB 180-2
+ */
+#define SHA224_TEST_VECTORS     2
+
+static struct hash_testvec sha224_tv_template[] = {
+       {
+               .plaintext = "abc",
+               .psize  = 3,
+               .digest = { 0x23, 0x09, 0x7D, 0x22, 0x34, 0x05, 0xD8, 0x22,
+                       0x86, 0x42, 0xA4, 0x77, 0xBD, 0xA2, 0x55, 0xB3,
+                       0x2A, 0xAD, 0xBC, 0xE4, 0xBD, 0xA0, 0xB3, 0xF7,
+                       0xE3, 0x6C, 0x9D, 0xA7},
+       }, {
+               .plaintext =
+               "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+               .psize  = 56,
+               .digest = { 0x75, 0x38, 0x8B, 0x16, 0x51, 0x27, 0x76, 0xCC,
+                       0x5D, 0xBA, 0x5D, 0xA1, 0xFD, 0x89, 0x01, 0x50,
+                       0xB0, 0xC6, 0x45, 0x5C, 0xB4, 0xF5, 0x8B, 0x19,
+                       0x52, 0x52, 0x25, 0x25 },
+               .np     = 2,
+               .tap    = { 28, 28 }
+       }
+};
+
 /*
  * SHA256 test vectors from NIST
  */
@@ -817,6 +865,121 @@ static struct hash_testvec hmac_sha1_tv_template[] = {
        },
 };
 
+
+/*
+ * SHA224 HMAC test vectors from RFC4231
+ */
+#define HMAC_SHA224_TEST_VECTORS    4
+
+static struct hash_testvec hmac_sha224_tv_template[] = {
+       {
+               .key    = { 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+                       0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+                       0x0b, 0x0b, 0x0b, 0x0b },
+               .ksize  = 20,
+               /*  ("Hi There") */
+               .plaintext = { 0x48, 0x69, 0x20, 0x54, 0x68, 0x65, 0x72, 0x65 },
+               .psize  = 8,
+               .digest = { 0x89, 0x6f, 0xb1, 0x12, 0x8a, 0xbb, 0xdf, 0x19,
+                       0x68, 0x32, 0x10, 0x7c, 0xd4, 0x9d, 0xf3, 0x3f,
+                       0x47, 0xb4, 0xb1, 0x16, 0x99, 0x12, 0xba, 0x4f,
+                       0x53, 0x68, 0x4b, 0x22},
+       }, {
+               .key    = { 0x4a, 0x65, 0x66, 0x65 }, /* ("Jefe") */
+               .ksize  = 4,
+               /* ("what do ya want for nothing?") */
+               .plaintext = { 0x77, 0x68, 0x61, 0x74, 0x20, 0x64, 0x6f, 0x20,
+                       0x79, 0x61, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20,
+                       0x66, 0x6f, 0x72, 0x20, 0x6e, 0x6f, 0x74, 0x68,
+                       0x69, 0x6e, 0x67, 0x3f },
+               .psize  = 28,
+               .digest = { 0xa3, 0x0e, 0x01, 0x09, 0x8b, 0xc6, 0xdb, 0xbf,
+                       0x45, 0x69, 0x0f, 0x3a, 0x7e, 0x9e, 0x6d, 0x0f,
+                       0x8b, 0xbe, 0xa2, 0xa3, 0x9e, 0x61, 0x48, 0x00,
+                       0x8f, 0xd0, 0x5e, 0x44 },
+               .np = 4,
+               .tap    = { 7, 7, 7, 7 }
+       }, {
+               .key    = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa },
+               .ksize  = 131,
+               /* ("Test Using Larger Than Block-Size Key - Hash Key First") */
+               .plaintext = { 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x69,
+                       0x6e, 0x67, 0x20, 0x4c, 0x61, 0x72, 0x67, 0x65,
+                       0x72, 0x20, 0x54, 0x68, 0x61, 0x6e, 0x20, 0x42,
+                       0x6c, 0x6f, 0x63, 0x6b, 0x2d, 0x53, 0x69, 0x7a,
+                       0x65, 0x20, 0x4b, 0x65, 0x79, 0x20, 0x2d, 0x20,
+                       0x48, 0x61, 0x73, 0x68, 0x20, 0x4b, 0x65, 0x79,
+                       0x20, 0x46, 0x69, 0x72, 0x73, 0x74 },
+               .psize  = 54,
+               .digest = { 0x95, 0xe9, 0xa0, 0xdb, 0x96, 0x20, 0x95, 0xad,
+                       0xae, 0xbe, 0x9b, 0x2d, 0x6f, 0x0d, 0xbc, 0xe2,
+                       0xd4, 0x99, 0xf1, 0x12, 0xf2, 0xd2, 0xb7, 0x27,
+                       0x3f, 0xa6, 0x87, 0x0e },
+       }, {
+               .key    = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+                       0xaa, 0xaa, 0xaa },
+               .ksize  = 131,
+               /* ("This is a test using a larger than block-size key and a")
+                  (" larger than block-size data. The key needs to be")
+                  (" hashed before being used by the HMAC algorithm.") */
+               .plaintext = { 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20,
+                       0x61, 0x20, 0x74, 0x65, 0x73, 0x74, 0x20, 0x75,
+                       0x73, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x6c,
+                       0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, 0x68,
+                       0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b,
+                       0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x6b, 0x65,
+                       0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x20,
+                       0x6c, 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74,
+                       0x68, 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63,
+                       0x6b, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x64,
+                       0x61, 0x74, 0x61, 0x2e, 0x20, 0x54, 0x68, 0x65,
+                       0x20, 0x6b, 0x65, 0x79, 0x20, 0x6e, 0x65, 0x65,
+                       0x64, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65,
+                       0x20, 0x68, 0x61, 0x73, 0x68, 0x65, 0x64, 0x20,
+                       0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x20, 0x62,
+                       0x65, 0x69, 0x6e, 0x67, 0x20, 0x75, 0x73, 0x65,
+                       0x64, 0x20, 0x62, 0x79, 0x20, 0x74, 0x68, 0x65,
+                       0x20, 0x48, 0x4d, 0x41, 0x43, 0x20, 0x61, 0x6c,
+                       0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x2e },
+               .psize  = 152,
+               .digest = { 0x3a, 0x85, 0x41, 0x66, 0xac, 0x5d, 0x9f, 0x02,
+                       0x3f, 0x54, 0xd5, 0x17, 0xd0, 0xb3, 0x9d, 0xbd,
+                       0x94, 0x67, 0x70, 0xdb, 0x9c, 0x2b, 0x95, 0xc9,
+                       0xf6, 0xf5, 0x65, 0xd1 },
+       },
+};
+
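+
These RFC 4231 vectors are consumed by the new test_hash("hmac(sha224)", ...) call in do_test(). Reproducing one of them outside the harness only adds a setkey step to the SHA-224 sketch shown earlier; again a hedged sketch against the same synchronous hash interface, with the helper name hypothetical:

/* hypothetical helper: one-shot HMAC-SHA224 of a contiguous buffer */
static int hmac_sha224_buf(const u8 *key, unsigned int keylen,
			   const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_hash *tfm;
	struct hash_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_hash("hmac(sha224)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_hash_setkey(tfm, key, keylen);
	if (!err) {
		desc.tfm = tfm;
		desc.flags = 0;
		sg_init_one(&sg, data, len);
		err = crypto_hash_digest(&desc, &sg, len, out);	/* 28-byte digest */
	}

	crypto_free_hash(tfm);
	return err;
}
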
 /*
  * HMAC-SHA256 test vectors from
  * draft-ietf-ipsec-ciph-sha-256-01.txt
@@ -2140,12 +2303,18 @@ static struct cipher_testvec cast6_dec_tv_template[] = {
  */
 #define AES_ENC_TEST_VECTORS 3
 #define AES_DEC_TEST_VECTORS 3
-#define AES_CBC_ENC_TEST_VECTORS 2
-#define AES_CBC_DEC_TEST_VECTORS 2
+#define AES_CBC_ENC_TEST_VECTORS 4
+#define AES_CBC_DEC_TEST_VECTORS 4
 #define AES_LRW_ENC_TEST_VECTORS 8
 #define AES_LRW_DEC_TEST_VECTORS 8
 #define AES_XTS_ENC_TEST_VECTORS 4
 #define AES_XTS_DEC_TEST_VECTORS 4
+#define AES_CTR_ENC_TEST_VECTORS 7
+#define AES_CTR_DEC_TEST_VECTORS 6
+#define AES_GCM_ENC_TEST_VECTORS 9
+#define AES_GCM_DEC_TEST_VECTORS 8
+#define AES_CCM_ENC_TEST_VECTORS 7
+#define AES_CCM_DEC_TEST_VECTORS 7
 
 static struct cipher_testvec aes_enc_tv_template[] = {
        { /* From FIPS-197 */
@@ -2249,6 +2418,57 @@ static struct cipher_testvec aes_cbc_enc_tv_template[] = {
                            0x75, 0x86, 0x60, 0x2d, 0x25, 0x3c, 0xff, 0xf9,
                            0x1b, 0x82, 0x66, 0xbe, 0xa6, 0xd6, 0x1a, 0xb1 },
                .rlen   = 32,
+       }, { /* From NIST SP800-38A */
+               .key    = { 0x8e, 0x73, 0xb0, 0xf7, 0xda, 0x0e, 0x64, 0x52,
+                           0xc8, 0x10, 0xf3, 0x2b, 0x80, 0x90, 0x79, 0xe5,
+                           0x62, 0xf8, 0xea, 0xd2, 0x52, 0x2c, 0x6b, 0x7b },
+               .klen   = 24,
+               .iv     = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
+               .input  = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
+                           0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
+                           0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
+                           0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
+                           0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
+                           0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
+                           0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
+                           0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
+               .ilen   = 64,
+               .result = { 0x4f, 0x02, 0x1d, 0xb2, 0x43, 0xbc, 0x63, 0x3d,
+                           0x71, 0x78, 0x18, 0x3a, 0x9f, 0xa0, 0x71, 0xe8,
+                           0xb4, 0xd9, 0xad, 0xa9, 0xad, 0x7d, 0xed, 0xf4,
+                           0xe5, 0xe7, 0x38, 0x76, 0x3f, 0x69, 0x14, 0x5a,
+                           0x57, 0x1b, 0x24, 0x20, 0x12, 0xfb, 0x7a, 0xe0,
+                           0x7f, 0xa9, 0xba, 0xac, 0x3d, 0xf1, 0x02, 0xe0,
+                           0x08, 0xb0, 0xe2, 0x79, 0x88, 0x59, 0x88, 0x81,
+                           0xd9, 0x20, 0xa9, 0xe6, 0x4f, 0x56, 0x15, 0xcd },
+               .rlen   = 64,
+       }, {
+               .key    = { 0x60, 0x3d, 0xeb, 0x10, 0x15, 0xca, 0x71, 0xbe,
+                           0x2b, 0x73, 0xae, 0xf0, 0x85, 0x7d, 0x77, 0x81,
+                           0x1f, 0x35, 0x2c, 0x07, 0x3b, 0x61, 0x08, 0xd7,
+                           0x2d, 0x98, 0x10, 0xa3, 0x09, 0x14, 0xdf, 0xf4 },
+               .klen   = 32,
+               .iv     = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
+               .input  = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
+                           0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
+                           0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
+                           0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
+                           0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
+                           0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
+                           0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
+                           0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
+               .ilen   = 64,
+               .result = { 0xf5, 0x8c, 0x4c, 0x04, 0xd6, 0xe5, 0xf1, 0xba,
+                           0x77, 0x9e, 0xab, 0xfb, 0x5f, 0x7b, 0xfb, 0xd6,
+                           0x9c, 0xfc, 0x4e, 0x96, 0x7e, 0xdb, 0x80, 0x8d,
+                           0x67, 0x9f, 0x77, 0x7b, 0xc6, 0x70, 0x2c, 0x7d,
+                           0x39, 0xf2, 0x33, 0x69, 0xa9, 0xd9, 0xba, 0xcf,
+                           0xa5, 0x30, 0xe2, 0x63, 0x04, 0x23, 0x14, 0x61,
+                           0xb2, 0xeb, 0x05, 0xe2, 0xc3, 0x9b, 0xe9, 0xfc,
+                           0xda, 0x6c, 0x19, 0x07, 0x8c, 0x6a, 0x9d, 0x1b },
+               .rlen   = 64,
        },
 };
 
@@ -2280,6 +2500,57 @@ static struct cipher_testvec aes_cbc_dec_tv_template[] = {
                            0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                            0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
                .rlen   = 32,
+       }, { /* From NIST SP800-38A */
+               .key    = { 0x8e, 0x73, 0xb0, 0xf7, 0xda, 0x0e, 0x64, 0x52,
+                           0xc8, 0x10, 0xf3, 0x2b, 0x80, 0x90, 0x79, 0xe5,
+                           0x62, 0xf8, 0xea, 0xd2, 0x52, 0x2c, 0x6b, 0x7b },
+               .klen   = 24,
+               .iv     = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
+               .input  = { 0x4f, 0x02, 0x1d, 0xb2, 0x43, 0xbc, 0x63, 0x3d,
+                           0x71, 0x78, 0x18, 0x3a, 0x9f, 0xa0, 0x71, 0xe8,
+                           0xb4, 0xd9, 0xad, 0xa9, 0xad, 0x7d, 0xed, 0xf4,
+                           0xe5, 0xe7, 0x38, 0x76, 0x3f, 0x69, 0x14, 0x5a,
+                           0x57, 0x1b, 0x24, 0x20, 0x12, 0xfb, 0x7a, 0xe0,
+                           0x7f, 0xa9, 0xba, 0xac, 0x3d, 0xf1, 0x02, 0xe0,
+                           0x08, 0xb0, 0xe2, 0x79, 0x88, 0x59, 0x88, 0x81,
+                           0xd9, 0x20, 0xa9, 0xe6, 0x4f, 0x56, 0x15, 0xcd },
+               .ilen   = 64,
+               .result = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
+                           0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
+                           0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
+                           0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
+                           0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
+                           0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
+                           0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
+                           0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
+               .rlen   = 64,
+       }, {
+               .key    = { 0x60, 0x3d, 0xeb, 0x10, 0x15, 0xca, 0x71, 0xbe,
+                           0x2b, 0x73, 0xae, 0xf0, 0x85, 0x7d, 0x77, 0x81,
+                           0x1f, 0x35, 0x2c, 0x07, 0x3b, 0x61, 0x08, 0xd7,
+                           0x2d, 0x98, 0x10, 0xa3, 0x09, 0x14, 0xdf, 0xf4 },
+               .klen   = 32,
+               .iv     = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
+               .input  = { 0xf5, 0x8c, 0x4c, 0x04, 0xd6, 0xe5, 0xf1, 0xba,
+                           0x77, 0x9e, 0xab, 0xfb, 0x5f, 0x7b, 0xfb, 0xd6,
+                           0x9c, 0xfc, 0x4e, 0x96, 0x7e, 0xdb, 0x80, 0x8d,
+                           0x67, 0x9f, 0x77, 0x7b, 0xc6, 0x70, 0x2c, 0x7d,
+                           0x39, 0xf2, 0x33, 0x69, 0xa9, 0xd9, 0xba, 0xcf,
+                           0xa5, 0x30, 0xe2, 0x63, 0x04, 0x23, 0x14, 0x61,
+                           0xb2, 0xeb, 0x05, 0xe2, 0xc3, 0x9b, 0xe9, 0xfc,
+                           0xda, 0x6c, 0x19, 0x07, 0x8c, 0x6a, 0x9d, 0x1b },
+               .ilen   = 64,
+               .result = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
+                           0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
+                           0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
+                           0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
+                           0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
+                           0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
+                           0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
+                           0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
+               .rlen   = 64,
        },
 };
 
@@ -3180,6 +3451,1843 @@ static struct cipher_testvec aes_xts_dec_tv_template[] = {
        }
 };
 
+
+static struct cipher_testvec aes_ctr_enc_tv_template[] = {
+       { /* From RFC 3686 */
+               .key    = { 0xae, 0x68, 0x52, 0xf8, 0x12, 0x10, 0x67, 0xcc,
+                           0x4b, 0xf7, 0xa5, 0x76, 0x55, 0x77, 0xf3, 0x9e,
+                           0x00, 0x00, 0x00, 0x30 },
+               .klen   = 20,
+               .iv     = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+               .input  = { "Single block msg" },
+               .ilen   = 16,
+               .result = { 0xe4, 0x09, 0x5d, 0x4f, 0xb7, 0xa7, 0xb3, 0x79,
+                           0x2d, 0x61, 0x75, 0xa3, 0x26, 0x13, 0x11, 0xb8 },
+               .rlen   = 16,
+       }, {
+               .key    = { 0x7e, 0x24, 0x06, 0x78, 0x17, 0xfa, 0xe0, 0xd7,
+                           0x43, 0xd6, 0xce, 0x1f, 0x32, 0x53, 0x91, 0x63,
+                           0x00, 0x6c, 0xb6, 0xdb },
+               .klen   = 20,
+               .iv     = { 0xc0, 0x54, 0x3b, 0x59, 0xda, 0x48, 0xd9, 0x0b },
+               .input  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
+               .ilen   = 32,
+               .result = { 0x51, 0x04, 0xa1, 0x06, 0x16, 0x8a, 0x72, 0xd9,
+                           0x79, 0x0d, 0x41, 0xee, 0x8e, 0xda, 0xd3, 0x88,
+                           0xeb, 0x2e, 0x1e, 0xfc, 0x46, 0xda, 0x57, 0xc8,
+                           0xfc, 0xe6, 0x30, 0xdf, 0x91, 0x41, 0xbe, 0x28 },
+               .rlen   = 32,
+       }, {
+               .key    = { 0x16, 0xaf, 0x5b, 0x14, 0x5f, 0xc9, 0xf5, 0x79,
+                           0xc1, 0x75, 0xf9, 0x3e, 0x3b, 0xfb, 0x0e, 0xed,
+                           0x86, 0x3d, 0x06, 0xcc, 0xfd, 0xb7, 0x85, 0x15,
+                           0x00, 0x00, 0x00, 0x48 },
+               .klen   = 28,
+               .iv     = { 0x36, 0x73, 0x3c, 0x14, 0x7d, 0x6d, 0x93, 0xcb },
+               .input  = { "Single block msg" },
+               .ilen   = 16,
+               .result = { 0x4b, 0x55, 0x38, 0x4f, 0xe2, 0x59, 0xc9, 0xc8,
+                           0x4e, 0x79, 0x35, 0xa0, 0x03, 0xcb, 0xe9, 0x28 },
+               .rlen   = 16,
+       }, {
+               .key    = { 0x7c, 0x5c, 0xb2, 0x40, 0x1b, 0x3d, 0xc3, 0x3c,
+                           0x19, 0xe7, 0x34, 0x08, 0x19, 0xe0, 0xf6, 0x9c,
+                           0x67, 0x8c, 0x3d, 0xb8, 0xe6, 0xf6, 0xa9, 0x1a,
+                           0x00, 0x96, 0xb0, 0x3b },
+               .klen   = 28,
+               .iv     = { 0x02, 0x0c, 0x6e, 0xad, 0xc2, 0xcb, 0x50, 0x0d },
+               .input  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
+               .ilen   = 32,
+               .result = { 0x45, 0x32, 0x43, 0xfc, 0x60, 0x9b, 0x23, 0x32,
+                           0x7e, 0xdf, 0xaa, 0xfa, 0x71, 0x31, 0xcd, 0x9f,
+                           0x84, 0x90, 0x70, 0x1c, 0x5a, 0xd4, 0xa7, 0x9c,
+                           0xfc, 0x1f, 0xe0, 0xff, 0x42, 0xf4, 0xfb, 0x00 },
+               .rlen   = 32,
+       }, {
+               .key    = { 0x77, 0x6b, 0xef, 0xf2, 0x85, 0x1d, 0xb0, 0x6f,
+                           0x4c, 0x8a, 0x05, 0x42, 0xc8, 0x69, 0x6f, 0x6c,
+                           0x6a, 0x81, 0xaf, 0x1e, 0xec, 0x96, 0xb4, 0xd3,
+                           0x7f, 0xc1, 0xd6, 0x89, 0xe6, 0xc1, 0xc1, 0x04,
+                           0x00, 0x00, 0x00, 0x60 },
+               .klen   = 36,
+               .iv     = { 0xdb, 0x56, 0x72, 0xc9, 0x7a, 0xa8, 0xf0, 0xb2 },
+               .input  = { "Single block msg" },
+               .ilen   = 16,
+               .result = { 0x14, 0x5a, 0xd0, 0x1d, 0xbf, 0x82, 0x4e, 0xc7,
+                           0x56, 0x08, 0x63, 0xdc, 0x71, 0xe3, 0xe0, 0xc0 },
+               .rlen   = 16,
+       }, {
+               .key    = { 0xf6, 0xd6, 0x6d, 0x6b, 0xd5, 0x2d, 0x59, 0xbb,
+                           0x07, 0x96, 0x36, 0x58, 0x79, 0xef, 0xf8, 0x86,
+                           0xc6, 0x6d, 0xd5, 0x1a, 0x5b, 0x6a, 0x99, 0x74,
+                           0x4b, 0x50, 0x59, 0x0c, 0x87, 0xa2, 0x38, 0x84,
+                           0x00, 0xfa, 0xac, 0x24 },
+               .klen   = 36,
+               .iv     = { 0xc1, 0x58, 0x5e, 0xf1, 0x5a, 0x43, 0xd8, 0x75 },
+               .input  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
+               .ilen   = 32,
+               .result = { 0xf0, 0x5e, 0x23, 0x1b, 0x38, 0x94, 0x61, 0x2c,
+                           0x49, 0xee, 0x00, 0x0b, 0x80, 0x4e, 0xb2, 0xa9,
+                           0xb8, 0x30, 0x6b, 0x50, 0x8f, 0x83, 0x9d, 0x6a,
+                           0x55, 0x30, 0x83, 0x1d, 0x93, 0x44, 0xaf, 0x1c },
+               .rlen   = 32,
+       }, {
+       /* generated using Crypto++ */
+               .key = {
+                       0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                       0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                       0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                       0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+                       0x00, 0x00, 0x00, 0x00,
+               },
+               .klen = 32 + 4,
+               .iv = {
+                       0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+               },
+               .input = {
+                       0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                       0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                       0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                       0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+                       0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
+                       0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
+                       0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
+                       0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
+                       0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
+                       0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
+                       0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
+                       0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
+                       0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
+                       0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
+                       0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
+                       0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
+                       0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+                       0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+                       0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+                       0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+                       0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+                       0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
+                       0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
+                       0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
+                       0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                       0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
+                       0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
+                       0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
+                       0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+                       0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
+                       0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+                       0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
+                       0x00, 0x03, 0x06, 0x09, 0x0c, 0x0f, 0x12, 0x15,
+                       0x18, 0x1b, 0x1e, 0x21, 0x24, 0x27, 0x2a, 0x2d,
+                       0x30, 0x33, 0x36, 0x39, 0x3c, 0x3f, 0x42, 0x45,
+                       0x48, 0x4b, 0x4e, 0x51, 0x54, 0x57, 0x5a, 0x5d,
+                       0x60, 0x63, 0x66, 0x69, 0x6c, 0x6f, 0x72, 0x75,
+                       0x78, 0x7b, 0x7e, 0x81, 0x84, 0x87, 0x8a, 0x8d,
+                       0x90, 0x93, 0x96, 0x99, 0x9c, 0x9f, 0xa2, 0xa5,
+                       0xa8, 0xab, 0xae, 0xb1, 0xb4, 0xb7, 0xba, 0xbd,
+                       0xc0, 0xc3, 0xc6, 0xc9, 0xcc, 0xcf, 0xd2, 0xd5,
+                       0xd8, 0xdb, 0xde, 0xe1, 0xe4, 0xe7, 0xea, 0xed,
+                       0xf0, 0xf3, 0xf6, 0xf9, 0xfc, 0xff, 0x02, 0x05,
+                       0x08, 0x0b, 0x0e, 0x11, 0x14, 0x17, 0x1a, 0x1d,
+                       0x20, 0x23, 0x26, 0x29, 0x2c, 0x2f, 0x32, 0x35,
+                       0x38, 0x3b, 0x3e, 0x41, 0x44, 0x47, 0x4a, 0x4d,
+                       0x50, 0x53, 0x56, 0x59, 0x5c, 0x5f, 0x62, 0x65,
+                       0x68, 0x6b, 0x6e, 0x71, 0x74, 0x77, 0x7a, 0x7d,
+                       0x80, 0x83, 0x86, 0x89, 0x8c, 0x8f, 0x92, 0x95,
+                       0x98, 0x9b, 0x9e, 0xa1, 0xa4, 0xa7, 0xaa, 0xad,
+                       0xb0, 0xb3, 0xb6, 0xb9, 0xbc, 0xbf, 0xc2, 0xc5,
+                       0xc8, 0xcb, 0xce, 0xd1, 0xd4, 0xd7, 0xda, 0xdd,
+                       0xe0, 0xe3, 0xe6, 0xe9, 0xec, 0xef, 0xf2, 0xf5,
+                       0xf8, 0xfb, 0xfe, 0x01, 0x04, 0x07, 0x0a, 0x0d,
+                       0x10, 0x13, 0x16, 0x19, 0x1c, 0x1f, 0x22, 0x25,
+                       0x28, 0x2b, 0x2e, 0x31, 0x34, 0x37, 0x3a, 0x3d,
+                       0x40, 0x43, 0x46, 0x49, 0x4c, 0x4f, 0x52, 0x55,
+                       0x58, 0x5b, 0x5e, 0x61, 0x64, 0x67, 0x6a, 0x6d,
+                       0x70, 0x73, 0x76, 0x79, 0x7c, 0x7f, 0x82, 0x85,
+                       0x88, 0x8b, 0x8e, 0x91, 0x94, 0x97, 0x9a, 0x9d,
+                       0xa0, 0xa3, 0xa6, 0xa9, 0xac, 0xaf, 0xb2, 0xb5,
+                       0xb8, 0xbb, 0xbe, 0xc1, 0xc4, 0xc7, 0xca, 0xcd,
+                       0xd0, 0xd3, 0xd6, 0xd9, 0xdc, 0xdf, 0xe2, 0xe5,
+                       0xe8, 0xeb, 0xee, 0xf1, 0xf4, 0xf7, 0xfa, 0xfd,
+                       0x00, 0x05, 0x0a, 0x0f, 0x14, 0x19, 0x1e, 0x23,
+                       0x28, 0x2d, 0x32, 0x37, 0x3c, 0x41, 0x46, 0x4b,
+                       0x50, 0x55, 0x5a, 0x5f, 0x64, 0x69, 0x6e, 0x73,
+                       0x78, 0x7d, 0x82, 0x87, 0x8c, 0x91, 0x96, 0x9b,
+                       0xa0, 0xa5, 0xaa, 0xaf, 0xb4, 0xb9, 0xbe, 0xc3,
+                       0xc8, 0xcd, 0xd2, 0xd7, 0xdc, 0xe1, 0xe6, 0xeb,
+                       0xf0, 0xf5, 0xfa, 0xff, 0x04, 0x09, 0x0e, 0x13,
+                       0x18, 0x1d, 0x22, 0x27, 0x2c, 0x31, 0x36, 0x3b,
+                       0x40, 0x45, 0x4a, 0x4f, 0x54, 0x59, 0x5e, 0x63,
+                       0x68, 0x6d, 0x72, 0x77, 0x7c, 0x81, 0x86, 0x8b,
+                       0x90, 0x95, 0x9a, 0x9f, 0xa4, 0xa9, 0xae, 0xb3,
+                       0xb8, 0xbd, 0xc2, 0xc7, 0xcc, 0xd1, 0xd6, 0xdb,
+                       0xe0, 0xe5, 0xea, 0xef, 0xf4, 0xf9, 0xfe, 0x03,
+                       0x08, 0x0d, 0x12, 0x17, 0x1c, 0x21, 0x26, 0x2b,
+                       0x30, 0x35, 0x3a, 0x3f, 0x44, 0x49, 0x4e, 0x53,
+                       0x58, 0x5d, 0x62, 0x67, 0x6c, 0x71, 0x76, 0x7b,
+                       0x80, 0x85, 0x8a, 0x8f, 0x94, 0x99, 0x9e, 0xa3,
+                       0xa8, 0xad, 0xb2, 0xb7, 0xbc, 0xc1, 0xc6, 0xcb,
+                       0xd0, 0xd5, 0xda, 0xdf, 0xe4, 0xe9, 0xee, 0xf3,
+                       0xf8, 0xfd, 0x02, 0x07, 0x0c, 0x11, 0x16, 0x1b,
+                       0x20, 0x25, 0x2a, 0x2f, 0x34, 0x39, 0x3e, 0x43,
+                       0x48, 0x4d, 0x52, 0x57, 0x5c, 0x61, 0x66, 0x6b,
+                       0x70, 0x75, 0x7a, 0x7f, 0x84, 0x89, 0x8e, 0x93,
+                       0x98, 0x9d, 0xa2, 0xa7, 0xac, 0xb1, 0xb6, 0xbb,
+                       0xc0, 0xc5, 0xca, 0xcf, 0xd4, 0xd9, 0xde, 0xe3,
+                       0xe8, 0xed, 0xf2, 0xf7, 0xfc, 0x01, 0x06, 0x0b,
+                       0x10, 0x15, 0x1a, 0x1f, 0x24, 0x29, 0x2e, 0x33,
+                       0x38, 0x3d, 0x42, 0x47, 0x4c, 0x51, 0x56, 0x5b,
+                       0x60, 0x65, 0x6a, 0x6f, 0x74, 0x79, 0x7e, 0x83,
+                       0x88, 0x8d, 0x92, 0x97, 0x9c, 0xa1, 0xa6, 0xab,
+                       0xb0, 0xb5, 0xba, 0xbf, 0xc4, 0xc9, 0xce, 0xd3,
+                       0xd8, 0xdd, 0xe2, 0xe7, 0xec, 0xf1, 0xf6, 0xfb,
+                       0x00, 0x07, 0x0e, 0x15, 0x1c, 0x23, 0x2a, 0x31,
+                       0x38, 0x3f, 0x46, 0x4d, 0x54, 0x5b, 0x62, 0x69,
+                       0x70, 0x77, 0x7e, 0x85, 0x8c, 0x93, 0x9a, 0xa1,
+                       0xa8, 0xaf, 0xb6, 0xbd, 0xc4, 0xcb, 0xd2, 0xd9,
+                       0xe0, 0xe7, 0xee, 0xf5, 0xfc, 0x03, 0x0a, 0x11,
+                       0x18, 0x1f, 0x26, 0x2d, 0x34, 0x3b, 0x42, 0x49,
+                       0x50, 0x57, 0x5e, 0x65, 0x6c, 0x73, 0x7a, 0x81,
+                       0x88, 0x8f, 0x96, 0x9d, 0xa4, 0xab, 0xb2, 0xb9,
+                       0xc0, 0xc7, 0xce, 0xd5, 0xdc, 0xe3, 0xea, 0xf1,
+                       0xf8, 0xff, 0x06, 0x0d, 0x14, 0x1b, 0x22, 0x29,
+                       0x30, 0x37, 0x3e, 0x45, 0x4c, 0x53, 0x5a, 0x61,
+                       0x68, 0x6f, 0x76, 0x7d, 0x84, 0x8b, 0x92, 0x99,
+                       0xa0, 0xa7, 0xae, 0xb5, 0xbc, 0xc3, 0xca, 0xd1,
+                       0xd8, 0xdf, 0xe6, 0xed, 0xf4, 0xfb, 0x02, 0x09,
+                       0x10, 0x17, 0x1e, 0x25, 0x2c, 0x33, 0x3a, 0x41,
+                       0x48, 0x4f, 0x56, 0x5d, 0x64, 0x6b, 0x72, 0x79,
+                       0x80, 0x87, 0x8e, 0x95, 0x9c, 0xa3, 0xaa, 0xb1,
+                       0xb8, 0xbf, 0xc6, 0xcd, 0xd4, 0xdb, 0xe2, 0xe9,
+                       0xf0, 0xf7, 0xfe, 0x05, 0x0c, 0x13, 0x1a, 0x21,
+                       0x28, 0x2f, 0x36, 0x3d, 0x44, 0x4b, 0x52, 0x59,
+                       0x60, 0x67, 0x6e, 0x75, 0x7c, 0x83, 0x8a, 0x91,
+                       0x98, 0x9f, 0xa6, 0xad, 0xb4, 0xbb, 0xc2, 0xc9,
+                       0xd0, 0xd7, 0xde, 0xe5, 0xec, 0xf3, 0xfa, 0x01,
+                       0x08, 0x0f, 0x16, 0x1d, 0x24, 0x2b, 0x32, 0x39,
+                       0x40, 0x47, 0x4e, 0x55, 0x5c, 0x63, 0x6a, 0x71,
+                       0x78, 0x7f, 0x86, 0x8d, 0x94, 0x9b, 0xa2, 0xa9,
+                       0xb0, 0xb7, 0xbe, 0xc5, 0xcc, 0xd3, 0xda, 0xe1,
+                       0xe8, 0xef, 0xf6, 0xfd, 0x04, 0x0b, 0x12, 0x19,
+                       0x20, 0x27, 0x2e, 0x35, 0x3c, 0x43, 0x4a, 0x51,
+                       0x58, 0x5f, 0x66, 0x6d, 0x74, 0x7b, 0x82, 0x89,
+                       0x90, 0x97, 0x9e, 0xa5, 0xac, 0xb3, 0xba, 0xc1,
+                       0xc8, 0xcf, 0xd6, 0xdd, 0xe4, 0xeb, 0xf2, 0xf9,
+                       0x00, 0x09, 0x12, 0x1b, 0x24, 0x2d, 0x36, 0x3f,
+                       0x48, 0x51, 0x5a, 0x63, 0x6c, 0x75, 0x7e, 0x87,
+                       0x90, 0x99, 0xa2, 0xab, 0xb4, 0xbd, 0xc6, 0xcf,
+                       0xd8, 0xe1, 0xea, 0xf3, 0xfc, 0x05, 0x0e, 0x17,
+                       0x20, 0x29, 0x32, 0x3b, 0x44, 0x4d, 0x56, 0x5f,
+                       0x68, 0x71, 0x7a, 0x83, 0x8c, 0x95, 0x9e, 0xa7,
+                       0xb0, 0xb9, 0xc2, 0xcb, 0xd4, 0xdd, 0xe6, 0xef,
+                       0xf8, 0x01, 0x0a, 0x13, 0x1c, 0x25, 0x2e, 0x37,
+                       0x40, 0x49, 0x52, 0x5b, 0x64, 0x6d, 0x76, 0x7f,
+                       0x88, 0x91, 0x9a, 0xa3, 0xac, 0xb5, 0xbe, 0xc7,
+                       0xd0, 0xd9, 0xe2, 0xeb, 0xf4, 0xfd, 0x06, 0x0f,
+                       0x18, 0x21, 0x2a, 0x33, 0x3c, 0x45, 0x4e, 0x57,
+                       0x60, 0x69, 0x72, 0x7b, 0x84, 0x8d, 0x96, 0x9f,
+                       0xa8, 0xb1, 0xba, 0xc3, 0xcc, 0xd5, 0xde, 0xe7,
+                       0xf0, 0xf9, 0x02, 0x0b, 0x14, 0x1d, 0x26, 0x2f,
+                       0x38, 0x41, 0x4a, 0x53, 0x5c, 0x65, 0x6e, 0x77,
+                       0x80, 0x89, 0x92, 0x9b, 0xa4, 0xad, 0xb6, 0xbf,
+                       0xc8, 0xd1, 0xda, 0xe3, 0xec, 0xf5, 0xfe, 0x07,
+                       0x10, 0x19, 0x22, 0x2b, 0x34, 0x3d, 0x46, 0x4f,
+                       0x58, 0x61, 0x6a, 0x73, 0x7c, 0x85, 0x8e, 0x97,
+                       0xa0, 0xa9, 0xb2, 0xbb, 0xc4, 0xcd, 0xd6, 0xdf,
+                       0xe8, 0xf1, 0xfa, 0x03, 0x0c, 0x15, 0x1e, 0x27,
+                       0x30, 0x39, 0x42, 0x4b, 0x54, 0x5d, 0x66, 0x6f,
+                       0x78, 0x81, 0x8a, 0x93, 0x9c, 0xa5, 0xae, 0xb7,
+                       0xc0, 0xc9, 0xd2, 0xdb, 0xe4, 0xed, 0xf6, 0xff,
+                       0x08, 0x11, 0x1a, 0x23, 0x2c, 0x35, 0x3e, 0x47,
+                       0x50, 0x59, 0x62, 0x6b, 0x74, 0x7d, 0x86, 0x8f,
+                       0x98, 0xa1, 0xaa, 0xb3, 0xbc, 0xc5, 0xce, 0xd7,
+                       0xe0, 0xe9, 0xf2, 0xfb, 0x04, 0x0d, 0x16, 0x1f,
+                       0x28, 0x31, 0x3a, 0x43, 0x4c, 0x55, 0x5e, 0x67,
+                       0x70, 0x79, 0x82, 0x8b, 0x94, 0x9d, 0xa6, 0xaf,
+                       0xb8, 0xc1, 0xca, 0xd3, 0xdc, 0xe5, 0xee, 0xf7,
+                       0x00, 0x0b, 0x16, 0x21, 0x2c, 0x37, 0x42, 0x4d,
+                       0x58, 0x63, 0x6e, 0x79, 0x84, 0x8f, 0x9a, 0xa5,
+                       0xb0, 0xbb, 0xc6, 0xd1, 0xdc, 0xe7, 0xf2, 0xfd,
+                       0x08, 0x13, 0x1e, 0x29, 0x34, 0x3f, 0x4a, 0x55,
+                       0x60, 0x6b, 0x76, 0x81, 0x8c, 0x97, 0xa2, 0xad,
+                       0xb8, 0xc3, 0xce, 0xd9, 0xe4, 0xef, 0xfa, 0x05,
+                       0x10, 0x1b, 0x26, 0x31, 0x3c, 0x47, 0x52, 0x5d,
+                       0x68, 0x73, 0x7e, 0x89, 0x94, 0x9f, 0xaa, 0xb5,
+                       0xc0, 0xcb, 0xd6, 0xe1, 0xec, 0xf7, 0x02, 0x0d,
+                       0x18, 0x23, 0x2e, 0x39, 0x44, 0x4f, 0x5a, 0x65,
+                       0x70, 0x7b, 0x86, 0x91, 0x9c, 0xa7, 0xb2, 0xbd,
+                       0xc8, 0xd3, 0xde, 0xe9, 0xf4, 0xff, 0x0a, 0x15,
+                       0x20, 0x2b, 0x36, 0x41, 0x4c, 0x57, 0x62, 0x6d,
+                       0x78, 0x83, 0x8e, 0x99, 0xa4, 0xaf, 0xba, 0xc5,
+                       0xd0, 0xdb, 0xe6, 0xf1, 0xfc, 0x07, 0x12, 0x1d,
+                       0x28, 0x33, 0x3e, 0x49, 0x54, 0x5f, 0x6a, 0x75,
+                       0x80, 0x8b, 0x96, 0xa1, 0xac, 0xb7, 0xc2, 0xcd,
+                       0xd8, 0xe3, 0xee, 0xf9, 0x04, 0x0f, 0x1a, 0x25,
+                       0x30, 0x3b, 0x46, 0x51, 0x5c, 0x67, 0x72, 0x7d,
+                       0x88, 0x93, 0x9e, 0xa9, 0xb4, 0xbf, 0xca, 0xd5,
+                       0xe0, 0xeb, 0xf6, 0x01, 0x0c, 0x17, 0x22, 0x2d,
+                       0x38, 0x43, 0x4e, 0x59, 0x64, 0x6f, 0x7a, 0x85,
+                       0x90, 0x9b, 0xa6, 0xb1, 0xbc, 0xc7, 0xd2, 0xdd,
+                       0xe8, 0xf3, 0xfe, 0x09, 0x14, 0x1f, 0x2a, 0x35,
+                       0x40, 0x4b, 0x56, 0x61, 0x6c, 0x77, 0x82, 0x8d,
+                       0x98, 0xa3, 0xae, 0xb9, 0xc4, 0xcf, 0xda, 0xe5,
+                       0xf0, 0xfb, 0x06, 0x11, 0x1c, 0x27, 0x32, 0x3d,
+                       0x48, 0x53, 0x5e, 0x69, 0x74, 0x7f, 0x8a, 0x95,
+                       0xa0, 0xab, 0xb6, 0xc1, 0xcc, 0xd7, 0xe2, 0xed,
+                       0xf8, 0x03, 0x0e, 0x19, 0x24, 0x2f, 0x3a, 0x45,
+                       0x50, 0x5b, 0x66, 0x71, 0x7c, 0x87, 0x92, 0x9d,
+                       0xa8, 0xb3, 0xbe, 0xc9, 0xd4, 0xdf, 0xea, 0xf5,
+                       0x00, 0x0d, 0x1a, 0x27, 0x34, 0x41, 0x4e, 0x5b,
+                       0x68, 0x75, 0x82, 0x8f, 0x9c, 0xa9, 0xb6, 0xc3,
+                       0xd0, 0xdd, 0xea, 0xf7, 0x04, 0x11, 0x1e, 0x2b,
+                       0x38, 0x45, 0x52, 0x5f, 0x6c, 0x79, 0x86, 0x93,
+                       0xa0, 0xad, 0xba, 0xc7, 0xd4, 0xe1, 0xee, 0xfb,
+                       0x08, 0x15, 0x22, 0x2f, 0x3c, 0x49, 0x56, 0x63,
+                       0x70, 0x7d, 0x8a, 0x97, 0xa4, 0xb1, 0xbe, 0xcb,
+                       0xd8, 0xe5, 0xf2, 0xff, 0x0c, 0x19, 0x26, 0x33,
+                       0x40, 0x4d, 0x5a, 0x67, 0x74, 0x81, 0x8e, 0x9b,
+                       0xa8, 0xb5, 0xc2, 0xcf, 0xdc, 0xe9, 0xf6, 0x03,
+                       0x10, 0x1d, 0x2a, 0x37, 0x44, 0x51, 0x5e, 0x6b,
+                       0x78, 0x85, 0x92, 0x9f, 0xac, 0xb9, 0xc6, 0xd3,
+                       0xe0, 0xed, 0xfa, 0x07, 0x14, 0x21, 0x2e, 0x3b,
+                       0x48, 0x55, 0x62, 0x6f, 0x7c, 0x89, 0x96, 0xa3,
+                       0xb0, 0xbd, 0xca, 0xd7, 0xe4, 0xf1, 0xfe, 0x0b,
+                       0x18, 0x25, 0x32, 0x3f, 0x4c, 0x59, 0x66, 0x73,
+                       0x80, 0x8d, 0x9a, 0xa7, 0xb4, 0xc1, 0xce, 0xdb,
+                       0xe8, 0xf5, 0x02, 0x0f, 0x1c, 0x29, 0x36, 0x43,
+                       0x50, 0x5d, 0x6a, 0x77, 0x84, 0x91, 0x9e, 0xab,
+                       0xb8, 0xc5, 0xd2, 0xdf, 0xec, 0xf9, 0x06, 0x13,
+                       0x20, 0x2d, 0x3a, 0x47, 0x54, 0x61, 0x6e, 0x7b,
+                       0x88, 0x95, 0xa2, 0xaf, 0xbc, 0xc9, 0xd6, 0xe3,
+                       0xf0, 0xfd, 0x0a, 0x17, 0x24, 0x31, 0x3e, 0x4b,
+                       0x58, 0x65, 0x72, 0x7f, 0x8c, 0x99, 0xa6, 0xb3,
+                       0xc0, 0xcd, 0xda, 0xe7, 0xf4, 0x01, 0x0e, 0x1b,
+                       0x28, 0x35, 0x42, 0x4f, 0x5c, 0x69, 0x76, 0x83,
+                       0x90, 0x9d, 0xaa, 0xb7, 0xc4, 0xd1, 0xde, 0xeb,
+                       0xf8, 0x05, 0x12, 0x1f, 0x2c, 0x39, 0x46, 0x53,
+                       0x60, 0x6d, 0x7a, 0x87, 0x94, 0xa1, 0xae, 0xbb,
+                       0xc8, 0xd5, 0xe2, 0xef, 0xfc, 0x09, 0x16, 0x23,
+                       0x30, 0x3d, 0x4a, 0x57, 0x64, 0x71, 0x7e, 0x8b,
+                       0x98, 0xa5, 0xb2, 0xbf, 0xcc, 0xd9, 0xe6, 0xf3,
+                       0x00, 0x0f, 0x1e, 0x2d, 0x3c, 0x4b, 0x5a, 0x69,
+                       0x78, 0x87, 0x96, 0xa5, 0xb4, 0xc3, 0xd2, 0xe1,
+                       0xf0, 0xff, 0x0e, 0x1d, 0x2c, 0x3b, 0x4a, 0x59,
+                       0x68, 0x77, 0x86, 0x95, 0xa4, 0xb3, 0xc2, 0xd1,
+                       0xe0, 0xef, 0xfe, 0x0d, 0x1c, 0x2b, 0x3a, 0x49,
+                       0x58, 0x67, 0x76, 0x85, 0x94, 0xa3, 0xb2, 0xc1,
+                       0xd0, 0xdf, 0xee, 0xfd, 0x0c, 0x1b, 0x2a, 0x39,
+                       0x48, 0x57, 0x66, 0x75, 0x84, 0x93, 0xa2, 0xb1,
+                       0xc0, 0xcf, 0xde, 0xed, 0xfc, 0x0b, 0x1a, 0x29,
+                       0x38, 0x47, 0x56, 0x65, 0x74, 0x83, 0x92, 0xa1,
+                       0xb0, 0xbf, 0xce, 0xdd, 0xec, 0xfb, 0x0a, 0x19,
+                       0x28, 0x37, 0x46, 0x55, 0x64, 0x73, 0x82, 0x91,
+                       0xa0, 0xaf, 0xbe, 0xcd, 0xdc, 0xeb, 0xfa, 0x09,
+                       0x18, 0x27, 0x36, 0x45, 0x54, 0x63, 0x72, 0x81,
+                       0x90, 0x9f, 0xae, 0xbd, 0xcc, 0xdb, 0xea, 0xf9,
+                       0x08, 0x17, 0x26, 0x35, 0x44, 0x53, 0x62, 0x71,
+                       0x80, 0x8f, 0x9e, 0xad, 0xbc, 0xcb, 0xda, 0xe9,
+                       0xf8, 0x07, 0x16, 0x25, 0x34, 0x43, 0x52, 0x61,
+                       0x70, 0x7f, 0x8e, 0x9d, 0xac, 0xbb, 0xca, 0xd9,
+                       0xe8, 0xf7, 0x06, 0x15, 0x24, 0x33, 0x42, 0x51,
+                       0x60, 0x6f, 0x7e, 0x8d, 0x9c, 0xab, 0xba, 0xc9,
+                       0xd8, 0xe7, 0xf6, 0x05, 0x14, 0x23, 0x32, 0x41,
+                       0x50, 0x5f, 0x6e, 0x7d, 0x8c, 0x9b, 0xaa, 0xb9,
+                       0xc8, 0xd7, 0xe6, 0xf5, 0x04, 0x13, 0x22, 0x31,
+                       0x40, 0x4f, 0x5e, 0x6d, 0x7c, 0x8b, 0x9a, 0xa9,
+                       0xb8, 0xc7, 0xd6, 0xe5, 0xf4, 0x03, 0x12, 0x21,
+                       0x30, 0x3f, 0x4e, 0x5d, 0x6c, 0x7b, 0x8a, 0x99,
+                       0xa8, 0xb7, 0xc6, 0xd5, 0xe4, 0xf3, 0x02, 0x11,
+                       0x20, 0x2f, 0x3e, 0x4d, 0x5c, 0x6b, 0x7a, 0x89,
+                       0x98, 0xa7, 0xb6, 0xc5, 0xd4, 0xe3, 0xf2, 0x01,
+                       0x10, 0x1f, 0x2e, 0x3d, 0x4c, 0x5b, 0x6a, 0x79,
+                       0x88, 0x97, 0xa6, 0xb5, 0xc4, 0xd3, 0xe2, 0xf1,
+                       0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
+                       0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff,
+                       0x10, 0x21, 0x32, 0x43, 0x54, 0x65, 0x76, 0x87,
+                       0x98, 0xa9, 0xba, 0xcb, 0xdc, 0xed, 0xfe, 0x0f,
+                       0x20, 0x31, 0x42, 0x53, 0x64, 0x75, 0x86, 0x97,
+                       0xa8, 0xb9, 0xca, 0xdb, 0xec, 0xfd, 0x0e, 0x1f,
+                       0x30, 0x41, 0x52, 0x63, 0x74, 0x85, 0x96, 0xa7,
+                       0xb8, 0xc9, 0xda, 0xeb, 0xfc, 0x0d, 0x1e, 0x2f,
+                       0x40, 0x51, 0x62, 0x73, 0x84, 0x95, 0xa6, 0xb7,
+                       0xc8, 0xd9, 0xea, 0xfb, 0x0c, 0x1d, 0x2e, 0x3f,
+                       0x50, 0x61, 0x72, 0x83, 0x94, 0xa5, 0xb6, 0xc7,
+                       0xd8, 0xe9, 0xfa, 0x0b, 0x1c, 0x2d, 0x3e, 0x4f,
+                       0x60, 0x71, 0x82, 0x93, 0xa4, 0xb5, 0xc6, 0xd7,
+                       0xe8, 0xf9, 0x0a, 0x1b, 0x2c, 0x3d, 0x4e, 0x5f,
+                       0x70, 0x81, 0x92, 0xa3, 0xb4, 0xc5, 0xd6, 0xe7,
+                       0xf8, 0x09, 0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f,
+                       0x80, 0x91, 0xa2, 0xb3, 0xc4, 0xd5, 0xe6, 0xf7,
+                       0x08, 0x19, 0x2a, 0x3b, 0x4c, 0x5d, 0x6e, 0x7f,
+                       0x90, 0xa1, 0xb2, 0xc3, 0xd4, 0xe5, 0xf6, 0x07,
+                       0x18, 0x29, 0x3a, 0x4b, 0x5c, 0x6d, 0x7e, 0x8f,
+                       0xa0, 0xb1, 0xc2, 0xd3, 0xe4, 0xf5, 0x06, 0x17,
+                       0x28, 0x39, 0x4a, 0x5b, 0x6c, 0x7d, 0x8e, 0x9f,
+                       0xb0, 0xc1, 0xd2, 0xe3, 0xf4, 0x05, 0x16, 0x27,
+                       0x38, 0x49, 0x5a, 0x6b, 0x7c, 0x8d, 0x9e, 0xaf,
+                       0xc0, 0xd1, 0xe2, 0xf3, 0x04, 0x15, 0x26, 0x37,
+                       0x48, 0x59, 0x6a, 0x7b, 0x8c, 0x9d, 0xae, 0xbf,
+                       0xd0, 0xe1, 0xf2, 0x03, 0x14, 0x25, 0x36, 0x47,
+                       0x58, 0x69, 0x7a, 0x8b, 0x9c, 0xad, 0xbe, 0xcf,
+                       0xe0, 0xf1, 0x02, 0x13, 0x24, 0x35, 0x46, 0x57,
+                       0x68, 0x79, 0x8a, 0x9b, 0xac, 0xbd, 0xce, 0xdf,
+                       0xf0, 0x01, 0x12, 0x23, 0x34, 0x45, 0x56, 0x67,
+                       0x78, 0x89, 0x9a, 0xab, 0xbc, 0xcd, 0xde, 0xef,
+                       0x00, 0x13, 0x26, 0x39, 0x4c, 0x5f, 0x72, 0x85,
+                       0x98, 0xab, 0xbe, 0xd1, 0xe4, 0xf7, 0x0a, 0x1d,
+                       0x30, 0x43, 0x56, 0x69, 0x7c, 0x8f, 0xa2, 0xb5,
+                       0xc8, 0xdb, 0xee, 0x01, 0x14, 0x27, 0x3a, 0x4d,
+                       0x60, 0x73, 0x86, 0x99, 0xac, 0xbf, 0xd2, 0xe5,
+                       0xf8, 0x0b, 0x1e, 0x31, 0x44, 0x57, 0x6a, 0x7d,
+                       0x90, 0xa3, 0xb6, 0xc9, 0xdc, 0xef, 0x02, 0x15,
+                       0x28, 0x3b, 0x4e, 0x61, 0x74, 0x87, 0x9a, 0xad,
+                       0xc0, 0xd3, 0xe6, 0xf9, 0x0c, 0x1f, 0x32, 0x45,
+                       0x58, 0x6b, 0x7e, 0x91, 0xa4, 0xb7, 0xca, 0xdd,
+                       0xf0, 0x03, 0x16, 0x29, 0x3c, 0x4f, 0x62, 0x75,
+                       0x88, 0x9b, 0xae, 0xc1, 0xd4, 0xe7, 0xfa, 0x0d,
+                       0x20, 0x33, 0x46, 0x59, 0x6c, 0x7f, 0x92, 0xa5,
+                       0xb8, 0xcb, 0xde, 0xf1, 0x04, 0x17, 0x2a, 0x3d,
+                       0x50, 0x63, 0x76, 0x89, 0x9c, 0xaf, 0xc2, 0xd5,
+                       0xe8, 0xfb, 0x0e, 0x21, 0x34, 0x47, 0x5a, 0x6d,
+                       0x80, 0x93, 0xa6, 0xb9, 0xcc, 0xdf, 0xf2, 0x05,
+                       0x18, 0x2b, 0x3e, 0x51, 0x64, 0x77, 0x8a, 0x9d,
+                       0xb0, 0xc3, 0xd6, 0xe9, 0xfc, 0x0f, 0x22, 0x35,
+                       0x48, 0x5b, 0x6e, 0x81, 0x94, 0xa7, 0xba, 0xcd,
+                       0xe0, 0xf3, 0x06, 0x19, 0x2c, 0x3f, 0x52, 0x65,
+                       0x78, 0x8b, 0x9e, 0xb1, 0xc4, 0xd7, 0xea, 0xfd,
+                       0x10, 0x23, 0x36, 0x49, 0x5c, 0x6f, 0x82, 0x95,
+                       0xa8, 0xbb, 0xce, 0xe1, 0xf4, 0x07, 0x1a, 0x2d,
+                       0x40, 0x53, 0x66, 0x79, 0x8c, 0x9f, 0xb2, 0xc5,
+                       0xd8, 0xeb, 0xfe, 0x11, 0x24, 0x37, 0x4a, 0x5d,
+                       0x70, 0x83, 0x96, 0xa9, 0xbc, 0xcf, 0xe2, 0xf5,
+                       0x08, 0x1b, 0x2e, 0x41, 0x54, 0x67, 0x7a, 0x8d,
+                       0xa0, 0xb3, 0xc6, 0xd9, 0xec, 0xff, 0x12, 0x25,
+                       0x38, 0x4b, 0x5e, 0x71, 0x84, 0x97, 0xaa, 0xbd,
+                       0xd0, 0xe3, 0xf6, 0x09, 0x1c, 0x2f, 0x42, 0x55,
+                       0x68, 0x7b, 0x8e, 0xa1, 0xb4, 0xc7, 0xda, 0xed,
+                       0x00, 0x15, 0x2a, 0x3f, 0x54, 0x69, 0x7e, 0x93,
+                       0xa8, 0xbd, 0xd2, 0xe7, 0xfc, 0x11, 0x26, 0x3b,
+                       0x50, 0x65, 0x7a, 0x8f, 0xa4, 0xb9, 0xce, 0xe3,
+                       0xf8, 0x0d, 0x22, 0x37, 0x4c, 0x61, 0x76, 0x8b,
+                       0xa0, 0xb5, 0xca, 0xdf, 0xf4, 0x09, 0x1e, 0x33,
+                       0x48, 0x5d, 0x72, 0x87, 0x9c, 0xb1, 0xc6, 0xdb,
+                       0xf0, 0x05, 0x1a, 0x2f, 0x44, 0x59, 0x6e, 0x83,
+                       0x98, 0xad, 0xc2, 0xd7, 0xec, 0x01, 0x16, 0x2b,
+                       0x40, 0x55, 0x6a, 0x7f, 0x94, 0xa9, 0xbe, 0xd3,
+                       0xe8, 0xfd, 0x12, 0x27, 0x3c, 0x51, 0x66, 0x7b,
+                       0x90, 0xa5, 0xba, 0xcf, 0xe4, 0xf9, 0x0e, 0x23,
+                       0x38, 0x4d, 0x62, 0x77, 0x8c, 0xa1, 0xb6, 0xcb,
+                       0xe0, 0xf5, 0x0a, 0x1f, 0x34, 0x49, 0x5e, 0x73,
+                       0x88, 0x9d, 0xb2, 0xc7, 0xdc, 0xf1, 0x06, 0x1b,
+                       0x30, 0x45, 0x5a, 0x6f, 0x84, 0x99, 0xae, 0xc3,
+                       0xd8, 0xed, 0x02, 0x17, 0x2c, 0x41, 0x56, 0x6b,
+                       0x80, 0x95, 0xaa, 0xbf, 0xd4, 0xe9, 0xfe, 0x13,
+                       0x28, 0x3d, 0x52, 0x67, 0x7c, 0x91, 0xa6, 0xbb,
+                       0xd0, 0xe5, 0xfa, 0x0f, 0x24, 0x39, 0x4e, 0x63,
+                       0x78, 0x8d, 0xa2, 0xb7, 0xcc, 0xe1, 0xf6, 0x0b,
+                       0x20, 0x35, 0x4a, 0x5f, 0x74, 0x89, 0x9e, 0xb3,
+                       0xc8, 0xdd, 0xf2, 0x07, 0x1c, 0x31, 0x46, 0x5b,
+                       0x70, 0x85, 0x9a, 0xaf, 0xc4, 0xd9, 0xee, 0x03,
+                       0x18, 0x2d, 0x42, 0x57, 0x6c, 0x81, 0x96, 0xab,
+                       0xc0, 0xd5, 0xea, 0xff, 0x14, 0x29, 0x3e, 0x53,
+                       0x68, 0x7d, 0x92, 0xa7, 0xbc, 0xd1, 0xe6, 0xfb,
+                       0x10, 0x25, 0x3a, 0x4f, 0x64, 0x79, 0x8e, 0xa3,
+                       0xb8, 0xcd, 0xe2, 0xf7, 0x0c, 0x21, 0x36, 0x4b,
+                       0x60, 0x75, 0x8a, 0x9f, 0xb4, 0xc9, 0xde, 0xf3,
+                       0x08, 0x1d, 0x32, 0x47, 0x5c, 0x71, 0x86, 0x9b,
+                       0xb0, 0xc5, 0xda, 0xef, 0x04, 0x19, 0x2e, 0x43,
+                       0x58, 0x6d, 0x82, 0x97, 0xac, 0xc1, 0xd6, 0xeb,
+                       0x00, 0x17, 0x2e, 0x45, 0x5c, 0x73, 0x8a, 0xa1,
+                       0xb8, 0xcf, 0xe6, 0xfd, 0x14, 0x2b, 0x42, 0x59,
+                       0x70, 0x87, 0x9e, 0xb5, 0xcc, 0xe3, 0xfa, 0x11,
+                       0x28, 0x3f, 0x56, 0x6d, 0x84, 0x9b, 0xb2, 0xc9,
+                       0xe0, 0xf7, 0x0e, 0x25, 0x3c, 0x53, 0x6a, 0x81,
+                       0x98, 0xaf, 0xc6, 0xdd, 0xf4, 0x0b, 0x22, 0x39,
+                       0x50, 0x67, 0x7e, 0x95, 0xac, 0xc3, 0xda, 0xf1,
+                       0x08, 0x1f, 0x36, 0x4d, 0x64, 0x7b, 0x92, 0xa9,
+                       0xc0, 0xd7, 0xee, 0x05, 0x1c, 0x33, 0x4a, 0x61,
+                       0x78, 0x8f, 0xa6, 0xbd, 0xd4, 0xeb, 0x02, 0x19,
+                       0x30, 0x47, 0x5e, 0x75, 0x8c, 0xa3, 0xba, 0xd1,
+                       0xe8, 0xff, 0x16, 0x2d, 0x44, 0x5b, 0x72, 0x89,
+                       0xa0, 0xb7, 0xce, 0xe5, 0xfc, 0x13, 0x2a, 0x41,
+                       0x58, 0x6f, 0x86, 0x9d, 0xb4, 0xcb, 0xe2, 0xf9,
+                       0x10, 0x27, 0x3e, 0x55, 0x6c, 0x83, 0x9a, 0xb1,
+                       0xc8, 0xdf, 0xf6, 0x0d, 0x24, 0x3b, 0x52, 0x69,
+                       0x80, 0x97, 0xae, 0xc5, 0xdc, 0xf3, 0x0a, 0x21,
+                       0x38, 0x4f, 0x66, 0x7d, 0x94, 0xab, 0xc2, 0xd9,
+                       0xf0, 0x07, 0x1e, 0x35, 0x4c, 0x63, 0x7a, 0x91,
+                       0xa8, 0xbf, 0xd6, 0xed, 0x04, 0x1b, 0x32, 0x49,
+                       0x60, 0x77, 0x8e, 0xa5, 0xbc, 0xd3, 0xea, 0x01,
+                       0x18, 0x2f, 0x46, 0x5d, 0x74, 0x8b, 0xa2, 0xb9,
+                       0xd0, 0xe7, 0xfe, 0x15, 0x2c, 0x43, 0x5a, 0x71,
+                       0x88, 0x9f, 0xb6, 0xcd, 0xe4, 0xfb, 0x12, 0x29,
+                       0x40, 0x57, 0x6e, 0x85, 0x9c, 0xb3, 0xca, 0xe1,
+                       0xf8, 0x0f, 0x26, 0x3d, 0x54, 0x6b, 0x82, 0x99,
+                       0xb0, 0xc7, 0xde, 0xf5, 0x0c, 0x23, 0x3a, 0x51,
+                       0x68, 0x7f, 0x96, 0xad, 0xc4, 0xdb, 0xf2, 0x09,
+                       0x20, 0x37, 0x4e, 0x65, 0x7c, 0x93, 0xaa, 0xc1,
+                       0xd8, 0xef, 0x06, 0x1d, 0x34, 0x4b, 0x62, 0x79,
+                       0x90, 0xa7, 0xbe, 0xd5, 0xec, 0x03, 0x1a, 0x31,
+                       0x48, 0x5f, 0x76, 0x8d, 0xa4, 0xbb, 0xd2, 0xe9,
+                       0x00, 0x19, 0x32, 0x4b, 0x64, 0x7d, 0x96, 0xaf,
+                       0xc8, 0xe1, 0xfa, 0x13, 0x2c, 0x45, 0x5e, 0x77,
+                       0x90, 0xa9, 0xc2, 0xdb, 0xf4, 0x0d, 0x26, 0x3f,
+                       0x58, 0x71, 0x8a, 0xa3, 0xbc, 0xd5, 0xee, 0x07,
+                       0x20, 0x39, 0x52, 0x6b, 0x84, 0x9d, 0xb6, 0xcf,
+                       0xe8, 0x01, 0x1a, 0x33, 0x4c, 0x65, 0x7e, 0x97,
+                       0xb0, 0xc9, 0xe2, 0xfb, 0x14, 0x2d, 0x46, 0x5f,
+                       0x78, 0x91, 0xaa, 0xc3, 0xdc, 0xf5, 0x0e, 0x27,
+                       0x40, 0x59, 0x72, 0x8b, 0xa4, 0xbd, 0xd6, 0xef,
+                       0x08, 0x21, 0x3a, 0x53, 0x6c, 0x85, 0x9e, 0xb7,
+                       0xd0, 0xe9, 0x02, 0x1b, 0x34, 0x4d, 0x66, 0x7f,
+                       0x98, 0xb1, 0xca, 0xe3, 0xfc, 0x15, 0x2e, 0x47,
+                       0x60, 0x79, 0x92, 0xab, 0xc4, 0xdd, 0xf6, 0x0f,
+                       0x28, 0x41, 0x5a, 0x73, 0x8c, 0xa5, 0xbe, 0xd7,
+                       0xf0, 0x09, 0x22, 0x3b, 0x54, 0x6d, 0x86, 0x9f,
+                       0xb8, 0xd1, 0xea, 0x03, 0x1c, 0x35, 0x4e, 0x67,
+                       0x80, 0x99, 0xb2, 0xcb, 0xe4, 0xfd, 0x16, 0x2f,
+                       0x48, 0x61, 0x7a, 0x93, 0xac, 0xc5, 0xde, 0xf7,
+                       0x10, 0x29, 0x42, 0x5b, 0x74, 0x8d, 0xa6, 0xbf,
+                       0xd8, 0xf1, 0x0a, 0x23, 0x3c, 0x55, 0x6e, 0x87,
+                       0xa0, 0xb9, 0xd2, 0xeb, 0x04, 0x1d, 0x36, 0x4f,
+                       0x68, 0x81, 0x9a, 0xb3, 0xcc, 0xe5, 0xfe, 0x17,
+                       0x30, 0x49, 0x62, 0x7b, 0x94, 0xad, 0xc6, 0xdf,
+                       0xf8, 0x11, 0x2a, 0x43, 0x5c, 0x75, 0x8e, 0xa7,
+                       0xc0, 0xd9, 0xf2, 0x0b, 0x24, 0x3d, 0x56, 0x6f,
+                       0x88, 0xa1, 0xba, 0xd3, 0xec, 0x05, 0x1e, 0x37,
+                       0x50, 0x69, 0x82, 0x9b, 0xb4, 0xcd, 0xe6, 0xff,
+                       0x18, 0x31, 0x4a, 0x63, 0x7c, 0x95, 0xae, 0xc7,
+                       0xe0, 0xf9, 0x12, 0x2b, 0x44, 0x5d, 0x76, 0x8f,
+                       0xa8, 0xc1, 0xda, 0xf3, 0x0c, 0x25, 0x3e, 0x57,
+                       0x70, 0x89, 0xa2, 0xbb, 0xd4, 0xed, 0x06, 0x1f,
+                       0x38, 0x51, 0x6a, 0x83, 0x9c, 0xb5, 0xce, 0xe7,
+                       0x00, 0x1b, 0x36, 0x51, 0x6c, 0x87, 0xa2, 0xbd,
+                       0xd8, 0xf3, 0x0e, 0x29, 0x44, 0x5f, 0x7a, 0x95,
+                       0xb0, 0xcb, 0xe6, 0x01, 0x1c, 0x37, 0x52, 0x6d,
+                       0x88, 0xa3, 0xbe, 0xd9, 0xf4, 0x0f, 0x2a, 0x45,
+                       0x60, 0x7b, 0x96, 0xb1, 0xcc, 0xe7, 0x02, 0x1d,
+                       0x38, 0x53, 0x6e, 0x89, 0xa4, 0xbf, 0xda, 0xf5,
+                       0x10, 0x2b, 0x46, 0x61, 0x7c, 0x97, 0xb2, 0xcd,
+                       0xe8, 0x03, 0x1e, 0x39, 0x54, 0x6f, 0x8a, 0xa5,
+                       0xc0, 0xdb, 0xf6, 0x11, 0x2c, 0x47, 0x62, 0x7d,
+                       0x98, 0xb3, 0xce, 0xe9, 0x04, 0x1f, 0x3a, 0x55,
+                       0x70, 0x8b, 0xa6, 0xc1, 0xdc, 0xf7, 0x12, 0x2d,
+                       0x48, 0x63, 0x7e, 0x99, 0xb4, 0xcf, 0xea, 0x05,
+                       0x20, 0x3b, 0x56, 0x71, 0x8c, 0xa7, 0xc2, 0xdd,
+                       0xf8, 0x13, 0x2e, 0x49, 0x64, 0x7f, 0x9a, 0xb5,
+                       0xd0, 0xeb, 0x06, 0x21, 0x3c, 0x57, 0x72, 0x8d,
+                       0xa8, 0xc3, 0xde, 0xf9, 0x14, 0x2f, 0x4a, 0x65,
+                       0x80, 0x9b, 0xb6, 0xd1, 0xec, 0x07, 0x22, 0x3d,
+                       0x58, 0x73, 0x8e, 0xa9, 0xc4, 0xdf, 0xfa, 0x15,
+                       0x30, 0x4b, 0x66, 0x81, 0x9c, 0xb7, 0xd2, 0xed,
+                       0x08, 0x23, 0x3e, 0x59, 0x74, 0x8f, 0xaa, 0xc5,
+                       0xe0, 0xfb, 0x16, 0x31, 0x4c, 0x67, 0x82, 0x9d,
+                       0xb8, 0xd3, 0xee, 0x09, 0x24, 0x3f, 0x5a, 0x75,
+                       0x90, 0xab, 0xc6, 0xe1, 0xfc, 0x17, 0x32, 0x4d,
+                       0x68, 0x83, 0x9e, 0xb9, 0xd4, 0xef, 0x0a, 0x25,
+                       0x40, 0x5b, 0x76, 0x91, 0xac, 0xc7, 0xe2, 0xfd,
+                       0x18, 0x33, 0x4e, 0x69, 0x84, 0x9f, 0xba, 0xd5,
+                       0xf0, 0x0b, 0x26, 0x41, 0x5c, 0x77, 0x92, 0xad,
+                       0xc8, 0xe3, 0xfe, 0x19, 0x34, 0x4f, 0x6a, 0x85,
+                       0xa0, 0xbb, 0xd6, 0xf1, 0x0c, 0x27, 0x42, 0x5d,
+                       0x78, 0x93, 0xae, 0xc9, 0xe4, 0xff, 0x1a, 0x35,
+                       0x50, 0x6b, 0x86, 0xa1, 0xbc, 0xd7, 0xf2, 0x0d,
+                       0x28, 0x43, 0x5e, 0x79, 0x94, 0xaf, 0xca, 0xe5,
+                       0x00, 0x1d, 0x3a, 0x57, 0x74, 0x91, 0xae, 0xcb,
+                       0xe8, 0x05, 0x22, 0x3f, 0x5c, 0x79, 0x96, 0xb3,
+                       0xd0, 0xed, 0x0a, 0x27, 0x44, 0x61, 0x7e, 0x9b,
+                       0xb8, 0xd5, 0xf2, 0x0f, 0x2c, 0x49, 0x66, 0x83,
+                       0xa0, 0xbd, 0xda, 0xf7, 0x14, 0x31, 0x4e, 0x6b,
+                       0x88, 0xa5, 0xc2, 0xdf, 0xfc, 0x19, 0x36, 0x53,
+                       0x70, 0x8d, 0xaa, 0xc7, 0xe4, 0x01, 0x1e, 0x3b,
+                       0x58, 0x75, 0x92, 0xaf, 0xcc, 0xe9, 0x06, 0x23,
+                       0x40, 0x5d, 0x7a, 0x97, 0xb4, 0xd1, 0xee, 0x0b,
+                       0x28, 0x45, 0x62, 0x7f, 0x9c, 0xb9, 0xd6, 0xf3,
+                       0x10, 0x2d, 0x4a, 0x67, 0x84, 0xa1, 0xbe, 0xdb,
+                       0xf8, 0x15, 0x32, 0x4f, 0x6c, 0x89, 0xa6, 0xc3,
+                       0xe0, 0xfd, 0x1a, 0x37, 0x54, 0x71, 0x8e, 0xab,
+                       0xc8, 0xe5, 0x02, 0x1f, 0x3c, 0x59, 0x76, 0x93,
+                       0xb0, 0xcd, 0xea, 0x07, 0x24, 0x41, 0x5e, 0x7b,
+                       0x98, 0xb5, 0xd2, 0xef, 0x0c, 0x29, 0x46, 0x63,
+                       0x80, 0x9d, 0xba, 0xd7, 0xf4, 0x11, 0x2e, 0x4b,
+                       0x68, 0x85, 0xa2, 0xbf, 0xdc, 0xf9, 0x16, 0x33,
+                       0x50, 0x6d, 0x8a, 0xa7, 0xc4, 0xe1, 0xfe, 0x1b,
+                       0x38, 0x55, 0x72, 0x8f, 0xac, 0xc9, 0xe6, 0x03,
+                       0x20, 0x3d, 0x5a, 0x77, 0x94, 0xb1, 0xce, 0xeb,
+                       0x08, 0x25, 0x42, 0x5f, 0x7c, 0x99, 0xb6, 0xd3,
+                       0xf0, 0x0d, 0x2a, 0x47, 0x64, 0x81, 0x9e, 0xbb,
+                       0xd8, 0xf5, 0x12, 0x2f, 0x4c, 0x69, 0x86, 0xa3,
+                       0xc0, 0xdd, 0xfa, 0x17, 0x34, 0x51, 0x6e, 0x8b,
+                       0xa8, 0xc5, 0xe2, 0xff, 0x1c, 0x39, 0x56, 0x73,
+                       0x90, 0xad, 0xca, 0xe7, 0x04, 0x21, 0x3e, 0x5b,
+                       0x78, 0x95, 0xb2, 0xcf, 0xec, 0x09, 0x26, 0x43,
+                       0x60, 0x7d, 0x9a, 0xb7, 0xd4, 0xf1, 0x0e, 0x2b,
+                       0x48, 0x65, 0x82, 0x9f, 0xbc, 0xd9, 0xf6, 0x13,
+                       0x30, 0x4d, 0x6a, 0x87, 0xa4, 0xc1, 0xde, 0xfb,
+                       0x18, 0x35, 0x52, 0x6f, 0x8c, 0xa9, 0xc6, 0xe3,
+                       0x00, 0x1f, 0x3e, 0x5d, 0x7c, 0x9b, 0xba, 0xd9,
+                       0xf8, 0x17, 0x36, 0x55, 0x74, 0x93, 0xb2, 0xd1,
+                       0xf0, 0x0f, 0x2e, 0x4d, 0x6c, 0x8b, 0xaa, 0xc9,
+                       0xe8, 0x07, 0x26, 0x45, 0x64, 0x83, 0xa2, 0xc1,
+                       0xe0, 0xff, 0x1e, 0x3d, 0x5c, 0x7b, 0x9a, 0xb9,
+                       0xd8, 0xf7, 0x16, 0x35, 0x54, 0x73, 0x92, 0xb1,
+                       0xd0, 0xef, 0x0e, 0x2d, 0x4c, 0x6b, 0x8a, 0xa9,
+                       0xc8, 0xe7, 0x06, 0x25, 0x44, 0x63, 0x82, 0xa1,
+                       0xc0, 0xdf, 0xfe, 0x1d, 0x3c, 0x5b, 0x7a, 0x99,
+                       0xb8, 0xd7, 0xf6, 0x15, 0x34, 0x53, 0x72, 0x91,
+                       0xb0, 0xcf, 0xee, 0x0d, 0x2c, 0x4b, 0x6a, 0x89,
+                       0xa8, 0xc7, 0xe6, 0x05, 0x24, 0x43, 0x62, 0x81,
+                       0xa0, 0xbf, 0xde, 0xfd, 0x1c, 0x3b, 0x5a, 0x79,
+                       0x98, 0xb7, 0xd6, 0xf5, 0x14, 0x33, 0x52, 0x71,
+                       0x90, 0xaf, 0xce, 0xed, 0x0c, 0x2b, 0x4a, 0x69,
+                       0x88, 0xa7, 0xc6, 0xe5, 0x04, 0x23, 0x42, 0x61,
+                       0x80, 0x9f, 0xbe, 0xdd, 0xfc, 0x1b, 0x3a, 0x59,
+                       0x78, 0x97, 0xb6, 0xd5, 0xf4, 0x13, 0x32, 0x51,
+                       0x70, 0x8f, 0xae, 0xcd, 0xec, 0x0b, 0x2a, 0x49,
+                       0x68, 0x87, 0xa6, 0xc5, 0xe4, 0x03, 0x22, 0x41,
+                       0x60, 0x7f, 0x9e, 0xbd, 0xdc, 0xfb, 0x1a, 0x39,
+                       0x58, 0x77, 0x96, 0xb5, 0xd4, 0xf3, 0x12, 0x31,
+                       0x50, 0x6f, 0x8e, 0xad, 0xcc, 0xeb, 0x0a, 0x29,
+                       0x48, 0x67, 0x86, 0xa5, 0xc4, 0xe3, 0x02, 0x21,
+                       0x40, 0x5f, 0x7e, 0x9d, 0xbc, 0xdb, 0xfa, 0x19,
+                       0x38, 0x57, 0x76, 0x95, 0xb4, 0xd3, 0xf2, 0x11,
+                       0x30, 0x4f, 0x6e, 0x8d, 0xac, 0xcb, 0xea, 0x09,
+                       0x28, 0x47, 0x66, 0x85, 0xa4, 0xc3, 0xe2, 0x01,
+                       0x20, 0x3f, 0x5e, 0x7d, 0x9c, 0xbb, 0xda, 0xf9,
+                       0x18, 0x37, 0x56, 0x75, 0x94, 0xb3, 0xd2, 0xf1,
+                       0x10, 0x2f, 0x4e, 0x6d, 0x8c, 0xab, 0xca, 0xe9,
+                       0x08, 0x27, 0x46, 0x65, 0x84, 0xa3, 0xc2, 0xe1,
+                       0x00, 0x21, 0x42, 0x63,
+               },
+               .ilen = 4100,
+               .result = {
+                       0xf0, 0x5c, 0x74, 0xad, 0x4e, 0xbc, 0x99, 0xe2,
+                       0xae, 0xff, 0x91, 0x3a, 0x44, 0xcf, 0x38, 0x32,
+                       0x1e, 0xad, 0xa7, 0xcd, 0xa1, 0x39, 0x95, 0xaa,
+                       0x10, 0xb1, 0xb3, 0x2e, 0x04, 0x31, 0x8f, 0x86,
+                       0xf2, 0x62, 0x74, 0x70, 0x0c, 0xa4, 0x46, 0x08,
+                       0xa8, 0xb7, 0x99, 0xa8, 0xe9, 0xd2, 0x73, 0x79,
+                       0x7e, 0x6e, 0xd4, 0x8f, 0x1e, 0xc7, 0x8e, 0x31,
+                       0x0b, 0xfa, 0x4b, 0xce, 0xfd, 0xf3, 0x57, 0x71,
+                       0xe9, 0x46, 0x03, 0xa5, 0x3d, 0x34, 0x00, 0xe2,
+                       0x18, 0xff, 0x75, 0x6d, 0x06, 0x2d, 0x00, 0xab,
+                       0xb9, 0x3e, 0x6c, 0x59, 0xc5, 0x84, 0x06, 0xb5,
+                       0x8b, 0xd0, 0x89, 0x9c, 0x4a, 0x79, 0x16, 0xc6,
+                       0x3d, 0x74, 0x54, 0xfa, 0x44, 0xcd, 0x23, 0x26,
+                       0x5c, 0xcf, 0x7e, 0x28, 0x92, 0x32, 0xbf, 0xdf,
+                       0xa7, 0x20, 0x3c, 0x74, 0x58, 0x2a, 0x9a, 0xde,
+                       0x61, 0x00, 0x1c, 0x4f, 0xff, 0x59, 0xc4, 0x22,
+                       0xac, 0x3c, 0xd0, 0xe8, 0x6c, 0xf9, 0x97, 0x1b,
+                       0x58, 0x9b, 0xad, 0x71, 0xe8, 0xa9, 0xb5, 0x0d,
+                       0xee, 0x2f, 0x04, 0x1f, 0x7f, 0xbc, 0x99, 0xee,
+                       0x84, 0xff, 0x42, 0x60, 0xdc, 0x3a, 0x18, 0xa5,
+                       0x81, 0xf9, 0xef, 0xdc, 0x7a, 0x0f, 0x65, 0x41,
+                       0x2f, 0xa3, 0xd3, 0xf9, 0xc2, 0xcb, 0xc0, 0x4d,
+                       0x8f, 0xd3, 0x76, 0x96, 0xad, 0x49, 0x6d, 0x38,
+                       0x3d, 0x39, 0x0b, 0x6c, 0x80, 0xb7, 0x54, 0x69,
+                       0xf0, 0x2c, 0x90, 0x02, 0x29, 0x0d, 0x1c, 0x12,
+                       0xad, 0x55, 0xc3, 0x8b, 0x68, 0xd9, 0xcc, 0xb3,
+                       0xb2, 0x64, 0x33, 0x90, 0x5e, 0xca, 0x4b, 0xe2,
+                       0xfb, 0x75, 0xdc, 0x63, 0xf7, 0x9f, 0x82, 0x74,
+                       0xf0, 0xc9, 0xaa, 0x7f, 0xe9, 0x2a, 0x9b, 0x33,
+                       0xbc, 0x88, 0x00, 0x7f, 0xca, 0xb2, 0x1f, 0x14,
+                       0xdb, 0xc5, 0x8e, 0x7b, 0x11, 0x3c, 0x3e, 0x08,
+                       0xf3, 0x83, 0xe8, 0xe0, 0x94, 0x86, 0x2e, 0x92,
+                       0x78, 0x6b, 0x01, 0xc9, 0xc7, 0x83, 0xba, 0x21,
+                       0x6a, 0x25, 0x15, 0x33, 0x4e, 0x45, 0x08, 0xec,
+                       0x35, 0xdb, 0xe0, 0x6e, 0x31, 0x51, 0x79, 0xa9,
+                       0x42, 0x44, 0x65, 0xc1, 0xa0, 0xf1, 0xf9, 0x2a,
+                       0x70, 0xd5, 0xb6, 0xc6, 0xc1, 0x8c, 0x39, 0xfc,
+                       0x25, 0xa6, 0x55, 0xd9, 0xdd, 0x2d, 0x4c, 0xec,
+                       0x49, 0xc6, 0xeb, 0x0e, 0xa8, 0x25, 0x2a, 0x16,
+                       0x1b, 0x66, 0x84, 0xda, 0xe2, 0x92, 0xe5, 0xc0,
+                       0xc8, 0x53, 0x07, 0xaf, 0x80, 0x84, 0xec, 0xfd,
+                       0xcd, 0xd1, 0x6e, 0xcd, 0x6f, 0x6a, 0xf5, 0x36,
+                       0xc5, 0x15, 0xe5, 0x25, 0x7d, 0x77, 0xd1, 0x1a,
+                       0x93, 0x36, 0xa9, 0xcf, 0x7c, 0xa4, 0x54, 0x4a,
+                       0x06, 0x51, 0x48, 0x4e, 0xf6, 0x59, 0x87, 0xd2,
+                       0x04, 0x02, 0xef, 0xd3, 0x44, 0xde, 0x76, 0x31,
+                       0xb3, 0x34, 0x17, 0x1b, 0x9d, 0x66, 0x11, 0x9f,
+                       0x1e, 0xcc, 0x17, 0xe9, 0xc7, 0x3c, 0x1b, 0xe7,
+                       0xcb, 0x50, 0x08, 0xfc, 0xdc, 0x2b, 0x24, 0xdb,
+                       0x65, 0x83, 0xd0, 0x3b, 0xe3, 0x30, 0xea, 0x94,
+                       0x6c, 0xe7, 0xe8, 0x35, 0x32, 0xc7, 0xdb, 0x64,
+                       0xb4, 0x01, 0xab, 0x36, 0x2c, 0x77, 0x13, 0xaf,
+                       0xf8, 0x2b, 0x88, 0x3f, 0x54, 0x39, 0xc4, 0x44,
+                       0xfe, 0xef, 0x6f, 0x68, 0x34, 0xbe, 0x0f, 0x05,
+                       0x16, 0x6d, 0xf6, 0x0a, 0x30, 0xe7, 0xe3, 0xed,
+                       0xc4, 0xde, 0x3c, 0x1b, 0x13, 0xd8, 0xdb, 0xfe,
+                       0x41, 0x62, 0xe5, 0x28, 0xd4, 0x8d, 0xa3, 0xc7,
+                       0x93, 0x97, 0xc6, 0x48, 0x45, 0x1d, 0x9f, 0x83,
+                       0xdf, 0x4b, 0x40, 0x3e, 0x42, 0x25, 0x87, 0x80,
+                       0x4c, 0x7d, 0xa8, 0xd4, 0x98, 0x23, 0x95, 0x75,
+                       0x41, 0x8c, 0xda, 0x41, 0x9b, 0xd4, 0xa7, 0x06,
+                       0xb5, 0xf1, 0x71, 0x09, 0x53, 0xbe, 0xca, 0xbf,
+                       0x32, 0x03, 0xed, 0xf0, 0x50, 0x1c, 0x56, 0x39,
+                       0x5b, 0xa4, 0x75, 0x18, 0xf7, 0x9b, 0x58, 0xef,
+                       0x53, 0xfc, 0x2a, 0x38, 0x23, 0x15, 0x75, 0xcd,
+                       0x45, 0xe5, 0x5a, 0x82, 0x55, 0xba, 0x21, 0xfa,
+                       0xd4, 0xbd, 0xc6, 0x94, 0x7c, 0xc5, 0x80, 0x12,
+                       0xf7, 0x4b, 0x32, 0xc4, 0x9a, 0x82, 0xd8, 0x28,
+                       0x8f, 0xd9, 0xc2, 0x0f, 0x60, 0x03, 0xbe, 0x5e,
+                       0x21, 0xd6, 0x5f, 0x58, 0xbf, 0x5c, 0xb1, 0x32,
+                       0x82, 0x8d, 0xa9, 0xe5, 0xf2, 0x66, 0x1a, 0xc0,
+                       0xa0, 0xbc, 0x58, 0x2f, 0x71, 0xf5, 0x2f, 0xed,
+                       0xd1, 0x26, 0xb9, 0xd8, 0x49, 0x5a, 0x07, 0x19,
+                       0x01, 0x7c, 0x59, 0xb0, 0xf8, 0xa4, 0xb7, 0xd3,
+                       0x7b, 0x1a, 0x8c, 0x38, 0xf4, 0x50, 0xa4, 0x59,
+                       0xb0, 0xcc, 0x41, 0x0b, 0x88, 0x7f, 0xe5, 0x31,
+                       0xb3, 0x42, 0xba, 0xa2, 0x7e, 0xd4, 0x32, 0x71,
+                       0x45, 0x87, 0x48, 0xa9, 0xc2, 0xf2, 0x89, 0xb3,
+                       0xe4, 0xa7, 0x7e, 0x52, 0x15, 0x61, 0xfa, 0xfe,
+                       0xc9, 0xdd, 0x81, 0xeb, 0x13, 0xab, 0xab, 0xc3,
+                       0x98, 0x59, 0xd8, 0x16, 0x3d, 0x14, 0x7a, 0x1c,
+                       0x3c, 0x41, 0x9a, 0x16, 0x16, 0x9b, 0xd2, 0xd2,
+                       0x69, 0x3a, 0x29, 0x23, 0xac, 0x86, 0x32, 0xa5,
+                       0x48, 0x9c, 0x9e, 0xf3, 0x47, 0x77, 0x81, 0x70,
+                       0x24, 0xe8, 0x85, 0xd2, 0xf5, 0xb5, 0xfa, 0xff,
+                       0x59, 0x6a, 0xd3, 0x50, 0x59, 0x43, 0x59, 0xde,
+                       0xd9, 0xf1, 0x55, 0xa5, 0x0c, 0xc3, 0x1a, 0x1a,
+                       0x18, 0x34, 0x0d, 0x1a, 0x63, 0x33, 0xed, 0x10,
+                       0xe0, 0x1d, 0x2a, 0x18, 0xd2, 0xc0, 0x54, 0xa8,
+                       0xca, 0xb5, 0x9a, 0xd3, 0xdd, 0xca, 0x45, 0x84,
+                       0x50, 0xe7, 0x0f, 0xfe, 0xa4, 0x99, 0x5a, 0xbe,
+                       0x43, 0x2d, 0x9a, 0xcb, 0x92, 0x3f, 0x5a, 0x1d,
+                       0x85, 0xd8, 0xc9, 0xdf, 0x68, 0xc9, 0x12, 0x80,
+                       0x56, 0x0c, 0xdc, 0x00, 0xdc, 0x3a, 0x7d, 0x9d,
+                       0xa3, 0xa2, 0xe8, 0x4d, 0xbf, 0xf9, 0x70, 0xa0,
+                       0xa4, 0x13, 0x4f, 0x6b, 0xaf, 0x0a, 0x89, 0x7f,
+                       0xda, 0xf0, 0xbf, 0x9b, 0xc8, 0x1d, 0xe5, 0xf8,
+                       0x2e, 0x8b, 0x07, 0xb5, 0x73, 0x1b, 0xcc, 0xa2,
+                       0xa6, 0xad, 0x30, 0xbc, 0x78, 0x3c, 0x5b, 0x10,
+                       0xfa, 0x5e, 0x62, 0x2d, 0x9e, 0x64, 0xb3, 0x33,
+                       0xce, 0xf9, 0x1f, 0x86, 0xe7, 0x8b, 0xa2, 0xb8,
+                       0xe8, 0x99, 0x57, 0x8c, 0x11, 0xed, 0x66, 0xd9,
+                       0x3c, 0x72, 0xb9, 0xc3, 0xe6, 0x4e, 0x17, 0x3a,
+                       0x6a, 0xcb, 0x42, 0x24, 0x06, 0xed, 0x3e, 0x4e,
+                       0xa3, 0xe8, 0x6a, 0x94, 0xda, 0x0d, 0x4e, 0xd5,
+                       0x14, 0x19, 0xcf, 0xb6, 0x26, 0xd8, 0x2e, 0xcc,
+                       0x64, 0x76, 0x38, 0x49, 0x4d, 0xfe, 0x30, 0x6d,
+                       0xe4, 0xc8, 0x8c, 0x7b, 0xc4, 0xe0, 0x35, 0xba,
+                       0x22, 0x6e, 0x76, 0xe1, 0x1a, 0xf2, 0x53, 0xc3,
+                       0x28, 0xa2, 0x82, 0x1f, 0x61, 0x69, 0xad, 0xc1,
+                       0x7b, 0x28, 0x4b, 0x1e, 0x6c, 0x85, 0x95, 0x9b,
+                       0x51, 0xb5, 0x17, 0x7f, 0x12, 0x69, 0x8c, 0x24,
+                       0xd5, 0xc7, 0x5a, 0x5a, 0x11, 0x54, 0xff, 0x5a,
+                       0xf7, 0x16, 0xc3, 0x91, 0xa6, 0xf0, 0xdc, 0x0a,
+                       0xb6, 0xa7, 0x4a, 0x0d, 0x7a, 0x58, 0xfe, 0xa5,
+                       0xf5, 0xcb, 0x8f, 0x7b, 0x0e, 0xea, 0x57, 0xe7,
+                       0xbd, 0x79, 0xd6, 0x1c, 0x88, 0x23, 0x6c, 0xf2,
+                       0x4d, 0x29, 0x77, 0x53, 0x35, 0x6a, 0x00, 0x8d,
+                       0xcd, 0xa3, 0x58, 0xbe, 0x77, 0x99, 0x18, 0xf8,
+                       0xe6, 0xe1, 0x8f, 0xe9, 0x37, 0x8f, 0xe3, 0xe2,
+                       0x5a, 0x8a, 0x93, 0x25, 0xaf, 0xf3, 0x78, 0x80,
+                       0xbe, 0xa6, 0x1b, 0xc6, 0xac, 0x8b, 0x1c, 0x91,
+                       0x58, 0xe1, 0x9f, 0x89, 0x35, 0x9d, 0x1d, 0x21,
+                       0x29, 0x9f, 0xf4, 0x99, 0x02, 0x27, 0x0f, 0xa8,
+                       0x4f, 0x79, 0x94, 0x2b, 0x33, 0x2c, 0xda, 0xa2,
+                       0x26, 0x39, 0x83, 0x94, 0xef, 0x27, 0xd8, 0x53,
+                       0x8f, 0x66, 0x0d, 0xe4, 0x41, 0x7d, 0x34, 0xcd,
+                       0x43, 0x7c, 0x95, 0x0a, 0x53, 0xef, 0x66, 0xda,
+                       0x7e, 0x9b, 0xf3, 0x93, 0xaf, 0xd0, 0x73, 0x71,
+                       0xba, 0x40, 0x9b, 0x74, 0xf8, 0xd7, 0xd7, 0x41,
+                       0x6d, 0xaf, 0x72, 0x9c, 0x8d, 0x21, 0x87, 0x3c,
+                       0xfd, 0x0a, 0x90, 0xa9, 0x47, 0x96, 0x9e, 0xd3,
+                       0x88, 0xee, 0x73, 0xcf, 0x66, 0x2f, 0x52, 0x56,
+                       0x6d, 0xa9, 0x80, 0x4c, 0xe2, 0x6f, 0x62, 0x88,
+                       0x3f, 0x0e, 0x54, 0x17, 0x48, 0x80, 0x5d, 0xd3,
+                       0xc3, 0xda, 0x25, 0x3d, 0xa1, 0xc8, 0xcb, 0x9f,
+                       0x9b, 0x70, 0xb3, 0xa1, 0xeb, 0x04, 0x52, 0xa1,
+                       0xf2, 0x22, 0x0f, 0xfc, 0xc8, 0x18, 0xfa, 0xf9,
+                       0x85, 0x9c, 0xf1, 0xac, 0xeb, 0x0c, 0x02, 0x46,
+                       0x75, 0xd2, 0xf5, 0x2c, 0xe3, 0xd2, 0x59, 0x94,
+                       0x12, 0xf3, 0x3c, 0xfc, 0xd7, 0x92, 0xfa, 0x36,
+                       0xba, 0x61, 0x34, 0x38, 0x7c, 0xda, 0x48, 0x3e,
+                       0x08, 0xc9, 0x39, 0x23, 0x5e, 0x02, 0x2c, 0x1a,
+                       0x18, 0x7e, 0xb4, 0xd9, 0xfd, 0x9e, 0x40, 0x02,
+                       0xb1, 0x33, 0x37, 0x32, 0xe7, 0xde, 0xd6, 0xd0,
+                       0x7c, 0x58, 0x65, 0x4b, 0xf8, 0x34, 0x27, 0x9c,
+                       0x44, 0xb4, 0xbd, 0xe9, 0xe9, 0x4c, 0x78, 0x7d,
+                       0x4b, 0x9f, 0xce, 0xb1, 0xcd, 0x47, 0xa5, 0x37,
+                       0xe5, 0x6d, 0xbd, 0xb9, 0x43, 0x94, 0x0a, 0xd4,
+                       0xd6, 0xf9, 0x04, 0x5f, 0xb5, 0x66, 0x6c, 0x1a,
+                       0x35, 0x12, 0xe3, 0x36, 0x28, 0x27, 0x36, 0x58,
+                       0x01, 0x2b, 0x79, 0xe4, 0xba, 0x6d, 0x10, 0x7d,
+                       0x65, 0xdf, 0x84, 0x95, 0xf4, 0xd5, 0xb6, 0x8f,
+                       0x2b, 0x9f, 0x96, 0x00, 0x86, 0x60, 0xf0, 0x21,
+                       0x76, 0xa8, 0x6a, 0x8c, 0x28, 0x1c, 0xb3, 0x6b,
+                       0x97, 0xd7, 0xb6, 0x53, 0x2a, 0xcc, 0xab, 0x40,
+                       0x9d, 0x62, 0x79, 0x58, 0x52, 0xe6, 0x65, 0xb7,
+                       0xab, 0x55, 0x67, 0x9c, 0x89, 0x7c, 0x03, 0xb0,
+                       0x73, 0x59, 0xc5, 0x81, 0xf5, 0x18, 0x17, 0x5c,
+                       0x89, 0xf3, 0x78, 0x35, 0x44, 0x62, 0x78, 0x72,
+                       0xd0, 0x96, 0xeb, 0x31, 0xe7, 0x87, 0x77, 0x14,
+                       0x99, 0x51, 0xf2, 0x59, 0x26, 0x9e, 0xb5, 0xa6,
+                       0x45, 0xfe, 0x6e, 0xbd, 0x07, 0x4c, 0x94, 0x5a,
+                       0xa5, 0x7d, 0xfc, 0xf1, 0x2b, 0x77, 0xe2, 0xfe,
+                       0x17, 0xd4, 0x84, 0xa0, 0xac, 0xb5, 0xc7, 0xda,
+                       0xa9, 0x1a, 0xb6, 0xf3, 0x74, 0x11, 0xb4, 0x9d,
+                       0xfb, 0x79, 0x2e, 0x04, 0x2d, 0x50, 0x28, 0x83,
+                       0xbf, 0xc6, 0x52, 0xd3, 0x34, 0xd6, 0xe8, 0x7a,
+                       0xb6, 0xea, 0xe7, 0xa8, 0x6c, 0x15, 0x1e, 0x2c,
+                       0x57, 0xbc, 0x48, 0x4e, 0x5f, 0x5c, 0xb6, 0x92,
+                       0xd2, 0x49, 0x77, 0x81, 0x6d, 0x90, 0x70, 0xae,
+                       0x98, 0xa1, 0x03, 0x0d, 0x6b, 0xb9, 0x77, 0x14,
+                       0xf1, 0x4e, 0x23, 0xd3, 0xf8, 0x68, 0xbd, 0xc2,
+                       0xfe, 0x04, 0xb7, 0x5c, 0xc5, 0x17, 0x60, 0x8f,
+                       0x65, 0x54, 0xa4, 0x7a, 0x42, 0xdc, 0x18, 0x0d,
+                       0xb5, 0xcf, 0x0f, 0xd3, 0xc7, 0x91, 0x66, 0x1b,
+                       0x45, 0x42, 0x27, 0x75, 0x50, 0xe5, 0xee, 0xb8,
+                       0x7f, 0x33, 0x2c, 0xba, 0x4a, 0x92, 0x4d, 0x2c,
+                       0x3c, 0xe3, 0x0d, 0x80, 0x01, 0xba, 0x0d, 0x29,
+                       0xd8, 0x3c, 0xe9, 0x13, 0x16, 0x57, 0xe6, 0xea,
+                       0x94, 0x52, 0xe7, 0x00, 0x4d, 0x30, 0xb0, 0x0f,
+                       0x35, 0xb8, 0xb8, 0xa7, 0xb1, 0xb5, 0x3b, 0x44,
+                       0xe1, 0x2f, 0xfd, 0x88, 0xed, 0x43, 0xe7, 0x52,
+                       0x10, 0x93, 0xb3, 0x8a, 0x30, 0x6b, 0x0a, 0xf7,
+                       0x23, 0xc6, 0x50, 0x9d, 0x4a, 0xb0, 0xde, 0xc3,
+                       0xdc, 0x9b, 0x2f, 0x01, 0x56, 0x36, 0x09, 0xc5,
+                       0x2f, 0x6b, 0xfe, 0xf1, 0xd8, 0x27, 0x45, 0x03,
+                       0x30, 0x5e, 0x5c, 0x5b, 0xb4, 0x62, 0x0e, 0x1a,
+                       0xa9, 0x21, 0x2b, 0x92, 0x94, 0x87, 0x62, 0x57,
+                       0x4c, 0x10, 0x74, 0x1a, 0xf1, 0x0a, 0xc5, 0x84,
+                       0x3b, 0x9e, 0x72, 0x02, 0xd7, 0xcc, 0x09, 0x56,
+                       0xbd, 0x54, 0xc1, 0xf0, 0xc3, 0xe3, 0xb3, 0xf8,
+                       0xd2, 0x0d, 0x61, 0xcb, 0xef, 0xce, 0x0d, 0x05,
+                       0xb0, 0x98, 0xd9, 0x8e, 0x4f, 0xf9, 0xbc, 0x93,
+                       0xa6, 0xea, 0xc8, 0xcf, 0x10, 0x53, 0x4b, 0xf1,
+                       0xec, 0xfc, 0x89, 0xf9, 0x64, 0xb0, 0x22, 0xbf,
+                       0x9e, 0x55, 0x46, 0x9f, 0x7c, 0x50, 0x8e, 0x84,
+                       0x54, 0x20, 0x98, 0xd7, 0x6c, 0x40, 0x1e, 0xdb,
+                       0x69, 0x34, 0x78, 0x61, 0x24, 0x21, 0x9c, 0x8a,
+                       0xb3, 0x62, 0x31, 0x8b, 0x6e, 0xf5, 0x2a, 0x35,
+                       0x86, 0x13, 0xb1, 0x6c, 0x64, 0x2e, 0x41, 0xa5,
+                       0x05, 0xf2, 0x42, 0xba, 0xd2, 0x3a, 0x0d, 0x8e,
+                       0x8a, 0x59, 0x94, 0x3c, 0xcf, 0x36, 0x27, 0x82,
+                       0xc2, 0x45, 0xee, 0x58, 0xcd, 0x88, 0xb4, 0xec,
+                       0xde, 0xb2, 0x96, 0x0a, 0xaf, 0x38, 0x6f, 0x88,
+                       0xd7, 0xd8, 0xe1, 0xdf, 0xb9, 0x96, 0xa9, 0x0a,
+                       0xb1, 0x95, 0x28, 0x86, 0x20, 0xe9, 0x17, 0x49,
+                       0xa2, 0x29, 0x38, 0xaa, 0xa5, 0xe9, 0x6e, 0xf1,
+                       0x19, 0x27, 0xc0, 0xd5, 0x2a, 0x22, 0xc3, 0x0b,
+                       0xdb, 0x7c, 0x73, 0x10, 0xb9, 0xba, 0x89, 0x76,
+                       0x54, 0xae, 0x7d, 0x71, 0xb3, 0x93, 0xf6, 0x32,
+                       0xe6, 0x47, 0x43, 0x55, 0xac, 0xa0, 0x0d, 0xc2,
+                       0x93, 0x27, 0x4a, 0x8e, 0x0e, 0x74, 0x15, 0xc7,
+                       0x0b, 0x85, 0xd9, 0x0c, 0xa9, 0x30, 0x7a, 0x3e,
+                       0xea, 0x8f, 0x85, 0x6d, 0x3a, 0x12, 0x4f, 0x72,
+                       0x69, 0x58, 0x7a, 0x80, 0xbb, 0xb5, 0x97, 0xf3,
+                       0xcf, 0x70, 0xd2, 0x5d, 0xdd, 0x4d, 0x21, 0x79,
+                       0x54, 0x4d, 0xe4, 0x05, 0xe8, 0xbd, 0xc2, 0x62,
+                       0xb1, 0x3b, 0x77, 0x1c, 0xd6, 0x5c, 0xf3, 0xa0,
+                       0x79, 0x00, 0xa8, 0x6c, 0x29, 0xd9, 0x18, 0x24,
+                       0x36, 0xa2, 0x46, 0xc0, 0x96, 0x65, 0x7f, 0xbd,
+                       0x2a, 0xed, 0x36, 0x16, 0x0c, 0xaa, 0x9f, 0xf4,
+                       0xc5, 0xb4, 0xe2, 0x12, 0xed, 0x69, 0xed, 0x4f,
+                       0x26, 0x2c, 0x39, 0x52, 0x89, 0x98, 0xe7, 0x2c,
+                       0x99, 0xa4, 0x9e, 0xa3, 0x9b, 0x99, 0x46, 0x7a,
+                       0x3a, 0xdc, 0xa8, 0x59, 0xa3, 0xdb, 0xc3, 0x3b,
+                       0x95, 0x0d, 0x3b, 0x09, 0x6e, 0xee, 0x83, 0x5d,
+                       0x32, 0x4d, 0xed, 0xab, 0xfa, 0x98, 0x14, 0x4e,
+                       0xc3, 0x15, 0x45, 0x53, 0x61, 0xc4, 0x93, 0xbd,
+                       0x90, 0xf4, 0x99, 0x95, 0x4c, 0xe6, 0x76, 0x92,
+                       0x29, 0x90, 0x46, 0x30, 0x92, 0x69, 0x7d, 0x13,
+                       0xf2, 0xa5, 0xcd, 0x69, 0x49, 0x44, 0xb2, 0x0f,
+                       0x63, 0x40, 0x36, 0x5f, 0x09, 0xe2, 0x78, 0xf8,
+                       0x91, 0xe3, 0xe2, 0xfa, 0x10, 0xf7, 0xc8, 0x24,
+                       0xa8, 0x89, 0x32, 0x5c, 0x37, 0x25, 0x1d, 0xb2,
+                       0xea, 0x17, 0x8a, 0x0a, 0xa9, 0x64, 0xc3, 0x7c,
+                       0x3c, 0x7c, 0xbd, 0xc6, 0x79, 0x34, 0xe7, 0xe2,
+                       0x85, 0x8e, 0xbf, 0xf8, 0xde, 0x92, 0xa0, 0xae,
+                       0x20, 0xc4, 0xf6, 0xbb, 0x1f, 0x38, 0x19, 0x0e,
+                       0xe8, 0x79, 0x9c, 0xa1, 0x23, 0xe9, 0x54, 0x7e,
+                       0x37, 0x2f, 0xe2, 0x94, 0x32, 0xaf, 0xa0, 0x23,
+                       0x49, 0xe4, 0xc0, 0xb3, 0xac, 0x00, 0x8f, 0x36,
+                       0x05, 0xc4, 0xa6, 0x96, 0xec, 0x05, 0x98, 0x4f,
+                       0x96, 0x67, 0x57, 0x1f, 0x20, 0x86, 0x1b, 0x2d,
+                       0x69, 0xe4, 0x29, 0x93, 0x66, 0x5f, 0xaf, 0x6b,
+                       0x88, 0x26, 0x2c, 0x67, 0x02, 0x4b, 0x52, 0xd0,
+                       0x83, 0x7a, 0x43, 0x1f, 0xc0, 0x71, 0x15, 0x25,
+                       0x77, 0x65, 0x08, 0x60, 0x11, 0x76, 0x4c, 0x8d,
+                       0xed, 0xa9, 0x27, 0xc6, 0xb1, 0x2a, 0x2c, 0x6a,
+                       0x4a, 0x97, 0xf5, 0xc6, 0xb7, 0x70, 0x42, 0xd3,
+                       0x03, 0xd1, 0x24, 0x95, 0xec, 0x6d, 0xab, 0x38,
+                       0x72, 0xce, 0xe2, 0x8b, 0x33, 0xd7, 0x51, 0x09,
+                       0xdc, 0x45, 0xe0, 0x09, 0x96, 0x32, 0xf3, 0xc4,
+                       0x84, 0xdc, 0x73, 0x73, 0x2d, 0x1b, 0x11, 0x98,
+                       0xc5, 0x0e, 0x69, 0x28, 0x94, 0xc7, 0xb5, 0x4d,
+                       0xc8, 0x8a, 0xd0, 0xaa, 0x13, 0x2e, 0x18, 0x74,
+                       0xdd, 0xd1, 0x1e, 0xf3, 0x90, 0xe8, 0xfc, 0x9a,
+                       0x72, 0x4a, 0x0e, 0xd1, 0xe4, 0xfb, 0x0d, 0x96,
+                       0xd1, 0x0c, 0x79, 0x85, 0x1b, 0x1c, 0xfe, 0xe1,
+                       0x62, 0x8f, 0x7a, 0x73, 0x32, 0xab, 0xc8, 0x18,
+                       0x69, 0xe3, 0x34, 0x30, 0xdf, 0x13, 0xa6, 0xe5,
+                       0xe8, 0x0e, 0x67, 0x7f, 0x81, 0x11, 0xb4, 0x60,
+                       0xc7, 0xbd, 0x79, 0x65, 0x50, 0xdc, 0xc4, 0x5b,
+                       0xde, 0x39, 0xa4, 0x01, 0x72, 0x63, 0xf3, 0xd1,
+                       0x64, 0x4e, 0xdf, 0xfc, 0x27, 0x92, 0x37, 0x0d,
+                       0x57, 0xcd, 0x11, 0x4f, 0x11, 0x04, 0x8e, 0x1d,
+                       0x16, 0xf7, 0xcd, 0x92, 0x9a, 0x99, 0x30, 0x14,
+                       0xf1, 0x7c, 0x67, 0x1b, 0x1f, 0x41, 0x0b, 0xe8,
+                       0x32, 0xe8, 0xb8, 0xc1, 0x4f, 0x54, 0x86, 0x4f,
+                       0xe5, 0x79, 0x81, 0x73, 0xcd, 0x43, 0x59, 0x68,
+                       0x73, 0x02, 0x3b, 0x78, 0x21, 0x72, 0x43, 0x00,
+                       0x49, 0x17, 0xf7, 0x00, 0xaf, 0x68, 0x24, 0x53,
+                       0x05, 0x0a, 0xc3, 0x33, 0xe0, 0x33, 0x3f, 0x69,
+                       0xd2, 0x84, 0x2f, 0x0b, 0xed, 0xde, 0x04, 0xf4,
+                       0x11, 0x94, 0x13, 0x69, 0x51, 0x09, 0x28, 0xde,
+                       0x57, 0x5c, 0xef, 0xdc, 0x9a, 0x49, 0x1c, 0x17,
+                       0x97, 0xf3, 0x96, 0xc1, 0x7f, 0x5d, 0x2e, 0x7d,
+                       0x55, 0xb8, 0xb3, 0x02, 0x09, 0xb3, 0x1f, 0xe7,
+                       0xc9, 0x8d, 0xa3, 0x36, 0x34, 0x8a, 0x77, 0x13,
+                       0x30, 0x63, 0x4c, 0xa5, 0xcd, 0xc3, 0xe0, 0x7e,
+                       0x05, 0xa1, 0x7b, 0x0c, 0xcb, 0x74, 0x47, 0x31,
+                       0x62, 0x03, 0x43, 0xf1, 0x87, 0xb4, 0xb0, 0x85,
+                       0x87, 0x8e, 0x4b, 0x25, 0xc7, 0xcf, 0xae, 0x4b,
+                       0x36, 0x46, 0x3e, 0x62, 0xbc, 0x6f, 0xeb, 0x5f,
+                       0x73, 0xac, 0xe6, 0x07, 0xee, 0xc1, 0xa1, 0xd6,
+                       0xc4, 0xab, 0xc9, 0xd6, 0x89, 0x45, 0xe1, 0xf1,
+                       0x04, 0x4e, 0x1a, 0x6f, 0xbb, 0x4f, 0x3a, 0xa3,
+                       0xa0, 0xcb, 0xa3, 0x0a, 0xd8, 0x71, 0x35, 0x55,
+                       0xe4, 0xbc, 0x2e, 0x04, 0x06, 0xe6, 0xff, 0x5b,
+                       0x1c, 0xc0, 0x11, 0x7c, 0xc5, 0x17, 0xf3, 0x38,
+                       0xcf, 0xe9, 0xba, 0x0f, 0x0e, 0xef, 0x02, 0xc2,
+                       0x8d, 0xc6, 0xbc, 0x4b, 0x67, 0x20, 0x95, 0xd7,
+                       0x2c, 0x45, 0x5b, 0x86, 0x44, 0x8c, 0x6f, 0x2e,
+                       0x7e, 0x9f, 0x1c, 0x77, 0xba, 0x6b, 0x0e, 0xa3,
+                       0x69, 0xdc, 0xab, 0x24, 0x57, 0x60, 0x47, 0xc1,
+                       0xd1, 0xa5, 0x9d, 0x23, 0xe6, 0xb1, 0x37, 0xfe,
+                       0x93, 0xd2, 0x4c, 0x46, 0xf9, 0x0c, 0xc6, 0xfb,
+                       0xd6, 0x9d, 0x99, 0x69, 0xab, 0x7a, 0x07, 0x0c,
+                       0x65, 0xe7, 0xc4, 0x08, 0x96, 0xe2, 0xa5, 0x01,
+                       0x3f, 0x46, 0x07, 0x05, 0x7e, 0xe8, 0x9a, 0x90,
+                       0x50, 0xdc, 0xe9, 0x7a, 0xea, 0xa1, 0x39, 0x6e,
+                       0x66, 0xe4, 0x6f, 0xa5, 0x5f, 0xb2, 0xd9, 0x5b,
+                       0xf5, 0xdb, 0x2a, 0x32, 0xf0, 0x11, 0x6f, 0x7c,
+                       0x26, 0x10, 0x8f, 0x3d, 0x80, 0xe9, 0x58, 0xf7,
+                       0xe0, 0xa8, 0x57, 0xf8, 0xdb, 0x0e, 0xce, 0x99,
+                       0x63, 0x19, 0x3d, 0xd5, 0xec, 0x1b, 0x77, 0x69,
+                       0x98, 0xf6, 0xe4, 0x5f, 0x67, 0x17, 0x4b, 0x09,
+                       0x85, 0x62, 0x82, 0x70, 0x18, 0xe2, 0x9a, 0x78,
+                       0xe2, 0x62, 0xbd, 0xb4, 0xf1, 0x42, 0xc6, 0xfb,
+                       0x08, 0xd0, 0xbd, 0xeb, 0x4e, 0x09, 0xf2, 0xc8,
+                       0x1e, 0xdc, 0x3d, 0x32, 0x21, 0x56, 0x9c, 0x4f,
+                       0x35, 0xf3, 0x61, 0x06, 0x72, 0x84, 0xc4, 0x32,
+                       0xf2, 0xf1, 0xfa, 0x0b, 0x2f, 0xc3, 0xdb, 0x02,
+                       0x04, 0xc2, 0xde, 0x57, 0x64, 0x60, 0x8d, 0xcf,
+                       0xcb, 0x86, 0x5d, 0x97, 0x3e, 0xb1, 0x9c, 0x01,
+                       0xd6, 0x28, 0x8f, 0x99, 0xbc, 0x46, 0xeb, 0x05,
+                       0xaf, 0x7e, 0xb8, 0x21, 0x2a, 0x56, 0x85, 0x1c,
+                       0xb3, 0x71, 0xa0, 0xde, 0xca, 0x96, 0xf1, 0x78,
+                       0x49, 0xa2, 0x99, 0x81, 0x80, 0x5c, 0x01, 0xf5,
+                       0xa0, 0xa2, 0x56, 0x63, 0xe2, 0x70, 0x07, 0xa5,
+                       0x95, 0xd6, 0x85, 0xeb, 0x36, 0x9e, 0xa9, 0x51,
+                       0x66, 0x56, 0x5f, 0x1d, 0x02, 0x19, 0xe2, 0xf6,
+                       0x4f, 0x73, 0x38, 0x09, 0x75, 0x64, 0x48, 0xe0,
+                       0xf1, 0x7e, 0x0e, 0xe8, 0x9d, 0xf9, 0xed, 0x94,
+                       0xfe, 0x16, 0x26, 0x62, 0x49, 0x74, 0xf4, 0xb0,
+                       0xd4, 0xa9, 0x6c, 0xb0, 0xfd, 0x53, 0xe9, 0x81,
+                       0xe0, 0x7a, 0xbf, 0xcf, 0xb5, 0xc4, 0x01, 0x81,
+                       0x79, 0x99, 0x77, 0x01, 0x3b, 0xe9, 0xa2, 0xb6,
+                       0xe6, 0x6a, 0x8a, 0x9e, 0x56, 0x1c, 0x8d, 0x1e,
+                       0x8f, 0x06, 0x55, 0x2c, 0x6c, 0xdc, 0x92, 0x87,
+                       0x64, 0x3b, 0x4b, 0x19, 0xa1, 0x13, 0x64, 0x1d,
+                       0x4a, 0xe9, 0xc0, 0x00, 0xb8, 0x95, 0xef, 0x6b,
+                       0x1a, 0x86, 0x6d, 0x37, 0x52, 0x02, 0xc2, 0xe0,
+                       0xc8, 0xbb, 0x42, 0x0c, 0x02, 0x21, 0x4a, 0xc9,
+                       0xef, 0xa0, 0x54, 0xe4, 0x5e, 0x16, 0x53, 0x81,
+                       0x70, 0x62, 0x10, 0xaf, 0xde, 0xb8, 0xb5, 0xd3,
+                       0xe8, 0x5e, 0x6c, 0xc3, 0x8a, 0x3e, 0x18, 0x07,
+                       0xf2, 0x2f, 0x7d, 0xa7, 0xe1, 0x3d, 0x4e, 0xb4,
+                       0x26, 0xa7, 0xa3, 0x93, 0x86, 0xb2, 0x04, 0x1e,
+                       0x53, 0x5d, 0x86, 0xd6, 0xde, 0x65, 0xca, 0xe3,
+                       0x4e, 0xc1, 0xcf, 0xef, 0xc8, 0x70, 0x1b, 0x83,
+                       0x13, 0xdd, 0x18, 0x8b, 0x0d, 0x76, 0xd2, 0xf6,
+                       0x37, 0x7a, 0x93, 0x7a, 0x50, 0x11, 0x9f, 0x96,
+                       0x86, 0x25, 0xfd, 0xac, 0xdc, 0xbe, 0x18, 0x93,
+                       0x19, 0x6b, 0xec, 0x58, 0x4f, 0xb9, 0x75, 0xa7,
+                       0xdd, 0x3f, 0x2f, 0xec, 0xc8, 0x5a, 0x84, 0xab,
+                       0xd5, 0xe4, 0x8a, 0x07, 0xf6, 0x4d, 0x23, 0xd6,
+                       0x03, 0xfb, 0x03, 0x6a, 0xea, 0x66, 0xbf, 0xd4,
+                       0xb1, 0x34, 0xfb, 0x78, 0xe9, 0x55, 0xdc, 0x7c,
+                       0x3d, 0x9c, 0xe5, 0x9a, 0xac, 0xc3, 0x7a, 0x80,
+                       0x24, 0x6d, 0xa0, 0xef, 0x25, 0x7c, 0xb7, 0xea,
+                       0xce, 0x4d, 0x5f, 0x18, 0x60, 0xce, 0x87, 0x22,
+                       0x66, 0x2f, 0xd5, 0xdd, 0xdd, 0x02, 0x21, 0x75,
+                       0x82, 0xa0, 0x1f, 0x58, 0xc6, 0xd3, 0x62, 0xf7,
+                       0x32, 0xd8, 0xaf, 0x1e, 0x07, 0x77, 0x51, 0x96,
+                       0xd5, 0x6b, 0x1e, 0x7e, 0x80, 0x02, 0xe8, 0x67,
+                       0xea, 0x17, 0x0b, 0x10, 0xd2, 0x3f, 0x28, 0x25,
+                       0x4f, 0x05, 0x77, 0x02, 0x14, 0x69, 0xf0, 0x2c,
+                       0xbe, 0x0c, 0xf1, 0x74, 0x30, 0xd1, 0xb9, 0x9b,
+                       0xfc, 0x8c, 0xbb, 0x04, 0x16, 0xd9, 0xba, 0xc3,
+                       0xbc, 0x91, 0x8a, 0xc4, 0x30, 0xa4, 0xb0, 0x12,
+                       0x4c, 0x21, 0x87, 0xcb, 0xc9, 0x1d, 0x16, 0x96,
+                       0x07, 0x6f, 0x23, 0x54, 0xb9, 0x6f, 0x79, 0xe5,
+                       0x64, 0xc0, 0x64, 0xda, 0xb1, 0xae, 0xdd, 0x60,
+                       0x6c, 0x1a, 0x9d, 0xd3, 0x04, 0x8e, 0x45, 0xb0,
+                       0x92, 0x61, 0xd0, 0x48, 0x81, 0xed, 0x5e, 0x1d,
+                       0xa0, 0xc9, 0xa4, 0x33, 0xc7, 0x13, 0x51, 0x5d,
+                       0x7f, 0x83, 0x73, 0xb6, 0x70, 0x18, 0x65, 0x3e,
+                       0x2f, 0x0e, 0x7a, 0x12, 0x39, 0x98, 0xab, 0xd8,
+                       0x7e, 0x6f, 0xa3, 0xd1, 0xba, 0x56, 0xad, 0xbd,
+                       0xf0, 0x03, 0x01, 0x1c, 0x85, 0x35, 0x9f, 0xeb,
+                       0x19, 0x63, 0xa1, 0xaf, 0xfe, 0x2d, 0x35, 0x50,
+                       0x39, 0xa0, 0x65, 0x7c, 0x95, 0x7e, 0x6b, 0xfe,
+                       0xc1, 0xac, 0x07, 0x7c, 0x98, 0x4f, 0xbe, 0x57,
+                       0xa7, 0x22, 0xec, 0xe2, 0x7e, 0x29, 0x09, 0x53,
+                       0xe8, 0xbf, 0xb4, 0x7e, 0x3f, 0x8f, 0xfc, 0x14,
+                       0xce, 0x54, 0xf9, 0x18, 0x58, 0xb5, 0xff, 0x44,
+                       0x05, 0x9d, 0xce, 0x1b, 0xb6, 0x82, 0x23, 0xc8,
+                       0x2e, 0xbc, 0x69, 0xbb, 0x4a, 0x29, 0x0f, 0x65,
+                       0x94, 0xf0, 0x63, 0x06, 0x0e, 0xef, 0x8c, 0xbd,
+                       0xff, 0xfd, 0xb0, 0x21, 0x6e, 0x57, 0x05, 0x75,
+                       0xda, 0xd5, 0xc4, 0xeb, 0x8d, 0x32, 0xf7, 0x50,
+                       0xd3, 0x6f, 0x22, 0xed, 0x5f, 0x8e, 0xa2, 0x5b,
+                       0x80, 0x8c, 0xc8, 0x78, 0x40, 0x24, 0x4b, 0x89,
+                       0x30, 0xce, 0x7a, 0x97, 0x0e, 0xc4, 0xaf, 0xef,
+                       0x9b, 0xb4, 0xcd, 0x66, 0x74, 0x14, 0x04, 0x2b,
+                       0xf7, 0xce, 0x0b, 0x1c, 0x6e, 0xc2, 0x78, 0x8c,
+                       0xca, 0xc5, 0xd0, 0x1c, 0x95, 0x4a, 0x91, 0x2d,
+                       0xa7, 0x20, 0xeb, 0x86, 0x52, 0xb7, 0x67, 0xd8,
+                       0x0c, 0xd6, 0x04, 0x14, 0xde, 0x51, 0x74, 0x75,
+                       0xe7, 0x11, 0xb4, 0x87, 0xa3, 0x3d, 0x2d, 0xad,
+                       0x4f, 0xef, 0xa0, 0x0f, 0x70, 0x00, 0x6d, 0x13,
+                       0x19, 0x1d, 0x41, 0x50, 0xe9, 0xd8, 0xf0, 0x32,
+                       0x71, 0xbc, 0xd3, 0x11, 0xf2, 0xac, 0xbe, 0xaf,
+                       0x75, 0x46, 0x65, 0x4e, 0x07, 0x34, 0x37, 0xa3,
+                       0x89, 0xfe, 0x75, 0xd4, 0x70, 0x4c, 0xc6, 0x3f,
+                       0x69, 0x24, 0x0e, 0x38, 0x67, 0x43, 0x8c, 0xde,
+                       0x06, 0xb5, 0xb8, 0xe7, 0xc4, 0xf0, 0x41, 0x8f,
+                       0xf0, 0xbd, 0x2f, 0x0b, 0xb9, 0x18, 0xf8, 0xde,
+                       0x64, 0xb1, 0xdb, 0xee, 0x00, 0x50, 0x77, 0xe1,
+                       0xc7, 0xff, 0xa6, 0xfa, 0xdd, 0x70, 0xf4, 0xe3,
+                       0x93, 0xe9, 0x77, 0x35, 0x3d, 0x4b, 0x2f, 0x2b,
+                       0x6d, 0x55, 0xf0, 0xfc, 0x88, 0x54, 0x4e, 0x89,
+                       0xc1, 0x8a, 0x23, 0x31, 0x2d, 0x14, 0x2a, 0xb8,
+                       0x1b, 0x15, 0xdd, 0x9e, 0x6e, 0x7b, 0xda, 0x05,
+                       0x91, 0x7d, 0x62, 0x64, 0x96, 0x72, 0xde, 0xfc,
+                       0xc1, 0xec, 0xf0, 0x23, 0x51, 0x6f, 0xdb, 0x5b,
+                       0x1d, 0x08, 0x57, 0xce, 0x09, 0xb8, 0xf6, 0xcd,
+                       0x8d, 0x95, 0xf2, 0x20, 0xbf, 0x0f, 0x20, 0x57,
+                       0x98, 0x81, 0x84, 0x4f, 0x15, 0x5c, 0x76, 0xe7,
+                       0x3e, 0x0a, 0x3a, 0x6c, 0xc4, 0x8a, 0xbe, 0x78,
+                       0x74, 0x77, 0xc3, 0x09, 0x4b, 0x5d, 0x48, 0xe4,
+                       0xc8, 0xcb, 0x0b, 0xea, 0x17, 0x28, 0xcf, 0xcf,
+                       0x31, 0x32, 0x44, 0xa4, 0xe5, 0x0e, 0x1a, 0x98,
+                       0x94, 0xc4, 0xf0, 0xff, 0xae, 0x3e, 0x44, 0xe8,
+                       0xa5, 0xb3, 0xb5, 0x37, 0x2f, 0xe8, 0xaf, 0x6f,
+                       0x28, 0xc1, 0x37, 0x5f, 0x31, 0xd2, 0xb9, 0x33,
+                       0xb1, 0xb2, 0x52, 0x94, 0x75, 0x2c, 0x29, 0x59,
+                       0x06, 0xc2, 0x25, 0xe8, 0x71, 0x65, 0x4e, 0xed,
+                       0xc0, 0x9c, 0xb1, 0xbb, 0x25, 0xdc, 0x6c, 0xe7,
+                       0x4b, 0xa5, 0x7a, 0x54, 0x7a, 0x60, 0xff, 0x7a,
+                       0xe0, 0x50, 0x40, 0x96, 0x35, 0x63, 0xe4, 0x0b,
+                       0x76, 0xbd, 0xa4, 0x65, 0x00, 0x1b, 0x57, 0x88,
+                       0xae, 0xed, 0x39, 0x88, 0x42, 0x11, 0x3c, 0xed,
+                       0x85, 0x67, 0x7d, 0xb9, 0x68, 0x82, 0xe9, 0x43,
+                       0x3c, 0x47, 0x53, 0xfa, 0xe8, 0xf8, 0x9f, 0x1f,
+                       0x9f, 0xef, 0x0f, 0xf7, 0x30, 0xd9, 0x30, 0x0e,
+                       0xb9, 0x9f, 0x69, 0x18, 0x2f, 0x7e, 0xf8, 0xf8,
+                       0xf8, 0x8c, 0x0f, 0xd4, 0x02, 0x4d, 0xea, 0xcd,
+                       0x0a, 0x9c, 0x6f, 0x71, 0x6d, 0x5a, 0x4c, 0x60,
+                       0xce, 0x20, 0x56, 0x32, 0xc6, 0xc5, 0x99, 0x1f,
+                       0x09, 0xe6, 0x4e, 0x18, 0x1a, 0x15, 0x13, 0xa8,
+                       0x7d, 0xb1, 0x6b, 0xc0, 0xb2, 0x6d, 0xf8, 0x26,
+                       0x66, 0xf8, 0x3d, 0x18, 0x74, 0x70, 0x66, 0x7a,
+                       0x34, 0x17, 0xde, 0xba, 0x47, 0xf1, 0x06, 0x18,
+                       0xcb, 0xaf, 0xeb, 0x4a, 0x1e, 0x8f, 0xa7, 0x77,
+                       0xe0, 0x3b, 0x78, 0x62, 0x66, 0xc9, 0x10, 0xea,
+                       0x1f, 0xb7, 0x29, 0x0a, 0x45, 0xa1, 0x1d, 0x1e,
+                       0x1d, 0xe2, 0x65, 0x61, 0x50, 0x9c, 0xd7, 0x05,
+                       0xf2, 0x0b, 0x5b, 0x12, 0x61, 0x02, 0xc8, 0xe5,
+                       0x63, 0x4f, 0x20, 0x0c, 0x07, 0x17, 0x33, 0x5e,
+                       0x03, 0x9a, 0x53, 0x0f, 0x2e, 0x55, 0xfe, 0x50,
+                       0x43, 0x7d, 0xd0, 0xb6, 0x7e, 0x5a, 0xda, 0xae,
+                       0x58, 0xef, 0x15, 0xa9, 0x83, 0xd9, 0x46, 0xb1,
+                       0x42, 0xaa, 0xf5, 0x02, 0x6c, 0xce, 0x92, 0x06,
+                       0x1b, 0xdb, 0x66, 0x45, 0x91, 0x79, 0xc2, 0x2d,
+                       0xe6, 0x53, 0xd3, 0x14, 0xfd, 0xbb, 0x44, 0x63,
+                       0xc6, 0xd7, 0x3d, 0x7a, 0x0c, 0x75, 0x78, 0x9d,
+                       0x5c, 0xa6, 0x39, 0xb3, 0xe5, 0x63, 0xca, 0x8b,
+                       0xfe, 0xd3, 0xef, 0x60, 0x83, 0xf6, 0x8e, 0x70,
+                       0xb6, 0x67, 0xc7, 0x77, 0xed, 0x23, 0xef, 0x4c,
+                       0xf0, 0xed, 0x2d, 0x07, 0x59, 0x6f, 0xc1, 0x01,
+                       0x34, 0x37, 0x08, 0xab, 0xd9, 0x1f, 0x09, 0xb1,
+                       0xce, 0x5b, 0x17, 0xff, 0x74, 0xf8, 0x9c, 0xd5,
+                       0x2c, 0x56, 0x39, 0x79, 0x0f, 0x69, 0x44, 0x75,
+                       0x58, 0x27, 0x01, 0xc4, 0xbf, 0xa7, 0xa1, 0x1d,
+                       0x90, 0x17, 0x77, 0x86, 0x5a, 0x3f, 0xd9, 0xd1,
+                       0x0e, 0xa0, 0x10, 0xf8, 0xec, 0x1e, 0xa5, 0x7f,
+                       0x5e, 0x36, 0xd1, 0xe3, 0x04, 0x2c, 0x70, 0xf7,
+                       0x8e, 0xc0, 0x98, 0x2f, 0x6c, 0x94, 0x2b, 0x41,
+                       0xb7, 0x60, 0x00, 0xb7, 0x2e, 0xb8, 0x02, 0x8d,
+                       0xb8, 0xb0, 0xd3, 0x86, 0xba, 0x1d, 0xd7, 0x90,
+                       0xd6, 0xb6, 0xe1, 0xfc, 0xd7, 0xd8, 0x28, 0x06,
+                       0x63, 0x9b, 0xce, 0x61, 0x24, 0x79, 0xc0, 0x70,
+                       0x52, 0xd0, 0xb6, 0xd4, 0x28, 0x95, 0x24, 0x87,
+                       0x03, 0x1f, 0xb7, 0x9a, 0xda, 0xa3, 0xfb, 0x52,
+                       0x5b, 0x68, 0xe7, 0x4c, 0x8c, 0x24, 0xe1, 0x42,
+                       0xf7, 0xd5, 0xfd, 0xad, 0x06, 0x32, 0x9f, 0xba,
+                       0xc1, 0xfc, 0xdd, 0xc6, 0xfc, 0xfc, 0xb3, 0x38,
+                       0x74, 0x56, 0x58, 0x40, 0x02, 0x37, 0x52, 0x2c,
+                       0x55, 0xcc, 0xb3, 0x9e, 0x7a, 0xe9, 0xd4, 0x38,
+                       0x41, 0x5e, 0x0c, 0x35, 0xe2, 0x11, 0xd1, 0x13,
+                       0xf8, 0xb7, 0x8d, 0x72, 0x6b, 0x22, 0x2a, 0xb0,
+                       0xdb, 0x08, 0xba, 0x35, 0xb9, 0x3f, 0xc8, 0xd3,
+                       0x24, 0x90, 0xec, 0x58, 0xd2, 0x09, 0xc7, 0x2d,
+                       0xed, 0x38, 0x80, 0x36, 0x72, 0x43, 0x27, 0x49,
+                       0x4a, 0x80, 0x8a, 0xa2, 0xe8, 0xd3, 0xda, 0x30,
+                       0x7d, 0xb6, 0x82, 0x37, 0x86, 0x92, 0x86, 0x3e,
+                       0x08, 0xb2, 0x28, 0x5a, 0x55, 0x44, 0x24, 0x7d,
+                       0x40, 0x48, 0x8a, 0xb6, 0x89, 0x58, 0x08, 0xa0,
+                       0xd6, 0x6d, 0x3a, 0x17, 0xbf, 0xf6, 0x54, 0xa2,
+                       0xf5, 0xd3, 0x8c, 0x0f, 0x78, 0x12, 0x57, 0x8b,
+                       0xd5, 0xc2, 0xfd, 0x58, 0x5b, 0x7f, 0x38, 0xe3,
+                       0xcc, 0xb7, 0x7c, 0x48, 0xb3, 0x20, 0xe8, 0x81,
+                       0x14, 0x32, 0x45, 0x05, 0xe0, 0xdb, 0x9f, 0x75,
+                       0x85, 0xb4, 0x6a, 0xfc, 0x95, 0xe3, 0x54, 0x22,
+                       0x12, 0xee, 0x30, 0xfe, 0xd8, 0x30, 0xef, 0x34,
+                       0x50, 0xab, 0x46, 0x30, 0x98, 0x2f, 0xb7, 0xc0,
+                       0x15, 0xa2, 0x83, 0xb6, 0xf2, 0x06, 0x21, 0xa2,
+                       0xc3, 0x26, 0x37, 0x14, 0xd1, 0x4d, 0xb5, 0x10,
+                       0x52, 0x76, 0x4d, 0x6a, 0xee, 0xb5, 0x2b, 0x15,
+                       0xb7, 0xf9, 0x51, 0xe8, 0x2a, 0xaf, 0xc7, 0xfa,
+                       0x77, 0xaf, 0xb0, 0x05, 0x4d, 0xd1, 0x68, 0x8e,
+                       0x74, 0x05, 0x9f, 0x9d, 0x93, 0xa5, 0x3e, 0x7f,
+                       0x4e, 0x5f, 0x9d, 0xcb, 0x09, 0xc7, 0x83, 0xe3,
+                       0x02, 0x9d, 0x27, 0x1f, 0xef, 0x85, 0x05, 0x8d,
+                       0xec, 0x55, 0x88, 0x0f, 0x0d, 0x7c, 0x4c, 0xe8,
+                       0xa1, 0x75, 0xa0, 0xd8, 0x06, 0x47, 0x14, 0xef,
+                       0xaa, 0x61, 0xcf, 0x26, 0x15, 0xad, 0xd8, 0xa3,
+                       0xaa, 0x75, 0xf2, 0x78, 0x4a, 0x5a, 0x61, 0xdf,
+                       0x8b, 0xc7, 0x04, 0xbc, 0xb2, 0x32, 0xd2, 0x7e,
+                       0x42, 0xee, 0xb4, 0x2f, 0x51, 0xff, 0x7b, 0x2e,
+                       0xd3, 0x02, 0xe8, 0xdc, 0x5d, 0x0d, 0x50, 0xdc,
+                       0xae, 0xb7, 0x46, 0xf9, 0xa8, 0xe6, 0xd0, 0x16,
+                       0xcc, 0xe6, 0x2c, 0x81, 0xc7, 0xad, 0xe9, 0xf0,
+                       0x05, 0x72, 0x6d, 0x3d, 0x0a, 0x7a, 0xa9, 0x02,
+                       0xac, 0x82, 0x93, 0x6e, 0xb6, 0x1c, 0x28, 0xfc,
+                       0x44, 0x12, 0xfb, 0x73, 0x77, 0xd4, 0x13, 0x39,
+                       0x29, 0x88, 0x8a, 0xf3, 0x5c, 0xa6, 0x36, 0xa0,
+                       0x2a, 0xed, 0x7e, 0xb1, 0x1d, 0xd6, 0x4c, 0x6b,
+                       0x41, 0x01, 0x18, 0x5d, 0x5d, 0x07, 0x97, 0xa6,
+                       0x4b, 0xef, 0x31, 0x18, 0xea, 0xac, 0xb1, 0x84,
+                       0x21, 0xed, 0xda, 0x86,
+               },
+               .rlen = 4100,
+       },
+};
+
+static struct cipher_testvec aes_ctr_dec_tv_template[] = {
+       { /* From RFC 3686 */
+               .key    = { 0xae, 0x68, 0x52, 0xf8, 0x12, 0x10, 0x67, 0xcc,
+                           0x4b, 0xf7, 0xa5, 0x76, 0x55, 0x77, 0xf3, 0x9e,
+                           0x00, 0x00, 0x00, 0x30 },
+               .klen   = 20,
+               .iv     = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+               .input  = { 0xe4, 0x09, 0x5d, 0x4f, 0xb7, 0xa7, 0xb3, 0x79,
+                           0x2d, 0x61, 0x75, 0xa3, 0x26, 0x13, 0x11, 0xb8 },
+               .ilen   = 16,
+               .result = { "Single block msg" },
+               .rlen   = 16,
+       }, {
+               .key    = { 0x7e, 0x24, 0x06, 0x78, 0x17, 0xfa, 0xe0, 0xd7,
+                           0x43, 0xd6, 0xce, 0x1f, 0x32, 0x53, 0x91, 0x63,
+                           0x00, 0x6c, 0xb6, 0xdb },
+               .klen   = 20,
+               .iv     = { 0xc0, 0x54, 0x3b, 0x59, 0xda, 0x48, 0xd9, 0x0b },
+               .input  = { 0x51, 0x04, 0xa1, 0x06, 0x16, 0x8a, 0x72, 0xd9,
+                           0x79, 0x0d, 0x41, 0xee, 0x8e, 0xda, 0xd3, 0x88,
+                           0xeb, 0x2e, 0x1e, 0xfc, 0x46, 0xda, 0x57, 0xc8,
+                           0xfc, 0xe6, 0x30, 0xdf, 0x91, 0x41, 0xbe, 0x28 },
+               .ilen   = 32,
+               .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
+               .rlen   = 32,
+       }, {
+               .key    = { 0x16, 0xaf, 0x5b, 0x14, 0x5f, 0xc9, 0xf5, 0x79,
+                           0xc1, 0x75, 0xf9, 0x3e, 0x3b, 0xfb, 0x0e, 0xed,
+                           0x86, 0x3d, 0x06, 0xcc, 0xfd, 0xb7, 0x85, 0x15,
+                           0x00, 0x00, 0x00, 0x48 },
+               .klen   = 28,
+               .iv     = { 0x36, 0x73, 0x3c, 0x14, 0x7d, 0x6d, 0x93, 0xcb },
+               .input  = { 0x4b, 0x55, 0x38, 0x4f, 0xe2, 0x59, 0xc9, 0xc8,
+                           0x4e, 0x79, 0x35, 0xa0, 0x03, 0xcb, 0xe9, 0x28 },
+               .ilen   = 16,
+               .result = { "Single block msg" },
+               .rlen   = 16,
+       }, {
+               .key    = { 0x7c, 0x5c, 0xb2, 0x40, 0x1b, 0x3d, 0xc3, 0x3c,
+                           0x19, 0xe7, 0x34, 0x08, 0x19, 0xe0, 0xf6, 0x9c,
+                           0x67, 0x8c, 0x3d, 0xb8, 0xe6, 0xf6, 0xa9, 0x1a,
+                           0x00, 0x96, 0xb0, 0x3b },
+               .klen   = 28,
+               .iv     = { 0x02, 0x0c, 0x6e, 0xad, 0xc2, 0xcb, 0x50, 0x0d },
+               .input  = { 0x45, 0x32, 0x43, 0xfc, 0x60, 0x9b, 0x23, 0x32,
+                           0x7e, 0xdf, 0xaa, 0xfa, 0x71, 0x31, 0xcd, 0x9f,
+                           0x84, 0x90, 0x70, 0x1c, 0x5a, 0xd4, 0xa7, 0x9c,
+                           0xfc, 0x1f, 0xe0, 0xff, 0x42, 0xf4, 0xfb, 0x00 },
+               .ilen   = 32,
+               .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
+               .rlen   = 32,
+       }, {
+               .key    = { 0x77, 0x6b, 0xef, 0xf2, 0x85, 0x1d, 0xb0, 0x6f,
+                           0x4c, 0x8a, 0x05, 0x42, 0xc8, 0x69, 0x6f, 0x6c,
+                           0x6a, 0x81, 0xaf, 0x1e, 0xec, 0x96, 0xb4, 0xd3,
+                           0x7f, 0xc1, 0xd6, 0x89, 0xe6, 0xc1, 0xc1, 0x04,
+                           0x00, 0x00, 0x00, 0x60 },
+               .klen   = 36,
+               .iv     = { 0xdb, 0x56, 0x72, 0xc9, 0x7a, 0xa8, 0xf0, 0xb2 },
+               .input  = { 0x14, 0x5a, 0xd0, 0x1d, 0xbf, 0x82, 0x4e, 0xc7,
+                           0x56, 0x08, 0x63, 0xdc, 0x71, 0xe3, 0xe0, 0xc0 },
+               .ilen   = 16,
+               .result = { "Single block msg" },
+               .rlen   = 16,
+       }, {
+               .key    = { 0xf6, 0xd6, 0x6d, 0x6b, 0xd5, 0x2d, 0x59, 0xbb,
+                           0x07, 0x96, 0x36, 0x58, 0x79, 0xef, 0xf8, 0x86,
+                           0xc6, 0x6d, 0xd5, 0x1a, 0x5b, 0x6a, 0x99, 0x74,
+                           0x4b, 0x50, 0x59, 0x0c, 0x87, 0xa2, 0x38, 0x84,
+                           0x00, 0xfa, 0xac, 0x24 },
+               .klen   = 36,
+               .iv     = { 0xc1, 0x58, 0x5e, 0xf1, 0x5a, 0x43, 0xd8, 0x75 },
+               .input  = { 0xf0, 0x5e, 0x23, 0x1b, 0x38, 0x94, 0x61, 0x2c,
+                           0x49, 0xee, 0x00, 0x0b, 0x80, 0x4e, 0xb2, 0xa9,
+                           0xb8, 0x30, 0x6b, 0x50, 0x8f, 0x83, 0x9d, 0x6a,
+                           0x55, 0x30, 0x83, 0x1d, 0x93, 0x44, 0xaf, 0x1c },
+               .ilen   = 32,
+               .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
+               .rlen   = 32,
+       },
+};
+
+static struct aead_testvec aes_gcm_enc_tv_template[] = {
+       { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */
+               .klen   = 16,
+               .result = { 0x58, 0xe2, 0xfc, 0xce, 0xfa, 0x7e, 0x30, 0x61,
+                           0x36, 0x7f, 0x1d, 0x57, 0xa4, 0xe7, 0x45, 0x5a },
+               .rlen   = 16,
+       }, {
+               .klen   = 16,
+               .ilen   = 16,
+               .result = { 0x03, 0x88, 0xda, 0xce, 0x60, 0xb6, 0xa3, 0x92,
+                           0xf3, 0x28, 0xc2, 0xb9, 0x71, 0xb2, 0xfe, 0x78,
+                           0xab, 0x6e, 0x47, 0xd4, 0x2c, 0xec, 0x13, 0xbd,
+                           0xf5, 0x3a, 0x67, 0xb2, 0x12, 0x57, 0xbd, 0xdf },
+               .rlen   = 32,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
+               .klen   = 16,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
+               .ilen   = 64,
+               .result = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
+                           0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
+                           0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
+                           0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
+                           0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
+                           0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
+                           0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
+                           0x3d, 0x58, 0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85,
+                           0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6,
+                           0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 },
+               .rlen   = 80,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
+               .klen   = 16,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39 },
+               .ilen   = 60,
+               .assoc  = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xab, 0xad, 0xda, 0xd2 },
+               .alen   = 20,
+               .result = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
+                           0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
+                           0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
+                           0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
+                           0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
+                           0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
+                           0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
+                           0x3d, 0x58, 0xe0, 0x91,
+                           0x5b, 0xc9, 0x4f, 0xbc, 0x32, 0x21, 0xa5, 0xdb,
+                           0x94, 0xfa, 0xe9, 0x5a, 0xe7, 0x12, 0x1a, 0x47 },
+               .rlen   = 76,
+       }, {
+               .klen   = 24,
+               .result = { 0xcd, 0x33, 0xb2, 0x8a, 0xc7, 0x73, 0xf7, 0x4b,
+                           0xa0, 0x0e, 0xd1, 0xf3, 0x12, 0x57, 0x24, 0x35 },
+               .rlen   = 16,
+       }, {
+               .klen   = 24,
+               .ilen   = 16,
+               .result = { 0x98, 0xe7, 0x24, 0x7c, 0x07, 0xf0, 0xfe, 0x41,
+                           0x1c, 0x26, 0x7e, 0x43, 0x84, 0xb0, 0xf6, 0x00,
+                           0x2f, 0xf5, 0x8d, 0x80, 0x03, 0x39, 0x27, 0xab,
+                           0x8e, 0xf4, 0xd4, 0x58, 0x75, 0x14, 0xf0, 0xfb },
+               .rlen   = 32,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
+                           0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
+               .klen   = 24,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
+               .ilen   = 64,
+               .result = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
+                           0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
+                           0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
+                           0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
+                           0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
+                           0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
+                           0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
+                           0xcc, 0xda, 0x27, 0x10, 0xac, 0xad, 0xe2, 0x56,
+                           0x99, 0x24, 0xa7, 0xc8, 0x58, 0x73, 0x36, 0xbf,
+                           0xb1, 0x18, 0x02, 0x4d, 0xb8, 0x67, 0x4a, 0x14 },
+               .rlen   = 80,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
+                           0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
+               .klen   = 24,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39 },
+               .ilen   = 60,
+               .assoc  = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xab, 0xad, 0xda, 0xd2 },
+               .alen   = 20,
+               .result = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
+                           0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
+                           0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
+                           0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
+                           0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
+                           0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
+                           0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
+                           0xcc, 0xda, 0x27, 0x10,
+                           0x25, 0x19, 0x49, 0x8e, 0x80, 0xf1, 0x47, 0x8f,
+                           0x37, 0xba, 0x55, 0xbd, 0x6d, 0x27, 0x61, 0x8c },
+               .rlen   = 76,
+               .np     = 2,
+               .tap    = { 32, 28 },
+               .anp    = 2,
+               .atap   = { 8, 12 }
+       }, {
+               .klen   = 32,
+               .result = { 0x53, 0x0f, 0x8a, 0xfb, 0xc7, 0x45, 0x36, 0xb9,
+                           0xa9, 0x63, 0xb4, 0xf1, 0xc4, 0xcb, 0x73, 0x8b },
+               .rlen   = 16,
+       }
+};
+
+static struct aead_testvec aes_gcm_dec_tv_template[] = {
+       { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */
+               .klen   = 32,
+               .input  = { 0xce, 0xa7, 0x40, 0x3d, 0x4d, 0x60, 0x6b, 0x6e,
+                           0x07, 0x4e, 0xc5, 0xd3, 0xba, 0xf3, 0x9d, 0x18,
+                           0xd0, 0xd1, 0xc8, 0xa7, 0x99, 0x99, 0x6b, 0xf0,
+                           0x26, 0x5b, 0x98, 0xb5, 0xd4, 0x8a, 0xb9, 0x19 },
+               .ilen   = 32,
+               .rlen   = 16,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
+                           0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
+               .klen   = 32,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07,
+                           0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d,
+                           0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9,
+                           0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa,
+                           0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d,
+                           0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38,
+                           0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a,
+                           0xbc, 0xc9, 0xf6, 0x62, 0x89, 0x80, 0x15, 0xad,
+                           0xb0, 0x94, 0xda, 0xc5, 0xd9, 0x34, 0x71, 0xbd,
+                           0xec, 0x1a, 0x50, 0x22, 0x70, 0xe3, 0xcc, 0x6c },
+               .ilen   = 80,
+               .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
+               .rlen   = 64,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
+                           0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
+               .klen   = 32,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07,
+                           0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d,
+                           0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9,
+                           0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa,
+                           0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d,
+                           0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38,
+                           0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a,
+                           0xbc, 0xc9, 0xf6, 0x62,
+                           0x76, 0xfc, 0x6e, 0xce, 0x0f, 0x4e, 0x17, 0x68,
+                           0xcd, 0xdf, 0x88, 0x53, 0xbb, 0x2d, 0x55, 0x1b },
+               .ilen   = 76,
+               .assoc  = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xab, 0xad, 0xda, 0xd2 },
+               .alen   = 20,
+               .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39 },
+               .rlen   = 60,
+               .np     = 2,
+               .tap    = { 48, 28 },
+               .anp    = 3,
+               .atap   = { 8, 8, 4 }
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
+               .klen   = 16,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
+                           0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
+                           0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
+                           0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
+                           0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
+                           0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
+                           0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
+                           0x3d, 0x58, 0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85,
+                           0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6,
+                           0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 },
+               .ilen   = 80,
+               .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
+               .rlen   = 64,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
+               .klen   = 16,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
+                           0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
+                           0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
+                           0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
+                           0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
+                           0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
+                           0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
+                           0x3d, 0x58, 0xe0, 0x91,
+                           0x5b, 0xc9, 0x4f, 0xbc, 0x32, 0x21, 0xa5, 0xdb,
+                           0x94, 0xfa, 0xe9, 0x5a, 0xe7, 0x12, 0x1a, 0x47 },
+               .ilen   = 76,
+               .assoc  = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xab, 0xad, 0xda, 0xd2 },
+               .alen   = 20,
+               .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39 },
+               .rlen   = 60,
+       }, {
+               .klen   = 24,
+               .input  = { 0x98, 0xe7, 0x24, 0x7c, 0x07, 0xf0, 0xfe, 0x41,
+                           0x1c, 0x26, 0x7e, 0x43, 0x84, 0xb0, 0xf6, 0x00,
+                           0x2f, 0xf5, 0x8d, 0x80, 0x03, 0x39, 0x27, 0xab,
+                           0x8e, 0xf4, 0xd4, 0x58, 0x75, 0x14, 0xf0, 0xfb },
+               .ilen   = 32,
+               .rlen   = 16,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
+                           0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
+               .klen   = 24,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
+                           0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
+                           0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
+                           0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
+                           0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
+                           0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
+                           0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
+                           0xcc, 0xda, 0x27, 0x10, 0xac, 0xad, 0xe2, 0x56,
+                           0x99, 0x24, 0xa7, 0xc8, 0x58, 0x73, 0x36, 0xbf,
+                           0xb1, 0x18, 0x02, 0x4d, 0xb8, 0x67, 0x4a, 0x14 },
+               .ilen   = 80,
+               .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
+               .rlen   = 64,
+       }, {
+               .key    = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
+                           0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
+                           0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
+               .klen   = 24,
+               .iv     = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
+                           0xde, 0xca, 0xf8, 0x88 },
+               .input  = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
+                           0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
+                           0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
+                           0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
+                           0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
+                           0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
+                           0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
+                           0xcc, 0xda, 0x27, 0x10,
+                           0x25, 0x19, 0x49, 0x8e, 0x80, 0xf1, 0x47, 0x8f,
+                           0x37, 0xba, 0x55, 0xbd, 0x6d, 0x27, 0x61, 0x8c },
+               .ilen   = 76,
+               .assoc  = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
+                           0xab, 0xad, 0xda, 0xd2 },
+               .alen   = 20,
+               .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
+                           0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
+                           0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
+                           0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
+                           0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
+                           0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
+                           0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
+                           0xba, 0x63, 0x7b, 0x39 },
+               .rlen   = 60,
+       }
+};
+
+static struct aead_testvec aes_ccm_enc_tv_template[] = {
+       { /* From RFC 3610 */
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
+               .alen   = 8,
+               .input  = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e },
+               .ilen   = 23,
+               .result = { 0x58, 0x8c, 0x97, 0x9a, 0x61, 0xc6, 0x63, 0xd2,
+                           0xf0, 0x66, 0xd0, 0xc2, 0xc0, 0xf9, 0x89, 0x80,
+                           0x6d, 0x5f, 0x6b, 0x61, 0xda, 0xc3, 0x84, 0x17,
+                           0xe8, 0xd1, 0x2c, 0xfd, 0xf9, 0x26, 0xe0 },
+               .rlen   = 31,
+       }, {
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x07, 0x06, 0x05, 0x04,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b },
+               .alen   = 12,
+               .input  = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
+                           0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
+                           0x1c, 0x1d, 0x1e, 0x1f },
+               .ilen   = 20,
+               .result = { 0xdc, 0xf1, 0xfb, 0x7b, 0x5d, 0x9e, 0x23, 0xfb,
+                           0x9d, 0x4e, 0x13, 0x12, 0x53, 0x65, 0x8a, 0xd8,
+                           0x6e, 0xbd, 0xca, 0x3e, 0x51, 0xe8, 0x3f, 0x07,
+                           0x7d, 0x9c, 0x2d, 0x93 },
+               .rlen   = 28,
+       }, {
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x0b, 0x0a, 0x09, 0x08,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
+               .alen   = 8,
+               .input  = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+                           0x20 },
+               .ilen   = 25,
+               .result = { 0x82, 0x53, 0x1a, 0x60, 0xcc, 0x24, 0x94, 0x5a,
+                           0x4b, 0x82, 0x79, 0x18, 0x1a, 0xb5, 0xc8, 0x4d,
+                           0xf2, 0x1c, 0xe7, 0xf9, 0xb7, 0x3f, 0x42, 0xe1,
+                           0x97, 0xea, 0x9c, 0x07, 0xe5, 0x6b, 0x5e, 0xb1,
+                           0x7e, 0x5f, 0x4e },
+               .rlen   = 35,
+       }, {
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x0c, 0x0b, 0x0a, 0x09,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b },
+               .alen   = 12,
+               .input  = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
+                           0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
+                           0x1c, 0x1d, 0x1e },
+               .ilen   = 19,
+               .result = { 0x07, 0x34, 0x25, 0x94, 0x15, 0x77, 0x85, 0x15,
+                           0x2b, 0x07, 0x40, 0x98, 0x33, 0x0a, 0xbb, 0x14,
+                           0x1b, 0x94, 0x7b, 0x56, 0x6a, 0xa9, 0x40, 0x6b,
+                           0x4d, 0x99, 0x99, 0x88, 0xdd },
+               .rlen   = 29,
+       }, {
+               .key    = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
+                           0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x33, 0x56, 0x8e, 0xf7, 0xb2, 0x63,
+                           0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
+               .assoc  = { 0x63, 0x01, 0x8f, 0x76, 0xdc, 0x8a, 0x1b, 0xcb },
+               .alen   = 8,
+               .input  = { 0x90, 0x20, 0xea, 0x6f, 0x91, 0xbd, 0xd8, 0x5a,
+                           0xfa, 0x00, 0x39, 0xba, 0x4b, 0xaf, 0xf9, 0xbf,
+                           0xb7, 0x9c, 0x70, 0x28, 0x94, 0x9c, 0xd0, 0xec },
+               .ilen   = 24,
+               .result = { 0x4c, 0xcb, 0x1e, 0x7c, 0xa9, 0x81, 0xbe, 0xfa,
+                           0xa0, 0x72, 0x6c, 0x55, 0xd3, 0x78, 0x06, 0x12,
+                           0x98, 0xc8, 0x5c, 0x92, 0x81, 0x4a, 0xbc, 0x33,
+                           0xc5, 0x2e, 0xe8, 0x1d, 0x7d, 0x77, 0xc0, 0x8a },
+               .rlen   = 32,
+       }, {
+               .key    = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
+                           0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0xd5, 0x60, 0x91, 0x2d, 0x3f, 0x70,
+                           0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
+               .assoc  = { 0xcd, 0x90, 0x44, 0xd2, 0xb7, 0x1f, 0xdb, 0x81,
+                           0x20, 0xea, 0x60, 0xc0 },
+               .alen   = 12,
+               .input  = { 0x64, 0x35, 0xac, 0xba, 0xfb, 0x11, 0xa8, 0x2e,
+                           0x2f, 0x07, 0x1d, 0x7c, 0xa4, 0xa5, 0xeb, 0xd9,
+                           0x3a, 0x80, 0x3b, 0xa8, 0x7f },
+               .ilen   = 21,
+               .result = { 0x00, 0x97, 0x69, 0xec, 0xab, 0xdf, 0x48, 0x62,
+                           0x55, 0x94, 0xc5, 0x92, 0x51, 0xe6, 0x03, 0x57,
+                           0x22, 0x67, 0x5e, 0x04, 0xc8, 0x47, 0x09, 0x9e,
+                           0x5a, 0xe0, 0x70, 0x45, 0x51 },
+               .rlen   = 29,
+       }, {
+               .key    = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
+                           0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x42, 0xff, 0xf8, 0xf1, 0x95, 0x1c,
+                           0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
+               .assoc  = { 0xd8, 0x5b, 0xc7, 0xe6, 0x9f, 0x94, 0x4f, 0xb8 },
+               .alen   = 8,
+               .input  = { 0x8a, 0x19, 0xb9, 0x50, 0xbc, 0xf7, 0x1a, 0x01,
+                           0x8e, 0x5e, 0x67, 0x01, 0xc9, 0x17, 0x87, 0x65,
+                           0x98, 0x09, 0xd6, 0x7d, 0xbe, 0xdd, 0x18 },
+               .ilen   = 23,
+               .result = { 0xbc, 0x21, 0x8d, 0xaa, 0x94, 0x74, 0x27, 0xb6,
+                           0xdb, 0x38, 0x6a, 0x99, 0xac, 0x1a, 0xef, 0x23,
+                           0xad, 0xe0, 0xb5, 0x29, 0x39, 0xcb, 0x6a, 0x63,
+                           0x7c, 0xf9, 0xbe, 0xc2, 0x40, 0x88, 0x97, 0xc6,
+                           0xba },
+               .rlen   = 33,
+       },
+};
+
+static struct aead_testvec aes_ccm_dec_tv_template[] = {
+       { /* From RFC 3610 */
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
+               .alen   = 8,
+               .input  = { 0x58, 0x8c, 0x97, 0x9a, 0x61, 0xc6, 0x63, 0xd2,
+                           0xf0, 0x66, 0xd0, 0xc2, 0xc0, 0xf9, 0x89, 0x80,
+                           0x6d, 0x5f, 0x6b, 0x61, 0xda, 0xc3, 0x84, 0x17,
+                           0xe8, 0xd1, 0x2c, 0xfd, 0xf9, 0x26, 0xe0 },
+               .ilen   = 31,
+               .result = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e },
+               .rlen   = 23,
+       }, {
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x07, 0x06, 0x05, 0x04,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b },
+               .alen   = 12,
+               .input  = { 0xdc, 0xf1, 0xfb, 0x7b, 0x5d, 0x9e, 0x23, 0xfb,
+                           0x9d, 0x4e, 0x13, 0x12, 0x53, 0x65, 0x8a, 0xd8,
+                           0x6e, 0xbd, 0xca, 0x3e, 0x51, 0xe8, 0x3f, 0x07,
+                           0x7d, 0x9c, 0x2d, 0x93 },
+               .ilen   = 28,
+               .result = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
+                           0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
+                           0x1c, 0x1d, 0x1e, 0x1f },
+               .rlen   = 20,
+       }, {
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x0b, 0x0a, 0x09, 0x08,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
+               .alen   = 8,
+               .input  = { 0x82, 0x53, 0x1a, 0x60, 0xcc, 0x24, 0x94, 0x5a,
+                           0x4b, 0x82, 0x79, 0x18, 0x1a, 0xb5, 0xc8, 0x4d,
+                           0xf2, 0x1c, 0xe7, 0xf9, 0xb7, 0x3f, 0x42, 0xe1,
+                           0x97, 0xea, 0x9c, 0x07, 0xe5, 0x6b, 0x5e, 0xb1,
+                           0x7e, 0x5f, 0x4e },
+               .ilen   = 35,
+               .result = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                           0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                           0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+                           0x20 },
+               .rlen   = 25,
+       }, {
+               .key    = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                           0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x00, 0x00, 0x0c, 0x0b, 0x0a, 0x09,
+                           0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
+               .assoc  = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0a, 0x0b },
+               .alen   = 12,
+               .input  = { 0x07, 0x34, 0x25, 0x94, 0x15, 0x77, 0x85, 0x15,
+                           0x2b, 0x07, 0x40, 0x98, 0x33, 0x0a, 0xbb, 0x14,
+                           0x1b, 0x94, 0x7b, 0x56, 0x6a, 0xa9, 0x40, 0x6b,
+                           0x4d, 0x99, 0x99, 0x88, 0xdd },
+               .ilen   = 29,
+               .result = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
+                           0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
+                           0x1c, 0x1d, 0x1e },
+               .rlen   = 19,
+       }, {
+               .key    = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
+                           0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x33, 0x56, 0x8e, 0xf7, 0xb2, 0x63,
+                           0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
+               .assoc  = { 0x63, 0x01, 0x8f, 0x76, 0xdc, 0x8a, 0x1b, 0xcb },
+               .alen   = 8,
+               .input  = { 0x4c, 0xcb, 0x1e, 0x7c, 0xa9, 0x81, 0xbe, 0xfa,
+                           0xa0, 0x72, 0x6c, 0x55, 0xd3, 0x78, 0x06, 0x12,
+                           0x98, 0xc8, 0x5c, 0x92, 0x81, 0x4a, 0xbc, 0x33,
+                           0xc5, 0x2e, 0xe8, 0x1d, 0x7d, 0x77, 0xc0, 0x8a },
+               .ilen   = 32,
+               .result = { 0x90, 0x20, 0xea, 0x6f, 0x91, 0xbd, 0xd8, 0x5a,
+                           0xfa, 0x00, 0x39, 0xba, 0x4b, 0xaf, 0xf9, 0xbf,
+                           0xb7, 0x9c, 0x70, 0x28, 0x94, 0x9c, 0xd0, 0xec },
+               .rlen   = 24,
+       }, {
+               .key    = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
+                           0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0xd5, 0x60, 0x91, 0x2d, 0x3f, 0x70,
+                           0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
+               .assoc  = { 0xcd, 0x90, 0x44, 0xd2, 0xb7, 0x1f, 0xdb, 0x81,
+                           0x20, 0xea, 0x60, 0xc0 },
+               .alen   = 12,
+               .input  = { 0x00, 0x97, 0x69, 0xec, 0xab, 0xdf, 0x48, 0x62,
+                           0x55, 0x94, 0xc5, 0x92, 0x51, 0xe6, 0x03, 0x57,
+                           0x22, 0x67, 0x5e, 0x04, 0xc8, 0x47, 0x09, 0x9e,
+                           0x5a, 0xe0, 0x70, 0x45, 0x51 },
+               .ilen   = 29,
+               .result = { 0x64, 0x35, 0xac, 0xba, 0xfb, 0x11, 0xa8, 0x2e,
+                           0x2f, 0x07, 0x1d, 0x7c, 0xa4, 0xa5, 0xeb, 0xd9,
+                           0x3a, 0x80, 0x3b, 0xa8, 0x7f },
+               .rlen   = 21,
+       }, {
+               .key    = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
+                           0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
+               .klen   = 16,
+               .iv     = { 0x01, 0x00, 0x42, 0xff, 0xf8, 0xf1, 0x95, 0x1c,
+                           0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
+               .assoc  = { 0xd8, 0x5b, 0xc7, 0xe6, 0x9f, 0x94, 0x4f, 0xb8 },
+               .alen   = 8,
+               .input  = { 0xbc, 0x21, 0x8d, 0xaa, 0x94, 0x74, 0x27, 0xb6,
+                           0xdb, 0x38, 0x6a, 0x99, 0xac, 0x1a, 0xef, 0x23,
+                           0xad, 0xe0, 0xb5, 0x29, 0x39, 0xcb, 0x6a, 0x63,
+                           0x7c, 0xf9, 0xbe, 0xc2, 0x40, 0x88, 0x97, 0xc6,
+                           0xba },
+               .ilen   = 33,
+               .result = { 0x8a, 0x19, 0xb9, 0x50, 0xbc, 0xf7, 0x1a, 0x01,
+                           0x8e, 0x5e, 0x67, 0x01, 0xc9, 0x17, 0x87, 0x65,
+                           0x98, 0x09, 0xd6, 0x7d, 0xbe, 0xdd, 0x18 },
+               .rlen   = 23,
+       },
+};
+
 /* Cast5 test vectors from RFC 2144 */
 #define CAST5_ENC_TEST_VECTORS 3
 #define CAST5_DEC_TEST_VECTORS 3
@@ -4317,6 +6425,1211 @@ static struct cipher_testvec seed_dec_tv_template[] = {
        }
 };
 
+#define SALSA20_STREAM_ENC_TEST_VECTORS 5
+static struct cipher_testvec salsa20_stream_enc_tv_template[] = {
+       /*
+        * Test vectors from verified.test-vectors submitted to ECRYPT.
+        * They are truncated to sizes 39, 64, 111 and 129 to test a
+        * variety of input lengths.
+        */
+       { /* Set 3, vector 0 */
+               .key    = {
+                           0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                           0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F
+                         },
+               .klen   = 16,
+               .iv     = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+               .input  = {
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                         },
+               .ilen   = 39,
+               .result = {
+                           0x2D, 0xD5, 0xC3, 0xF7, 0xBA, 0x2B, 0x20, 0xF7,
+                           0x68, 0x02, 0x41, 0x0C, 0x68, 0x86, 0x88, 0x89,
+                           0x5A, 0xD8, 0xC1, 0xBD, 0x4E, 0xA6, 0xC9, 0xB1,
+                           0x40, 0xFB, 0x9B, 0x90, 0xE2, 0x10, 0x49, 0xBF,
+                           0x58, 0x3F, 0x52, 0x79, 0x70, 0xEB, 0xC1,
+                         },
+               .rlen   = 39,
+       }, { /* Set 5, vector 0 */
+               .key    = {
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+                         },
+               .klen   = 16,
+               .iv     = { 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+               .input  = {
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                         },
+               .ilen   = 64,
+               .result = {
+                           0xB6, 0x6C, 0x1E, 0x44, 0x46, 0xDD, 0x95, 0x57,
+                           0xE5, 0x78, 0xE2, 0x23, 0xB0, 0xB7, 0x68, 0x01,
+                           0x7B, 0x23, 0xB2, 0x67, 0xBB, 0x02, 0x34, 0xAE,
+                           0x46, 0x26, 0xBF, 0x44, 0x3F, 0x21, 0x97, 0x76,
+                           0x43, 0x6F, 0xB1, 0x9F, 0xD0, 0xE8, 0x86, 0x6F,
+                           0xCD, 0x0D, 0xE9, 0xA9, 0x53, 0x8F, 0x4A, 0x09,
+                           0xCA, 0x9A, 0xC0, 0x73, 0x2E, 0x30, 0xBC, 0xF9,
+                           0x8E, 0x4F, 0x13, 0xE4, 0xB9, 0xE2, 0x01, 0xD9,
+                         },
+               .rlen   = 64,
+       }, { /* Set 3, vector 27 */
+               .key    = {
+                           0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20, 0x21, 0x22,
+                           0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A,
+                           0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32,
+                           0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A
+                         },
+               .klen   = 32,
+               .iv     = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+               .input  = {
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                         },
+               .ilen   = 111,
+               .result = {
+                           0xAE, 0x39, 0x50, 0x8E, 0xAC, 0x9A, 0xEC, 0xE7,
+                           0xBF, 0x97, 0xBB, 0x20, 0xB9, 0xDE, 0xE4, 0x1F,
+                           0x87, 0xD9, 0x47, 0xF8, 0x28, 0x91, 0x35, 0x98,
+                           0xDB, 0x72, 0xCC, 0x23, 0x29, 0x48, 0x56, 0x5E,
+                           0x83, 0x7E, 0x0B, 0xF3, 0x7D, 0x5D, 0x38, 0x7B,
+                           0x2D, 0x71, 0x02, 0xB4, 0x3B, 0xB5, 0xD8, 0x23,
+                           0xB0, 0x4A, 0xDF, 0x3C, 0xEC, 0xB6, 0xD9, 0x3B,
+                           0x9B, 0xA7, 0x52, 0xBE, 0xC5, 0xD4, 0x50, 0x59,
+
+                           0x15, 0x14, 0xB4, 0x0E, 0x40, 0xE6, 0x53, 0xD1,
+                           0x83, 0x9C, 0x5B, 0xA0, 0x92, 0x29, 0x6B, 0x5E,
+                           0x96, 0x5B, 0x1E, 0x2F, 0xD3, 0xAC, 0xC1, 0x92,
+                           0xB1, 0x41, 0x3F, 0x19, 0x2F, 0xC4, 0x3B, 0xC6,
+                           0x95, 0x46, 0x45, 0x54, 0xE9, 0x75, 0x03, 0x08,
+                           0x44, 0xAF, 0xE5, 0x8A, 0x81, 0x12, 0x09,
+                         },
+               .rlen   = 111,
+
+       }, { /* Set 5, vector 27 */
+               .key    = {
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+                         },
+               .klen   = 32,
+               .iv     = { 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00 },
+               .input  = {
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                           0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+
+                           0x00,
+                         },
+               .ilen   = 129,
+               .result = {
+                           0xD2, 0xDB, 0x1A, 0x5C, 0xF1, 0xC1, 0xAC, 0xDB,
+                           0xE8, 0x1A, 0x7A, 0x43, 0x40, 0xEF, 0x53, 0x43,
+                           0x5E, 0x7F, 0x4B, 0x1A, 0x50, 0x52, 0x3F, 0x8D,
+                           0x28, 0x3D, 0xCF, 0x85, 0x1D, 0x69, 0x6E, 0x60,
+                           0xF2, 0xDE, 0x74, 0x56, 0x18, 0x1B, 0x84, 0x10,
+                           0xD4, 0x62, 0xBA, 0x60, 0x50, 0xF0, 0x61, 0xF2,
+                           0x1C, 0x78, 0x7F, 0xC1, 0x24, 0x34, 0xAF, 0x58,
+                           0xBF, 0x2C, 0x59, 0xCA, 0x90, 0x77, 0xF3, 0xB0,
+
+                           0x5B, 0x4A, 0xDF, 0x89, 0xCE, 0x2C, 0x2F, 0xFC,
+                           0x67, 0xF0, 0xE3, 0x45, 0xE8, 0xB3, 0xB3, 0x75,
+                           0xA0, 0x95, 0x71, 0xA1, 0x29, 0x39, 0x94, 0xCA,
+                           0x45, 0x2F, 0xBD, 0xCB, 0x10, 0xB6, 0xBE, 0x9F,
+                           0x8E, 0xF9, 0xB2, 0x01, 0x0A, 0x5A, 0x0A, 0xB7,
+                           0x6B, 0x9D, 0x70, 0x8E, 0x4B, 0xD6, 0x2F, 0xCD,
+                           0x2E, 0x40, 0x48, 0x75, 0xE9, 0xE2, 0x21, 0x45,
+                           0x0B, 0xC9, 0xB6, 0xB5, 0x66, 0xBC, 0x9A, 0x59,
+
+                           0x5A,
+                         },
+               .rlen   = 129,
+       }, { /* large test vector generated using Crypto++ */
+               .key = {
+                       0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                       0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                       0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                       0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+               },
+               .klen = 32,
+               .iv = {
+                       0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                       0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+               },
+               .input = {
+                       0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                       0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+                       0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+                       0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+                       0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
+                       0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
+                       0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
+                       0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
+                       0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
+                       0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
+                       0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
+                       0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
+                       0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
+                       0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
+                       0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
+                       0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
+                       0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+                       0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+                       0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+                       0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+                       0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+                       0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
+                       0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
+                       0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
+                       0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+                       0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
+                       0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
+                       0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
+                       0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+                       0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
+                       0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+                       0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
+                       0x00, 0x03, 0x06, 0x09, 0x0c, 0x0f, 0x12, 0x15,
+                       0x18, 0x1b, 0x1e, 0x21, 0x24, 0x27, 0x2a, 0x2d,
+                       0x30, 0x33, 0x36, 0x39, 0x3c, 0x3f, 0x42, 0x45,
+                       0x48, 0x4b, 0x4e, 0x51, 0x54, 0x57, 0x5a, 0x5d,
+                       0x60, 0x63, 0x66, 0x69, 0x6c, 0x6f, 0x72, 0x75,
+                       0x78, 0x7b, 0x7e, 0x81, 0x84, 0x87, 0x8a, 0x8d,
+                       0x90, 0x93, 0x96, 0x99, 0x9c, 0x9f, 0xa2, 0xa5,
+                       0xa8, 0xab, 0xae, 0xb1, 0xb4, 0xb7, 0xba, 0xbd,
+                       0xc0, 0xc3, 0xc6, 0xc9, 0xcc, 0xcf, 0xd2, 0xd5,
+                       0xd8, 0xdb, 0xde, 0xe1, 0xe4, 0xe7, 0xea, 0xed,
+                       0xf0, 0xf3, 0xf6, 0xf9, 0xfc, 0xff, 0x02, 0x05,
+                       0x08, 0x0b, 0x0e, 0x11, 0x14, 0x17, 0x1a, 0x1d,
+                       0x20, 0x23, 0x26, 0x29, 0x2c, 0x2f, 0x32, 0x35,
+                       0x38, 0x3b, 0x3e, 0x41, 0x44, 0x47, 0x4a, 0x4d,
+                       0x50, 0x53, 0x56, 0x59, 0x5c, 0x5f, 0x62, 0x65,
+                       0x68, 0x6b, 0x6e, 0x71, 0x74, 0x77, 0x7a, 0x7d,
+                       0x80, 0x83, 0x86, 0x89, 0x8c, 0x8f, 0x92, 0x95,
+                       0x98, 0x9b, 0x9e, 0xa1, 0xa4, 0xa7, 0xaa, 0xad,
+                       0xb0, 0xb3, 0xb6, 0xb9, 0xbc, 0xbf, 0xc2, 0xc5,
+                       0xc8, 0xcb, 0xce, 0xd1, 0xd4, 0xd7, 0xda, 0xdd,
+                       0xe0, 0xe3, 0xe6, 0xe9, 0xec, 0xef, 0xf2, 0xf5,
+                       0xf8, 0xfb, 0xfe, 0x01, 0x04, 0x07, 0x0a, 0x0d,
+                       0x10, 0x13, 0x16, 0x19, 0x1c, 0x1f, 0x22, 0x25,
+                       0x28, 0x2b, 0x2e, 0x31, 0x34, 0x37, 0x3a, 0x3d,
+                       0x40, 0x43, 0x46, 0x49, 0x4c, 0x4f, 0x52, 0x55,
+                       0x58, 0x5b, 0x5e, 0x61, 0x64, 0x67, 0x6a, 0x6d,
+                       0x70, 0x73, 0x76, 0x79, 0x7c, 0x7f, 0x82, 0x85,
+                       0x88, 0x8b, 0x8e, 0x91, 0x94, 0x97, 0x9a, 0x9d,
+                       0xa0, 0xa3, 0xa6, 0xa9, 0xac, 0xaf, 0xb2, 0xb5,
+                       0xb8, 0xbb, 0xbe, 0xc1, 0xc4, 0xc7, 0xca, 0xcd,
+                       0xd0, 0xd3, 0xd6, 0xd9, 0xdc, 0xdf, 0xe2, 0xe5,
+                       0xe8, 0xeb, 0xee, 0xf1, 0xf4, 0xf7, 0xfa, 0xfd,
+                       0x00, 0x05, 0x0a, 0x0f, 0x14, 0x19, 0x1e, 0x23,
+                       0x28, 0x2d, 0x32, 0x37, 0x3c, 0x41, 0x46, 0x4b,
+                       0x50, 0x55, 0x5a, 0x5f, 0x64, 0x69, 0x6e, 0x73,
+                       0x78, 0x7d, 0x82, 0x87, 0x8c, 0x91, 0x96, 0x9b,
+                       0xa0, 0xa5, 0xaa, 0xaf, 0xb4, 0xb9, 0xbe, 0xc3,
+                       0xc8, 0xcd, 0xd2, 0xd7, 0xdc, 0xe1, 0xe6, 0xeb,
+                       0xf0, 0xf5, 0xfa, 0xff, 0x04, 0x09, 0x0e, 0x13,
+                       0x18, 0x1d, 0x22, 0x27, 0x2c, 0x31, 0x36, 0x3b,
+                       0x40, 0x45, 0x4a, 0x4f, 0x54, 0x59, 0x5e, 0x63,
+                       0x68, 0x6d, 0x72, 0x77, 0x7c, 0x81, 0x86, 0x8b,
+                       0x90, 0x95, 0x9a, 0x9f, 0xa4, 0xa9, 0xae, 0xb3,
+                       0xb8, 0xbd, 0xc2, 0xc7, 0xcc, 0xd1, 0xd6, 0xdb,
+                       0xe0, 0xe5, 0xea, 0xef, 0xf4, 0xf9, 0xfe, 0x03,
+                       0x08, 0x0d, 0x12, 0x17, 0x1c, 0x21, 0x26, 0x2b,
+                       0x30, 0x35, 0x3a, 0x3f, 0x44, 0x49, 0x4e, 0x53,
+                       0x58, 0x5d, 0x62, 0x67, 0x6c, 0x71, 0x76, 0x7b,
+                       0x80, 0x85, 0x8a, 0x8f, 0x94, 0x99, 0x9e, 0xa3,
+                       0xa8, 0xad, 0xb2, 0xb7, 0xbc, 0xc1, 0xc6, 0xcb,
+                       0xd0, 0xd5, 0xda, 0xdf, 0xe4, 0xe9, 0xee, 0xf3,
+                       0xf8, 0xfd, 0x02, 0x07, 0x0c, 0x11, 0x16, 0x1b,
+                       0x20, 0x25, 0x2a, 0x2f, 0x34, 0x39, 0x3e, 0x43,
+                       0x48, 0x4d, 0x52, 0x57, 0x5c, 0x61, 0x66, 0x6b,
+                       0x70, 0x75, 0x7a, 0x7f, 0x84, 0x89, 0x8e, 0x93,
+                       0x98, 0x9d, 0xa2, 0xa7, 0xac, 0xb1, 0xb6, 0xbb,
+                       0xc0, 0xc5, 0xca, 0xcf, 0xd4, 0xd9, 0xde, 0xe3,
+                       0xe8, 0xed, 0xf2, 0xf7, 0xfc, 0x01, 0x06, 0x0b,
+                       0x10, 0x15, 0x1a, 0x1f, 0x24, 0x29, 0x2e, 0x33,
+                       0x38, 0x3d, 0x42, 0x47, 0x4c, 0x51, 0x56, 0x5b,
+                       0x60, 0x65, 0x6a, 0x6f, 0x74, 0x79, 0x7e, 0x83,
+                       0x88, 0x8d, 0x92, 0x97, 0x9c, 0xa1, 0xa6, 0xab,
+                       0xb0, 0xb5, 0xba, 0xbf, 0xc4, 0xc9, 0xce, 0xd3,
+                       0xd8, 0xdd, 0xe2, 0xe7, 0xec, 0xf1, 0xf6, 0xfb,
+                       0x00, 0x07, 0x0e, 0x15, 0x1c, 0x23, 0x2a, 0x31,
+                       0x38, 0x3f, 0x46, 0x4d, 0x54, 0x5b, 0x62, 0x69,
+                       0x70, 0x77, 0x7e, 0x85, 0x8c, 0x93, 0x9a, 0xa1,
+                       0xa8, 0xaf, 0xb6, 0xbd, 0xc4, 0xcb, 0xd2, 0xd9,
+                       0xe0, 0xe7, 0xee, 0xf5, 0xfc, 0x03, 0x0a, 0x11,
+                       0x18, 0x1f, 0x26, 0x2d, 0x34, 0x3b, 0x42, 0x49,
+                       0x50, 0x57, 0x5e, 0x65, 0x6c, 0x73, 0x7a, 0x81,
+                       0x88, 0x8f, 0x96, 0x9d, 0xa4, 0xab, 0xb2, 0xb9,
+                       0xc0, 0xc7, 0xce, 0xd5, 0xdc, 0xe3, 0xea, 0xf1,
+                       0xf8, 0xff, 0x06, 0x0d, 0x14, 0x1b, 0x22, 0x29,
+                       0x30, 0x37, 0x3e, 0x45, 0x4c, 0x53, 0x5a, 0x61,
+                       0x68, 0x6f, 0x76, 0x7d, 0x84, 0x8b, 0x92, 0x99,
+                       0xa0, 0xa7, 0xae, 0xb5, 0xbc, 0xc3, 0xca, 0xd1,
+                       0xd8, 0xdf, 0xe6, 0xed, 0xf4, 0xfb, 0x02, 0x09,
+                       0x10, 0x17, 0x1e, 0x25, 0x2c, 0x33, 0x3a, 0x41,
+                       0x48, 0x4f, 0x56, 0x5d, 0x64, 0x6b, 0x72, 0x79,
+                       0x80, 0x87, 0x8e, 0x95, 0x9c, 0xa3, 0xaa, 0xb1,
+                       0xb8, 0xbf, 0xc6, 0xcd, 0xd4, 0xdb, 0xe2, 0xe9,
+                       0xf0, 0xf7, 0xfe, 0x05, 0x0c, 0x13, 0x1a, 0x21,
+                       0x28, 0x2f, 0x36, 0x3d, 0x44, 0x4b, 0x52, 0x59,
+                       0x60, 0x67, 0x6e, 0x75, 0x7c, 0x83, 0x8a, 0x91,
+                       0x98, 0x9f, 0xa6, 0xad, 0xb4, 0xbb, 0xc2, 0xc9,
+                       0xd0, 0xd7, 0xde, 0xe5, 0xec, 0xf3, 0xfa, 0x01,
+                       0x08, 0x0f, 0x16, 0x1d, 0x24, 0x2b, 0x32, 0x39,
+                       0x40, 0x47, 0x4e, 0x55, 0x5c, 0x63, 0x6a, 0x71,
+                       0x78, 0x7f, 0x86, 0x8d, 0x94, 0x9b, 0xa2, 0xa9,
+                       0xb0, 0xb7, 0xbe, 0xc5, 0xcc, 0xd3, 0xda, 0xe1,
+                       0xe8, 0xef, 0xf6, 0xfd, 0x04, 0x0b, 0x12, 0x19,
+                       0x20, 0x27, 0x2e, 0x35, 0x3c, 0x43, 0x4a, 0x51,
+                       0x58, 0x5f, 0x66, 0x6d, 0x74, 0x7b, 0x82, 0x89,
+                       0x90, 0x97, 0x9e, 0xa5, 0xac, 0xb3, 0xba, 0xc1,
+                       0xc8, 0xcf, 0xd6, 0xdd, 0xe4, 0xeb, 0xf2, 0xf9,
+                       0x00, 0x09, 0x12, 0x1b, 0x24, 0x2d, 0x36, 0x3f,
+                       0x48, 0x51, 0x5a, 0x63, 0x6c, 0x75, 0x7e, 0x87,
+                       0x90, 0x99, 0xa2, 0xab, 0xb4, 0xbd, 0xc6, 0xcf,
+                       0xd8, 0xe1, 0xea, 0xf3, 0xfc, 0x05, 0x0e, 0x17,
+                       0x20, 0x29, 0x32, 0x3b, 0x44, 0x4d, 0x56, 0x5f,
+                       0x68, 0x71, 0x7a, 0x83, 0x8c, 0x95, 0x9e, 0xa7,
+                       0xb0, 0xb9, 0xc2, 0xcb, 0xd4, 0xdd, 0xe6, 0xef,
+                       0xf8, 0x01, 0x0a, 0x13, 0x1c, 0x25, 0x2e, 0x37,
+                       0x40, 0x49, 0x52, 0x5b, 0x64, 0x6d, 0x76, 0x7f,
+                       0x88, 0x91, 0x9a, 0xa3, 0xac, 0xb5, 0xbe, 0xc7,
+                       0xd0, 0xd9, 0xe2, 0xeb, 0xf4, 0xfd, 0x06, 0x0f,
+                       0x18, 0x21, 0x2a, 0x33, 0x3c, 0x45, 0x4e, 0x57,
+                       0x60, 0x69, 0x72, 0x7b, 0x84, 0x8d, 0x96, 0x9f,
+                       0xa8, 0xb1, 0xba, 0xc3, 0xcc, 0xd5, 0xde, 0xe7,
+                       0xf0, 0xf9, 0x02, 0x0b, 0x14, 0x1d, 0x26, 0x2f,
+                       0x38, 0x41, 0x4a, 0x53, 0x5c, 0x65, 0x6e, 0x77,
+                       0x80, 0x89, 0x92, 0x9b, 0xa4, 0xad, 0xb6, 0xbf,
+                       0xc8, 0xd1, 0xda, 0xe3, 0xec, 0xf5, 0xfe, 0x07,
+                       0x10, 0x19, 0x22, 0x2b, 0x34, 0x3d, 0x46, 0x4f,
+                       0x58, 0x61, 0x6a, 0x73, 0x7c, 0x85, 0x8e, 0x97,
+                       0xa0, 0xa9, 0xb2, 0xbb, 0xc4, 0xcd, 0xd6, 0xdf,
+                       0xe8, 0xf1, 0xfa, 0x03, 0x0c, 0x15, 0x1e, 0x27,
+                       0x30, 0x39, 0x42, 0x4b, 0x54, 0x5d, 0x66, 0x6f,
+                       0x78, 0x81, 0x8a, 0x93, 0x9c, 0xa5, 0xae, 0xb7,
+                       0xc0, 0xc9, 0xd2, 0xdb, 0xe4, 0xed, 0xf6, 0xff,
+                       0x08, 0x11, 0x1a, 0x23, 0x2c, 0x35, 0x3e, 0x47,
+                       0x50, 0x59, 0x62, 0x6b, 0x74, 0x7d, 0x86, 0x8f,
+                       0x98, 0xa1, 0xaa, 0xb3, 0xbc, 0xc5, 0xce, 0xd7,
+                       0xe0, 0xe9, 0xf2, 0xfb, 0x04, 0x0d, 0x16, 0x1f,
+                       0x28, 0x31, 0x3a, 0x43, 0x4c, 0x55, 0x5e, 0x67,
+                       0x70, 0x79, 0x82, 0x8b, 0x94, 0x9d, 0xa6, 0xaf,
+                       0xb8, 0xc1, 0xca, 0xd3, 0xdc, 0xe5, 0xee, 0xf7,
+                       0x00, 0x0b, 0x16, 0x21, 0x2c, 0x37, 0x42, 0x4d,
+                       0x58, 0x63, 0x6e, 0x79, 0x84, 0x8f, 0x9a, 0xa5,
+                       0xb0, 0xbb, 0xc6, 0xd1, 0xdc, 0xe7, 0xf2, 0xfd,
+                       0x08, 0x13, 0x1e, 0x29, 0x34, 0x3f, 0x4a, 0x55,
+                       0x60, 0x6b, 0x76, 0x81, 0x8c, 0x97, 0xa2, 0xad,
+                       0xb8, 0xc3, 0xce, 0xd9, 0xe4, 0xef, 0xfa, 0x05,
+                       0x10, 0x1b, 0x26, 0x31, 0x3c, 0x47, 0x52, 0x5d,
+                       0x68, 0x73, 0x7e, 0x89, 0x94, 0x9f, 0xaa, 0xb5,
+                       0xc0, 0xcb, 0xd6, 0xe1, 0xec, 0xf7, 0x02, 0x0d,
+                       0x18, 0x23, 0x2e, 0x39, 0x44, 0x4f, 0x5a, 0x65,
+                       0x70, 0x7b, 0x86, 0x91, 0x9c, 0xa7, 0xb2, 0xbd,
+                       0xc8, 0xd3, 0xde, 0xe9, 0xf4, 0xff, 0x0a, 0x15,
+                       0x20, 0x2b, 0x36, 0x41, 0x4c, 0x57, 0x62, 0x6d,
+                       0x78, 0x83, 0x8e, 0x99, 0xa4, 0xaf, 0xba, 0xc5,
+                       0xd0, 0xdb, 0xe6, 0xf1, 0xfc, 0x07, 0x12, 0x1d,
+                       0x28, 0x33, 0x3e, 0x49, 0x54, 0x5f, 0x6a, 0x75,
+                       0x80, 0x8b, 0x96, 0xa1, 0xac, 0xb7, 0xc2, 0xcd,
+                       0xd8, 0xe3, 0xee, 0xf9, 0x04, 0x0f, 0x1a, 0x25,
+                       0x30, 0x3b, 0x46, 0x51, 0x5c, 0x67, 0x72, 0x7d,
+                       0x88, 0x93, 0x9e, 0xa9, 0xb4, 0xbf, 0xca, 0xd5,
+                       0xe0, 0xeb, 0xf6, 0x01, 0x0c, 0x17, 0x22, 0x2d,
+                       0x38, 0x43, 0x4e, 0x59, 0x64, 0x6f, 0x7a, 0x85,
+                       0x90, 0x9b, 0xa6, 0xb1, 0xbc, 0xc7, 0xd2, 0xdd,
+                       0xe8, 0xf3, 0xfe, 0x09, 0x14, 0x1f, 0x2a, 0x35,
+                       0x40, 0x4b, 0x56, 0x61, 0x6c, 0x77, 0x82, 0x8d,
+                       0x98, 0xa3, 0xae, 0xb9, 0xc4, 0xcf, 0xda, 0xe5,
+                       0xf0, 0xfb, 0x06, 0x11, 0x1c, 0x27, 0x32, 0x3d,
+                       0x48, 0x53, 0x5e, 0x69, 0x74, 0x7f, 0x8a, 0x95,
+                       0xa0, 0xab, 0xb6, 0xc1, 0xcc, 0xd7, 0xe2, 0xed,
+                       0xf8, 0x03, 0x0e, 0x19, 0x24, 0x2f, 0x3a, 0x45,
+                       0x50, 0x5b, 0x66, 0x71, 0x7c, 0x87, 0x92, 0x9d,
+                       0xa8, 0xb3, 0xbe, 0xc9, 0xd4, 0xdf, 0xea, 0xf5,
+                       0x00, 0x0d, 0x1a, 0x27, 0x34, 0x41, 0x4e, 0x5b,
+                       0x68, 0x75, 0x82, 0x8f, 0x9c, 0xa9, 0xb6, 0xc3,
+                       0xd0, 0xdd, 0xea, 0xf7, 0x04, 0x11, 0x1e, 0x2b,
+                       0x38, 0x45, 0x52, 0x5f, 0x6c, 0x79, 0x86, 0x93,
+                       0xa0, 0xad, 0xba, 0xc7, 0xd4, 0xe1, 0xee, 0xfb,
+                       0x08, 0x15, 0x22, 0x2f, 0x3c, 0x49, 0x56, 0x63,
+                       0x70, 0x7d, 0x8a, 0x97, 0xa4, 0xb1, 0xbe, 0xcb,
+                       0xd8, 0xe5, 0xf2, 0xff, 0x0c, 0x19, 0x26, 0x33,
+                       0x40, 0x4d, 0x5a, 0x67, 0x74, 0x81, 0x8e, 0x9b,
+                       0xa8, 0xb5, 0xc2, 0xcf, 0xdc, 0xe9, 0xf6, 0x03,
+                       0x10, 0x1d, 0x2a, 0x37, 0x44, 0x51, 0x5e, 0x6b,
+                       0x78, 0x85, 0x92, 0x9f, 0xac, 0xb9, 0xc6, 0xd3,
+                       0xe0, 0xed, 0xfa, 0x07, 0x14, 0x21, 0x2e, 0x3b,
+                       0x48, 0x55, 0x62, 0x6f, 0x7c, 0x89, 0x96, 0xa3,
+                       0xb0, 0xbd, 0xca, 0xd7, 0xe4, 0xf1, 0xfe, 0x0b,
+                       0x18, 0x25, 0x32, 0x3f, 0x4c, 0x59, 0x66, 0x73,
+                       0x80, 0x8d, 0x9a, 0xa7, 0xb4, 0xc1, 0xce, 0xdb,
+                       0xe8, 0xf5, 0x02, 0x0f, 0x1c, 0x29, 0x36, 0x43,
+                       0x50, 0x5d, 0x6a, 0x77, 0x84, 0x91, 0x9e, 0xab,
+                       0xb8, 0xc5, 0xd2, 0xdf, 0xec, 0xf9, 0x06, 0x13,
+                       0x20, 0x2d, 0x3a, 0x47, 0x54, 0x61, 0x6e, 0x7b,
+                       0x88, 0x95, 0xa2, 0xaf, 0xbc, 0xc9, 0xd6, 0xe3,
+                       0xf0, 0xfd, 0x0a, 0x17, 0x24, 0x31, 0x3e, 0x4b,
+                       0x58, 0x65, 0x72, 0x7f, 0x8c, 0x99, 0xa6, 0xb3,
+                       0xc0, 0xcd, 0xda, 0xe7, 0xf4, 0x01, 0x0e, 0x1b,
+                       0x28, 0x35, 0x42, 0x4f, 0x5c, 0x69, 0x76, 0x83,
+                       0x90, 0x9d, 0xaa, 0xb7, 0xc4, 0xd1, 0xde, 0xeb,
+                       0xf8, 0x05, 0x12, 0x1f, 0x2c, 0x39, 0x46, 0x53,
+                       0x60, 0x6d, 0x7a, 0x87, 0x94, 0xa1, 0xae, 0xbb,
+                       0xc8, 0xd5, 0xe2, 0xef, 0xfc, 0x09, 0x16, 0x23,
+                       0x30, 0x3d, 0x4a, 0x57, 0x64, 0x71, 0x7e, 0x8b,
+                       0x98, 0xa5, 0xb2, 0xbf, 0xcc, 0xd9, 0xe6, 0xf3,
+                       0x00, 0x0f, 0x1e, 0x2d, 0x3c, 0x4b, 0x5a, 0x69,
+                       0x78, 0x87, 0x96, 0xa5, 0xb4, 0xc3, 0xd2, 0xe1,
+                       0xf0, 0xff, 0x0e, 0x1d, 0x2c, 0x3b, 0x4a, 0x59,
+                       0x68, 0x77, 0x86, 0x95, 0xa4, 0xb3, 0xc2, 0xd1,
+                       0xe0, 0xef, 0xfe, 0x0d, 0x1c, 0x2b, 0x3a, 0x49,
+                       0x58, 0x67, 0x76, 0x85, 0x94, 0xa3, 0xb2, 0xc1,
+                       0xd0, 0xdf, 0xee, 0xfd, 0x0c, 0x1b, 0x2a, 0x39,
+                       0x48, 0x57, 0x66, 0x75, 0x84, 0x93, 0xa2, 0xb1,
+                       0xc0, 0xcf, 0xde, 0xed, 0xfc, 0x0b, 0x1a, 0x29,
+                       0x38, 0x47, 0x56, 0x65, 0x74, 0x83, 0x92, 0xa1,
+                       0xb0, 0xbf, 0xce, 0xdd, 0xec, 0xfb, 0x0a, 0x19,
+                       0x28, 0x37, 0x46, 0x55, 0x64, 0x73, 0x82, 0x91,
+                       0xa0, 0xaf, 0xbe, 0xcd, 0xdc, 0xeb, 0xfa, 0x09,
+                       0x18, 0x27, 0x36, 0x45, 0x54, 0x63, 0x72, 0x81,
+                       0x90, 0x9f, 0xae, 0xbd, 0xcc, 0xdb, 0xea, 0xf9,
+                       0x08, 0x17, 0x26, 0x35, 0x44, 0x53, 0x62, 0x71,
+                       0x80, 0x8f, 0x9e, 0xad, 0xbc, 0xcb, 0xda, 0xe9,
+                       0xf8, 0x07, 0x16, 0x25, 0x34, 0x43, 0x52, 0x61,
+                       0x70, 0x7f, 0x8e, 0x9d, 0xac, 0xbb, 0xca, 0xd9,
+                       0xe8, 0xf7, 0x06, 0x15, 0x24, 0x33, 0x42, 0x51,
+                       0x60, 0x6f, 0x7e, 0x8d, 0x9c, 0xab, 0xba, 0xc9,
+                       0xd8, 0xe7, 0xf6, 0x05, 0x14, 0x23, 0x32, 0x41,
+                       0x50, 0x5f, 0x6e, 0x7d, 0x8c, 0x9b, 0xaa, 0xb9,
+                       0xc8, 0xd7, 0xe6, 0xf5, 0x04, 0x13, 0x22, 0x31,
+                       0x40, 0x4f, 0x5e, 0x6d, 0x7c, 0x8b, 0x9a, 0xa9,
+                       0xb8, 0xc7, 0xd6, 0xe5, 0xf4, 0x03, 0x12, 0x21,
+                       0x30, 0x3f, 0x4e, 0x5d, 0x6c, 0x7b, 0x8a, 0x99,
+                       0xa8, 0xb7, 0xc6, 0xd5, 0xe4, 0xf3, 0x02, 0x11,
+                       0x20, 0x2f, 0x3e, 0x4d, 0x5c, 0x6b, 0x7a, 0x89,
+                       0x98, 0xa7, 0xb6, 0xc5, 0xd4, 0xe3, 0xf2, 0x01,
+                       0x10, 0x1f, 0x2e, 0x3d, 0x4c, 0x5b, 0x6a, 0x79,
+                       0x88, 0x97, 0xa6, 0xb5, 0xc4, 0xd3, 0xe2, 0xf1,
+                       0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
+                       0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff,
+                       0x10, 0x21, 0x32, 0x43, 0x54, 0x65, 0x76, 0x87,
+                       0x98, 0xa9, 0xba, 0xcb, 0xdc, 0xed, 0xfe, 0x0f,
+                       0x20, 0x31, 0x42, 0x53, 0x64, 0x75, 0x86, 0x97,
+                       0xa8, 0xb9, 0xca, 0xdb, 0xec, 0xfd, 0x0e, 0x1f,
+                       0x30, 0x41, 0x52, 0x63, 0x74, 0x85, 0x96, 0xa7,
+                       0xb8, 0xc9, 0xda, 0xeb, 0xfc, 0x0d, 0x1e, 0x2f,
+                       0x40, 0x51, 0x62, 0x73, 0x84, 0x95, 0xa6, 0xb7,
+                       0xc8, 0xd9, 0xea, 0xfb, 0x0c, 0x1d, 0x2e, 0x3f,
+                       0x50, 0x61, 0x72, 0x83, 0x94, 0xa5, 0xb6, 0xc7,
+                       0xd8, 0xe9, 0xfa, 0x0b, 0x1c, 0x2d, 0x3e, 0x4f,
+                       0x60, 0x71, 0x82, 0x93, 0xa4, 0xb5, 0xc6, 0xd7,
+                       0xe8, 0xf9, 0x0a, 0x1b, 0x2c, 0x3d, 0x4e, 0x5f,
+                       0x70, 0x81, 0x92, 0xa3, 0xb4, 0xc5, 0xd6, 0xe7,
+                       0xf8, 0x09, 0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f,
+                       0x80, 0x91, 0xa2, 0xb3, 0xc4, 0xd5, 0xe6, 0xf7,
+                       0x08, 0x19, 0x2a, 0x3b, 0x4c, 0x5d, 0x6e, 0x7f,
+                       0x90, 0xa1, 0xb2, 0xc3, 0xd4, 0xe5, 0xf6, 0x07,
+                       0x18, 0x29, 0x3a, 0x4b, 0x5c, 0x6d, 0x7e, 0x8f,
+                       0xa0, 0xb1, 0xc2, 0xd3, 0xe4, 0xf5, 0x06, 0x17,
+                       0x28, 0x39, 0x4a, 0x5b, 0x6c, 0x7d, 0x8e, 0x9f,
+                       0xb0, 0xc1, 0xd2, 0xe3, 0xf4, 0x05, 0x16, 0x27,
+                       0x38, 0x49, 0x5a, 0x6b, 0x7c, 0x8d, 0x9e, 0xaf,
+                       0xc0, 0xd1, 0xe2, 0xf3, 0x04, 0x15, 0x26, 0x37,
+                       0x48, 0x59, 0x6a, 0x7b, 0x8c, 0x9d, 0xae, 0xbf,
+                       0xd0, 0xe1, 0xf2, 0x03, 0x14, 0x25, 0x36, 0x47,
+                       0x58, 0x69, 0x7a, 0x8b, 0x9c, 0xad, 0xbe, 0xcf,
+                       0xe0, 0xf1, 0x02, 0x13, 0x24, 0x35, 0x46, 0x57,
+                       0x68, 0x79, 0x8a, 0x9b, 0xac, 0xbd, 0xce, 0xdf,
+                       0xf0, 0x01, 0x12, 0x23, 0x34, 0x45, 0x56, 0x67,
+                       0x78, 0x89, 0x9a, 0xab, 0xbc, 0xcd, 0xde, 0xef,
+                       0x00, 0x13, 0x26, 0x39, 0x4c, 0x5f, 0x72, 0x85,
+                       0x98, 0xab, 0xbe, 0xd1, 0xe4, 0xf7, 0x0a, 0x1d,
+                       0x30, 0x43, 0x56, 0x69, 0x7c, 0x8f, 0xa2, 0xb5,
+                       0xc8, 0xdb, 0xee, 0x01, 0x14, 0x27, 0x3a, 0x4d,
+                       0x60, 0x73, 0x86, 0x99, 0xac, 0xbf, 0xd2, 0xe5,
+                       0xf8, 0x0b, 0x1e, 0x31, 0x44, 0x57, 0x6a, 0x7d,
+                       0x90, 0xa3, 0xb6, 0xc9, 0xdc, 0xef, 0x02, 0x15,
+                       0x28, 0x3b, 0x4e, 0x61, 0x74, 0x87, 0x9a, 0xad,
+                       0xc0, 0xd3, 0xe6, 0xf9, 0x0c, 0x1f, 0x32, 0x45,
+                       0x58, 0x6b, 0x7e, 0x91, 0xa4, 0xb7, 0xca, 0xdd,
+                       0xf0, 0x03, 0x16, 0x29, 0x3c, 0x4f, 0x62, 0x75,
+                       0x88, 0x9b, 0xae, 0xc1, 0xd4, 0xe7, 0xfa, 0x0d,
+                       0x20, 0x33, 0x46, 0x59, 0x6c, 0x7f, 0x92, 0xa5,
+                       0xb8, 0xcb, 0xde, 0xf1, 0x04, 0x17, 0x2a, 0x3d,
+                       0x50, 0x63, 0x76, 0x89, 0x9c, 0xaf, 0xc2, 0xd5,
+                       0xe8, 0xfb, 0x0e, 0x21, 0x34, 0x47, 0x5a, 0x6d,
+                       0x80, 0x93, 0xa6, 0xb9, 0xcc, 0xdf, 0xf2, 0x05,
+                       0x18, 0x2b, 0x3e, 0x51, 0x64, 0x77, 0x8a, 0x9d,
+                       0xb0, 0xc3, 0xd6, 0xe9, 0xfc, 0x0f, 0x22, 0x35,
+                       0x48, 0x5b, 0x6e, 0x81, 0x94, 0xa7, 0xba, 0xcd,
+                       0xe0, 0xf3, 0x06, 0x19, 0x2c, 0x3f, 0x52, 0x65,
+                       0x78, 0x8b, 0x9e, 0xb1, 0xc4, 0xd7, 0xea, 0xfd,
+                       0x10, 0x23, 0x36, 0x49, 0x5c, 0x6f, 0x82, 0x95,
+                       0xa8, 0xbb, 0xce, 0xe1, 0xf4, 0x07, 0x1a, 0x2d,
+                       0x40, 0x53, 0x66, 0x79, 0x8c, 0x9f, 0xb2, 0xc5,
+                       0xd8, 0xeb, 0xfe, 0x11, 0x24, 0x37, 0x4a, 0x5d,
+                       0x70, 0x83, 0x96, 0xa9, 0xbc, 0xcf, 0xe2, 0xf5,
+                       0x08, 0x1b, 0x2e, 0x41, 0x54, 0x67, 0x7a, 0x8d,
+                       0xa0, 0xb3, 0xc6, 0xd9, 0xec, 0xff, 0x12, 0x25,
+                       0x38, 0x4b, 0x5e, 0x71, 0x84, 0x97, 0xaa, 0xbd,
+                       0xd0, 0xe3, 0xf6, 0x09, 0x1c, 0x2f, 0x42, 0x55,
+                       0x68, 0x7b, 0x8e, 0xa1, 0xb4, 0xc7, 0xda, 0xed,
+                       0x00, 0x15, 0x2a, 0x3f, 0x54, 0x69, 0x7e, 0x93,
+                       0xa8, 0xbd, 0xd2, 0xe7, 0xfc, 0x11, 0x26, 0x3b,
+                       0x50, 0x65, 0x7a, 0x8f, 0xa4, 0xb9, 0xce, 0xe3,
+                       0xf8, 0x0d, 0x22, 0x37, 0x4c, 0x61, 0x76, 0x8b,
+                       0xa0, 0xb5, 0xca, 0xdf, 0xf4, 0x09, 0x1e, 0x33,
+                       0x48, 0x5d, 0x72, 0x87, 0x9c, 0xb1, 0xc6, 0xdb,
+                       0xf0, 0x05, 0x1a, 0x2f, 0x44, 0x59, 0x6e, 0x83,
+                       0x98, 0xad, 0xc2, 0xd7, 0xec, 0x01, 0x16, 0x2b,
+                       0x40, 0x55, 0x6a, 0x7f, 0x94, 0xa9, 0xbe, 0xd3,
+                       0xe8, 0xfd, 0x12, 0x27, 0x3c, 0x51, 0x66, 0x7b,
+                       0x90, 0xa5, 0xba, 0xcf, 0xe4, 0xf9, 0x0e, 0x23,
+                       0x38, 0x4d, 0x62, 0x77, 0x8c, 0xa1, 0xb6, 0xcb,
+                       0xe0, 0xf5, 0x0a, 0x1f, 0x34, 0x49, 0x5e, 0x73,
+                       0x88, 0x9d, 0xb2, 0xc7, 0xdc, 0xf1, 0x06, 0x1b,
+                       0x30, 0x45, 0x5a, 0x6f, 0x84, 0x99, 0xae, 0xc3,
+                       0xd8, 0xed, 0x02, 0x17, 0x2c, 0x41, 0x56, 0x6b,
+                       0x80, 0x95, 0xaa, 0xbf, 0xd4, 0xe9, 0xfe, 0x13,
+                       0x28, 0x3d, 0x52, 0x67, 0x7c, 0x91, 0xa6, 0xbb,
+                       0xd0, 0xe5, 0xfa, 0x0f, 0x24, 0x39, 0x4e, 0x63,
+                       0x78, 0x8d, 0xa2, 0xb7, 0xcc, 0xe1, 0xf6, 0x0b,
+                       0x20, 0x35, 0x4a, 0x5f, 0x74, 0x89, 0x9e, 0xb3,
+                       0xc8, 0xdd, 0xf2, 0x07, 0x1c, 0x31, 0x46, 0x5b,
+                       0x70, 0x85, 0x9a, 0xaf, 0xc4, 0xd9, 0xee, 0x03,
+                       0x18, 0x2d, 0x42, 0x57, 0x6c, 0x81, 0x96, 0xab,
+                       0xc0, 0xd5, 0xea, 0xff, 0x14, 0x29, 0x3e, 0x53,
+                       0x68, 0x7d, 0x92, 0xa7, 0xbc, 0xd1, 0xe6, 0xfb,
+                       0x10, 0x25, 0x3a, 0x4f, 0x64, 0x79, 0x8e, 0xa3,
+                       0xb8, 0xcd, 0xe2, 0xf7, 0x0c, 0x21, 0x36, 0x4b,
+                       0x60, 0x75, 0x8a, 0x9f, 0xb4, 0xc9, 0xde, 0xf3,
+                       0x08, 0x1d, 0x32, 0x47, 0x5c, 0x71, 0x86, 0x9b,
+                       0xb0, 0xc5, 0xda, 0xef, 0x04, 0x19, 0x2e, 0x43,
+                       0x58, 0x6d, 0x82, 0x97, 0xac, 0xc1, 0xd6, 0xeb,
+                       0x00, 0x17, 0x2e, 0x45, 0x5c, 0x73, 0x8a, 0xa1,
+                       0xb8, 0xcf, 0xe6, 0xfd, 0x14, 0x2b, 0x42, 0x59,
+                       0x70, 0x87, 0x9e, 0xb5, 0xcc, 0xe3, 0xfa, 0x11,
+                       0x28, 0x3f, 0x56, 0x6d, 0x84, 0x9b, 0xb2, 0xc9,
+                       0xe0, 0xf7, 0x0e, 0x25, 0x3c, 0x53, 0x6a, 0x81,
+                       0x98, 0xaf, 0xc6, 0xdd, 0xf4, 0x0b, 0x22, 0x39,
+                       0x50, 0x67, 0x7e, 0x95, 0xac, 0xc3, 0xda, 0xf1,
+                       0x08, 0x1f, 0x36, 0x4d, 0x64, 0x7b, 0x92, 0xa9,
+                       0xc0, 0xd7, 0xee, 0x05, 0x1c, 0x33, 0x4a, 0x61,
+                       0x78, 0x8f, 0xa6, 0xbd, 0xd4, 0xeb, 0x02, 0x19,
+                       0x30, 0x47, 0x5e, 0x75, 0x8c, 0xa3, 0xba, 0xd1,
+                       0xe8, 0xff, 0x16, 0x2d, 0x44, 0x5b, 0x72, 0x89,
+                       0xa0, 0xb7, 0xce, 0xe5, 0xfc, 0x13, 0x2a, 0x41,
+                       0x58, 0x6f, 0x86, 0x9d, 0xb4, 0xcb, 0xe2, 0xf9,
+                       0x10, 0x27, 0x3e, 0x55, 0x6c, 0x83, 0x9a, 0xb1,
+                       0xc8, 0xdf, 0xf6, 0x0d, 0x24, 0x3b, 0x52, 0x69,
+                       0x80, 0x97, 0xae, 0xc5, 0xdc, 0xf3, 0x0a, 0x21,
+                       0x38, 0x4f, 0x66, 0x7d, 0x94, 0xab, 0xc2, 0xd9,
+                       0xf0, 0x07, 0x1e, 0x35, 0x4c, 0x63, 0x7a, 0x91,
+                       0xa8, 0xbf, 0xd6, 0xed, 0x04, 0x1b, 0x32, 0x49,
+                       0x60, 0x77, 0x8e, 0xa5, 0xbc, 0xd3, 0xea, 0x01,
+                       0x18, 0x2f, 0x46, 0x5d, 0x74, 0x8b, 0xa2, 0xb9,
+                       0xd0, 0xe7, 0xfe, 0x15, 0x2c, 0x43, 0x5a, 0x71,
+                       0x88, 0x9f, 0xb6, 0xcd, 0xe4, 0xfb, 0x12, 0x29,
+                       0x40, 0x57, 0x6e, 0x85, 0x9c, 0xb3, 0xca, 0xe1,
+                       0xf8, 0x0f, 0x26, 0x3d, 0x54, 0x6b, 0x82, 0x99,
+                       0xb0, 0xc7, 0xde, 0xf5, 0x0c, 0x23, 0x3a, 0x51,
+                       0x68, 0x7f, 0x96, 0xad, 0xc4, 0xdb, 0xf2, 0x09,
+                       0x20, 0x37, 0x4e, 0x65, 0x7c, 0x93, 0xaa, 0xc1,
+                       0xd8, 0xef, 0x06, 0x1d, 0x34, 0x4b, 0x62, 0x79,
+                       0x90, 0xa7, 0xbe, 0xd5, 0xec, 0x03, 0x1a, 0x31,
+                       0x48, 0x5f, 0x76, 0x8d, 0xa4, 0xbb, 0xd2, 0xe9,
+                       0x00, 0x19, 0x32, 0x4b, 0x64, 0x7d, 0x96, 0xaf,
+                       0xc8, 0xe1, 0xfa, 0x13, 0x2c, 0x45, 0x5e, 0x77,
+                       0x90, 0xa9, 0xc2, 0xdb, 0xf4, 0x0d, 0x26, 0x3f,
+                       0x58, 0x71, 0x8a, 0xa3, 0xbc, 0xd5, 0xee, 0x07,
+                       0x20, 0x39, 0x52, 0x6b, 0x84, 0x9d, 0xb6, 0xcf,
+                       0xe8, 0x01, 0x1a, 0x33, 0x4c, 0x65, 0x7e, 0x97,
+                       0xb0, 0xc9, 0xe2, 0xfb, 0x14, 0x2d, 0x46, 0x5f,
+                       0x78, 0x91, 0xaa, 0xc3, 0xdc, 0xf5, 0x0e, 0x27,
+                       0x40, 0x59, 0x72, 0x8b, 0xa4, 0xbd, 0xd6, 0xef,
+                       0x08, 0x21, 0x3a, 0x53, 0x6c, 0x85, 0x9e, 0xb7,
+                       0xd0, 0xe9, 0x02, 0x1b, 0x34, 0x4d, 0x66, 0x7f,
+                       0x98, 0xb1, 0xca, 0xe3, 0xfc, 0x15, 0x2e, 0x47,
+                       0x60, 0x79, 0x92, 0xab, 0xc4, 0xdd, 0xf6, 0x0f,
+                       0x28, 0x41, 0x5a, 0x73, 0x8c, 0xa5, 0xbe, 0xd7,
+                       0xf0, 0x09, 0x22, 0x3b, 0x54, 0x6d, 0x86, 0x9f,
+                       0xb8, 0xd1, 0xea, 0x03, 0x1c, 0x35, 0x4e, 0x67,
+                       0x80, 0x99, 0xb2, 0xcb, 0xe4, 0xfd, 0x16, 0x2f,
+                       0x48, 0x61, 0x7a, 0x93, 0xac, 0xc5, 0xde, 0xf7,
+                       0x10, 0x29, 0x42, 0x5b, 0x74, 0x8d, 0xa6, 0xbf,
+                       0xd8, 0xf1, 0x0a, 0x23, 0x3c, 0x55, 0x6e, 0x87,
+                       0xa0, 0xb9, 0xd2, 0xeb, 0x04, 0x1d, 0x36, 0x4f,
+                       0x68, 0x81, 0x9a, 0xb3, 0xcc, 0xe5, 0xfe, 0x17,
+                       0x30, 0x49, 0x62, 0x7b, 0x94, 0xad, 0xc6, 0xdf,
+                       0xf8, 0x11, 0x2a, 0x43, 0x5c, 0x75, 0x8e, 0xa7,
+                       0xc0, 0xd9, 0xf2, 0x0b, 0x24, 0x3d, 0x56, 0x6f,
+                       0x88, 0xa1, 0xba, 0xd3, 0xec, 0x05, 0x1e, 0x37,
+                       0x50, 0x69, 0x82, 0x9b, 0xb4, 0xcd, 0xe6, 0xff,
+                       0x18, 0x31, 0x4a, 0x63, 0x7c, 0x95, 0xae, 0xc7,
+                       0xe0, 0xf9, 0x12, 0x2b, 0x44, 0x5d, 0x76, 0x8f,
+                       0xa8, 0xc1, 0xda, 0xf3, 0x0c, 0x25, 0x3e, 0x57,
+                       0x70, 0x89, 0xa2, 0xbb, 0xd4, 0xed, 0x06, 0x1f,
+                       0x38, 0x51, 0x6a, 0x83, 0x9c, 0xb5, 0xce, 0xe7,
+                       0x00, 0x1b, 0x36, 0x51, 0x6c, 0x87, 0xa2, 0xbd,
+                       0xd8, 0xf3, 0x0e, 0x29, 0x44, 0x5f, 0x7a, 0x95,
+                       0xb0, 0xcb, 0xe6, 0x01, 0x1c, 0x37, 0x52, 0x6d,
+                       0x88, 0xa3, 0xbe, 0xd9, 0xf4, 0x0f, 0x2a, 0x45,
+                       0x60, 0x7b, 0x96, 0xb1, 0xcc, 0xe7, 0x02, 0x1d,
+                       0x38, 0x53, 0x6e, 0x89, 0xa4, 0xbf, 0xda, 0xf5,
+                       0x10, 0x2b, 0x46, 0x61, 0x7c, 0x97, 0xb2, 0xcd,
+                       0xe8, 0x03, 0x1e, 0x39, 0x54, 0x6f, 0x8a, 0xa5,
+                       0xc0, 0xdb, 0xf6, 0x11, 0x2c, 0x47, 0x62, 0x7d,
+                       0x98, 0xb3, 0xce, 0xe9, 0x04, 0x1f, 0x3a, 0x55,
+                       0x70, 0x8b, 0xa6, 0xc1, 0xdc, 0xf7, 0x12, 0x2d,
+                       0x48, 0x63, 0x7e, 0x99, 0xb4, 0xcf, 0xea, 0x05,
+                       0x20, 0x3b, 0x56, 0x71, 0x8c, 0xa7, 0xc2, 0xdd,
+                       0xf8, 0x13, 0x2e, 0x49, 0x64, 0x7f, 0x9a, 0xb5,
+                       0xd0, 0xeb, 0x06, 0x21, 0x3c, 0x57, 0x72, 0x8d,
+                       0xa8, 0xc3, 0xde, 0xf9, 0x14, 0x2f, 0x4a, 0x65,
+                       0x80, 0x9b, 0xb6, 0xd1, 0xec, 0x07, 0x22, 0x3d,
+                       0x58, 0x73, 0x8e, 0xa9, 0xc4, 0xdf, 0xfa, 0x15,
+                       0x30, 0x4b, 0x66, 0x81, 0x9c, 0xb7, 0xd2, 0xed,
+                       0x08, 0x23, 0x3e, 0x59, 0x74, 0x8f, 0xaa, 0xc5,
+                       0xe0, 0xfb, 0x16, 0x31, 0x4c, 0x67, 0x82, 0x9d,
+                       0xb8, 0xd3, 0xee, 0x09, 0x24, 0x3f, 0x5a, 0x75,
+                       0x90, 0xab, 0xc6, 0xe1, 0xfc, 0x17, 0x32, 0x4d,
+                       0x68, 0x83, 0x9e, 0xb9, 0xd4, 0xef, 0x0a, 0x25,
+                       0x40, 0x5b, 0x76, 0x91, 0xac, 0xc7, 0xe2, 0xfd,
+                       0x18, 0x33, 0x4e, 0x69, 0x84, 0x9f, 0xba, 0xd5,
+                       0xf0, 0x0b, 0x26, 0x41, 0x5c, 0x77, 0x92, 0xad,
+                       0xc8, 0xe3, 0xfe, 0x19, 0x34, 0x4f, 0x6a, 0x85,
+                       0xa0, 0xbb, 0xd6, 0xf1, 0x0c, 0x27, 0x42, 0x5d,
+                       0x78, 0x93, 0xae, 0xc9, 0xe4, 0xff, 0x1a, 0x35,
+                       0x50, 0x6b, 0x86, 0xa1, 0xbc, 0xd7, 0xf2, 0x0d,
+                       0x28, 0x43, 0x5e, 0x79, 0x94, 0xaf, 0xca, 0xe5,
+                       0x00, 0x1d, 0x3a, 0x57, 0x74, 0x91, 0xae, 0xcb,
+                       0xe8, 0x05, 0x22, 0x3f, 0x5c, 0x79, 0x96, 0xb3,
+                       0xd0, 0xed, 0x0a, 0x27, 0x44, 0x61, 0x7e, 0x9b,
+                       0xb8, 0xd5, 0xf2, 0x0f, 0x2c, 0x49, 0x66, 0x83,
+                       0xa0, 0xbd, 0xda, 0xf7, 0x14, 0x31, 0x4e, 0x6b,
+                       0x88, 0xa5, 0xc2, 0xdf, 0xfc, 0x19, 0x36, 0x53,
+                       0x70, 0x8d, 0xaa, 0xc7, 0xe4, 0x01, 0x1e, 0x3b,
+                       0x58, 0x75, 0x92, 0xaf, 0xcc, 0xe9, 0x06, 0x23,
+                       0x40, 0x5d, 0x7a, 0x97, 0xb4, 0xd1, 0xee, 0x0b,
+                       0x28, 0x45, 0x62, 0x7f, 0x9c, 0xb9, 0xd6, 0xf3,
+                       0x10, 0x2d, 0x4a, 0x67, 0x84, 0xa1, 0xbe, 0xdb,
+                       0xf8, 0x15, 0x32, 0x4f, 0x6c, 0x89, 0xa6, 0xc3,
+                       0xe0, 0xfd, 0x1a, 0x37, 0x54, 0x71, 0x8e, 0xab,
+                       0xc8, 0xe5, 0x02, 0x1f, 0x3c, 0x59, 0x76, 0x93,
+                       0xb0, 0xcd, 0xea, 0x07, 0x24, 0x41, 0x5e, 0x7b,
+                       0x98, 0xb5, 0xd2, 0xef, 0x0c, 0x29, 0x46, 0x63,
+                       0x80, 0x9d, 0xba, 0xd7, 0xf4, 0x11, 0x2e, 0x4b,
+                       0x68, 0x85, 0xa2, 0xbf, 0xdc, 0xf9, 0x16, 0x33,
+                       0x50, 0x6d, 0x8a, 0xa7, 0xc4, 0xe1, 0xfe, 0x1b,
+                       0x38, 0x55, 0x72, 0x8f, 0xac, 0xc9, 0xe6, 0x03,
+                       0x20, 0x3d, 0x5a, 0x77, 0x94, 0xb1, 0xce, 0xeb,
+                       0x08, 0x25, 0x42, 0x5f, 0x7c, 0x99, 0xb6, 0xd3,
+                       0xf0, 0x0d, 0x2a, 0x47, 0x64, 0x81, 0x9e, 0xbb,
+                       0xd8, 0xf5, 0x12, 0x2f, 0x4c, 0x69, 0x86, 0xa3,
+                       0xc0, 0xdd, 0xfa, 0x17, 0x34, 0x51, 0x6e, 0x8b,
+                       0xa8, 0xc5, 0xe2, 0xff, 0x1c, 0x39, 0x56, 0x73,
+                       0x90, 0xad, 0xca, 0xe7, 0x04, 0x21, 0x3e, 0x5b,
+                       0x78, 0x95, 0xb2, 0xcf, 0xec, 0x09, 0x26, 0x43,
+                       0x60, 0x7d, 0x9a, 0xb7, 0xd4, 0xf1, 0x0e, 0x2b,
+                       0x48, 0x65, 0x82, 0x9f, 0xbc, 0xd9, 0xf6, 0x13,
+                       0x30, 0x4d, 0x6a, 0x87, 0xa4, 0xc1, 0xde, 0xfb,
+                       0x18, 0x35, 0x52, 0x6f, 0x8c, 0xa9, 0xc6, 0xe3,
+                       0x00, 0x1f, 0x3e, 0x5d, 0x7c, 0x9b, 0xba, 0xd9,
+                       0xf8, 0x17, 0x36, 0x55, 0x74, 0x93, 0xb2, 0xd1,
+                       0xf0, 0x0f, 0x2e, 0x4d, 0x6c, 0x8b, 0xaa, 0xc9,
+                       0xe8, 0x07, 0x26, 0x45, 0x64, 0x83, 0xa2, 0xc1,
+                       0xe0, 0xff, 0x1e, 0x3d, 0x5c, 0x7b, 0x9a, 0xb9,
+                       0xd8, 0xf7, 0x16, 0x35, 0x54, 0x73, 0x92, 0xb1,
+                       0xd0, 0xef, 0x0e, 0x2d, 0x4c, 0x6b, 0x8a, 0xa9,
+                       0xc8, 0xe7, 0x06, 0x25, 0x44, 0x63, 0x82, 0xa1,
+                       0xc0, 0xdf, 0xfe, 0x1d, 0x3c, 0x5b, 0x7a, 0x99,
+                       0xb8, 0xd7, 0xf6, 0x15, 0x34, 0x53, 0x72, 0x91,
+                       0xb0, 0xcf, 0xee, 0x0d, 0x2c, 0x4b, 0x6a, 0x89,
+                       0xa8, 0xc7, 0xe6, 0x05, 0x24, 0x43, 0x62, 0x81,
+                       0xa0, 0xbf, 0xde, 0xfd, 0x1c, 0x3b, 0x5a, 0x79,
+                       0x98, 0xb7, 0xd6, 0xf5, 0x14, 0x33, 0x52, 0x71,
+                       0x90, 0xaf, 0xce, 0xed, 0x0c, 0x2b, 0x4a, 0x69,
+                       0x88, 0xa7, 0xc6, 0xe5, 0x04, 0x23, 0x42, 0x61,
+                       0x80, 0x9f, 0xbe, 0xdd, 0xfc, 0x1b, 0x3a, 0x59,
+                       0x78, 0x97, 0xb6, 0xd5, 0xf4, 0x13, 0x32, 0x51,
+                       0x70, 0x8f, 0xae, 0xcd, 0xec, 0x0b, 0x2a, 0x49,
+                       0x68, 0x87, 0xa6, 0xc5, 0xe4, 0x03, 0x22, 0x41,
+                       0x60, 0x7f, 0x9e, 0xbd, 0xdc, 0xfb, 0x1a, 0x39,
+                       0x58, 0x77, 0x96, 0xb5, 0xd4, 0xf3, 0x12, 0x31,
+                       0x50, 0x6f, 0x8e, 0xad, 0xcc, 0xeb, 0x0a, 0x29,
+                       0x48, 0x67, 0x86, 0xa5, 0xc4, 0xe3, 0x02, 0x21,
+                       0x40, 0x5f, 0x7e, 0x9d, 0xbc, 0xdb, 0xfa, 0x19,
+                       0x38, 0x57, 0x76, 0x95, 0xb4, 0xd3, 0xf2, 0x11,
+                       0x30, 0x4f, 0x6e, 0x8d, 0xac, 0xcb, 0xea, 0x09,
+                       0x28, 0x47, 0x66, 0x85, 0xa4, 0xc3, 0xe2, 0x01,
+                       0x20, 0x3f, 0x5e, 0x7d, 0x9c, 0xbb, 0xda, 0xf9,
+                       0x18, 0x37, 0x56, 0x75, 0x94, 0xb3, 0xd2, 0xf1,
+                       0x10, 0x2f, 0x4e, 0x6d, 0x8c, 0xab, 0xca, 0xe9,
+                       0x08, 0x27, 0x46, 0x65, 0x84, 0xa3, 0xc2, 0xe1,
+                       0x00, 0x21, 0x42, 0x63,
+               },
+               .ilen = 4100,
+               .result = {
+                       0xb5, 0x81, 0xf5, 0x64, 0x18, 0x73, 0xe3, 0xf0,
+                       0x4c, 0x13, 0xf2, 0x77, 0x18, 0x60, 0x65, 0x5e,
+                       0x29, 0x01, 0xce, 0x98, 0x55, 0x53, 0xf9, 0x0c,
+                       0x2a, 0x08, 0xd5, 0x09, 0xb3, 0x57, 0x55, 0x56,
+                       0xc5, 0xe9, 0x56, 0x90, 0xcb, 0x6a, 0xa3, 0xc0,
+                       0xff, 0xc4, 0x79, 0xb4, 0xd2, 0x97, 0x5d, 0xc4,
+                       0x43, 0xd1, 0xfe, 0x94, 0x7b, 0x88, 0x06, 0x5a,
+                       0xb2, 0x9e, 0x2c, 0xfc, 0x44, 0x03, 0xb7, 0x90,
+                       0xa0, 0xc1, 0xba, 0x6a, 0x33, 0xb8, 0xc7, 0xb2,
+                       0x9d, 0xe1, 0x12, 0x4f, 0xc0, 0x64, 0xd4, 0x01,
+                       0xfe, 0x8c, 0x7a, 0x66, 0xf7, 0xe6, 0x5a, 0x91,
+                       0xbb, 0xde, 0x56, 0x86, 0xab, 0x65, 0x21, 0x30,
+                       0x00, 0x84, 0x65, 0x24, 0xa5, 0x7d, 0x85, 0xb4,
+                       0xe3, 0x17, 0xed, 0x3a, 0xb7, 0x6f, 0xb4, 0x0b,
+                       0x0b, 0xaf, 0x15, 0xae, 0x5a, 0x8f, 0xf2, 0x0c,
+                       0x2f, 0x27, 0xf4, 0x09, 0xd8, 0xd2, 0x96, 0xb7,
+                       0x71, 0xf2, 0xc5, 0x99, 0x4d, 0x7e, 0x7f, 0x75,
+                       0x77, 0x89, 0x30, 0x8b, 0x59, 0xdb, 0xa2, 0xb2,
+                       0xa0, 0xf3, 0x19, 0x39, 0x2b, 0xc5, 0x7e, 0x3f,
+                       0x4f, 0xd9, 0xd3, 0x56, 0x28, 0x97, 0x44, 0xdc,
+                       0xc0, 0x8b, 0x77, 0x24, 0xd9, 0x52, 0xe7, 0xc5,
+                       0xaf, 0xf6, 0x7d, 0x59, 0xb2, 0x44, 0x05, 0x1d,
+                       0xb1, 0xb0, 0x11, 0xa5, 0x0f, 0xec, 0x33, 0xe1,
+                       0x6d, 0x1b, 0x4e, 0x1f, 0xff, 0x57, 0x91, 0xb4,
+                       0x5b, 0x9a, 0x96, 0xc5, 0x53, 0xbc, 0xae, 0x20,
+                       0x3c, 0xbb, 0x14, 0xe2, 0xe8, 0x22, 0x33, 0xc1,
+                       0x5e, 0x76, 0x9e, 0x46, 0x99, 0xf6, 0x2a, 0x15,
+                       0xc6, 0x97, 0x02, 0xa0, 0x66, 0x43, 0xd1, 0xa6,
+                       0x31, 0xa6, 0x9f, 0xfb, 0xf4, 0xd3, 0x69, 0xe5,
+                       0xcd, 0x76, 0x95, 0xb8, 0x7a, 0x82, 0x7f, 0x21,
+                       0x45, 0xff, 0x3f, 0xce, 0x55, 0xf6, 0x95, 0x10,
+                       0x08, 0x77, 0x10, 0x43, 0xc6, 0xf3, 0x09, 0xe5,
+                       0x68, 0xe7, 0x3c, 0xad, 0x00, 0x52, 0x45, 0x0d,
+                       0xfe, 0x2d, 0xc6, 0xc2, 0x94, 0x8c, 0x12, 0x1d,
+                       0xe6, 0x25, 0xae, 0x98, 0x12, 0x8e, 0x19, 0x9c,
+                       0x81, 0x68, 0xb1, 0x11, 0xf6, 0x69, 0xda, 0xe3,
+                       0x62, 0x08, 0x18, 0x7a, 0x25, 0x49, 0x28, 0xac,
+                       0xba, 0x71, 0x12, 0x0b, 0xe4, 0xa2, 0xe5, 0xc7,
+                       0x5d, 0x8e, 0xec, 0x49, 0x40, 0x21, 0xbf, 0x5a,
+                       0x98, 0xf3, 0x02, 0x68, 0x55, 0x03, 0x7f, 0x8a,
+                       0xe5, 0x94, 0x0c, 0x32, 0x5c, 0x07, 0x82, 0x63,
+                       0xaf, 0x6f, 0x91, 0x40, 0x84, 0x8e, 0x52, 0x25,
+                       0xd0, 0xb0, 0x29, 0x53, 0x05, 0xe2, 0x50, 0x7a,
+                       0x34, 0xeb, 0xc9, 0x46, 0x20, 0xa8, 0x3d, 0xde,
+                       0x7f, 0x16, 0x5f, 0x36, 0xc5, 0x2e, 0xdc, 0xd1,
+                       0x15, 0x47, 0xc7, 0x50, 0x40, 0x6d, 0x91, 0xc5,
+                       0xe7, 0x93, 0x95, 0x1a, 0xd3, 0x57, 0xbc, 0x52,
+                       0x33, 0xee, 0x14, 0x19, 0x22, 0x52, 0x89, 0xa7,
+                       0x4a, 0x25, 0x56, 0x77, 0x4b, 0xca, 0xcf, 0x0a,
+                       0xe1, 0xf5, 0x35, 0x85, 0x30, 0x7e, 0x59, 0x4a,
+                       0xbd, 0x14, 0x5b, 0xdf, 0xe3, 0x46, 0xcb, 0xac,
+                       0x1f, 0x6c, 0x96, 0x0e, 0xf4, 0x81, 0xd1, 0x99,
+                       0xca, 0x88, 0x63, 0x3d, 0x02, 0x58, 0x6b, 0xa9,
+                       0xe5, 0x9f, 0xb3, 0x00, 0xb2, 0x54, 0xc6, 0x74,
+                       0x1c, 0xbf, 0x46, 0xab, 0x97, 0xcc, 0xf8, 0x54,
+                       0x04, 0x07, 0x08, 0x52, 0xe6, 0xc0, 0xda, 0x93,
+                       0x74, 0x7d, 0x93, 0x99, 0x5d, 0x78, 0x68, 0xa6,
+                       0x2e, 0x6b, 0xd3, 0x6a, 0x69, 0xcc, 0x12, 0x6b,
+                       0xd4, 0xc7, 0xa5, 0xc6, 0xe7, 0xf6, 0x03, 0x04,
+                       0x5d, 0xcd, 0x61, 0x5e, 0x17, 0x40, 0xdc, 0xd1,
+                       0x5c, 0xf5, 0x08, 0xdf, 0x5c, 0x90, 0x85, 0xa4,
+                       0xaf, 0xf6, 0x78, 0xbb, 0x0d, 0xf1, 0xf4, 0xa4,
+                       0x54, 0x26, 0x72, 0x9e, 0x61, 0xfa, 0x86, 0xcf,
+                       0xe8, 0x9e, 0xa1, 0xe0, 0xc7, 0x48, 0x23, 0xae,
+                       0x5a, 0x90, 0xae, 0x75, 0x0a, 0x74, 0x18, 0x89,
+                       0x05, 0xb1, 0x92, 0xb2, 0x7f, 0xd0, 0x1b, 0xa6,
+                       0x62, 0x07, 0x25, 0x01, 0xc7, 0xc2, 0x4f, 0xf9,
+                       0xe8, 0xfe, 0x63, 0x95, 0x80, 0x07, 0xb4, 0x26,
+                       0xcc, 0xd1, 0x26, 0xb6, 0xc4, 0x3f, 0x9e, 0xcb,
+                       0x8e, 0x3b, 0x2e, 0x44, 0x16, 0xd3, 0x10, 0x9a,
+                       0x95, 0x08, 0xeb, 0xc8, 0xcb, 0xeb, 0xbf, 0x6f,
+                       0x0b, 0xcd, 0x1f, 0xc8, 0xca, 0x86, 0xaa, 0xec,
+                       0x33, 0xe6, 0x69, 0xf4, 0x45, 0x25, 0x86, 0x3a,
+                       0x22, 0x94, 0x4f, 0x00, 0x23, 0x6a, 0x44, 0xc2,
+                       0x49, 0x97, 0x33, 0xab, 0x36, 0x14, 0x0a, 0x70,
+                       0x24, 0xc3, 0xbe, 0x04, 0x3b, 0x79, 0xa0, 0xf9,
+                       0xb8, 0xe7, 0x76, 0x29, 0x22, 0x83, 0xd7, 0xf2,
+                       0x94, 0xf4, 0x41, 0x49, 0xba, 0x5f, 0x7b, 0x07,
+                       0xb5, 0xfb, 0xdb, 0x03, 0x1a, 0x9f, 0xb6, 0x4c,
+                       0xc2, 0x2e, 0x37, 0x40, 0x49, 0xc3, 0x38, 0x16,
+                       0xe2, 0x4f, 0x77, 0x82, 0xb0, 0x68, 0x4c, 0x71,
+                       0x1d, 0x57, 0x61, 0x9c, 0xd9, 0x4e, 0x54, 0x99,
+                       0x47, 0x13, 0x28, 0x73, 0x3c, 0xbb, 0x00, 0x90,
+                       0xf3, 0x4d, 0xc9, 0x0e, 0xfd, 0xe7, 0xb1, 0x71,
+                       0xd3, 0x15, 0x79, 0xbf, 0xcc, 0x26, 0x2f, 0xbd,
+                       0xad, 0x6c, 0x50, 0x69, 0x6c, 0x3e, 0x6d, 0x80,
+                       0x9a, 0xea, 0x78, 0xaf, 0x19, 0xb2, 0x0d, 0x4d,
+                       0xad, 0x04, 0x07, 0xae, 0x22, 0x90, 0x4a, 0x93,
+                       0x32, 0x0e, 0x36, 0x9b, 0x1b, 0x46, 0xba, 0x3b,
+                       0xb4, 0xac, 0xc6, 0xd1, 0xa2, 0x31, 0x53, 0x3b,
+                       0x2a, 0x3d, 0x45, 0xfe, 0x03, 0x61, 0x10, 0x85,
+                       0x17, 0x69, 0xa6, 0x78, 0xcc, 0x6c, 0x87, 0x49,
+                       0x53, 0xf9, 0x80, 0x10, 0xde, 0x80, 0xa2, 0x41,
+                       0x6a, 0xc3, 0x32, 0x02, 0xad, 0x6d, 0x3c, 0x56,
+                       0x00, 0x71, 0x51, 0x06, 0xa7, 0xbd, 0xfb, 0xef,
+                       0x3c, 0xb5, 0x9f, 0xfc, 0x48, 0x7d, 0x53, 0x7c,
+                       0x66, 0xb0, 0x49, 0x23, 0xc4, 0x47, 0x10, 0x0e,
+                       0xe5, 0x6c, 0x74, 0x13, 0xe6, 0xc5, 0x3f, 0xaa,
+                       0xde, 0xff, 0x07, 0x44, 0xdd, 0x56, 0x1b, 0xad,
+                       0x09, 0x77, 0xfb, 0x5b, 0x12, 0xb8, 0x0d, 0x38,
+                       0x17, 0x37, 0x35, 0x7b, 0x9b, 0xbc, 0xfe, 0xd4,
+                       0x7e, 0x8b, 0xda, 0x7e, 0x5b, 0x04, 0xa7, 0x22,
+                       0xa7, 0x31, 0xa1, 0x20, 0x86, 0xc7, 0x1b, 0x99,
+                       0xdb, 0xd1, 0x89, 0xf4, 0x94, 0xa3, 0x53, 0x69,
+                       0x8d, 0xe7, 0xe8, 0x74, 0x11, 0x8d, 0x74, 0xd6,
+                       0x07, 0x37, 0x91, 0x9f, 0xfd, 0x67, 0x50, 0x3a,
+                       0xc9, 0xe1, 0xf4, 0x36, 0xd5, 0xa0, 0x47, 0xd1,
+                       0xf9, 0xe5, 0x39, 0xa3, 0x31, 0xac, 0x07, 0x36,
+                       0x23, 0xf8, 0x66, 0x18, 0x14, 0x28, 0x34, 0x0f,
+                       0xb8, 0xd0, 0xe7, 0x29, 0xb3, 0x04, 0x4b, 0x55,
+                       0x01, 0x41, 0xb2, 0x75, 0x8d, 0xcb, 0x96, 0x85,
+                       0x3a, 0xfb, 0xab, 0x2b, 0x9e, 0xfa, 0x58, 0x20,
+                       0x44, 0x1f, 0xc0, 0x14, 0x22, 0x75, 0x61, 0xe8,
+                       0xaa, 0x19, 0xcf, 0xf1, 0x82, 0x56, 0xf4, 0xd7,
+                       0x78, 0x7b, 0x3d, 0x5f, 0xb3, 0x9e, 0x0b, 0x8a,
+                       0x57, 0x50, 0xdb, 0x17, 0x41, 0x65, 0x4d, 0xa3,
+                       0x02, 0xc9, 0x9c, 0x9c, 0x53, 0xfb, 0x39, 0x39,
+                       0x9b, 0x1d, 0x72, 0x24, 0xda, 0xb7, 0x39, 0xbe,
+                       0x13, 0x3b, 0xfa, 0x29, 0xda, 0x9e, 0x54, 0x64,
+                       0x6e, 0xba, 0xd8, 0xa1, 0xcb, 0xb3, 0x36, 0xfa,
+                       0xcb, 0x47, 0x85, 0xe9, 0x61, 0x38, 0xbc, 0xbe,
+                       0xc5, 0x00, 0x38, 0x2a, 0x54, 0xf7, 0xc4, 0xb9,
+                       0xb3, 0xd3, 0x7b, 0xa0, 0xa0, 0xf8, 0x72, 0x7f,
+                       0x8c, 0x8e, 0x82, 0x0e, 0xc6, 0x1c, 0x75, 0x9d,
+                       0xca, 0x8e, 0x61, 0x87, 0xde, 0xad, 0x80, 0xd2,
+                       0xf5, 0xf9, 0x80, 0xef, 0x15, 0x75, 0xaf, 0xf5,
+                       0x80, 0xfb, 0xff, 0x6d, 0x1e, 0x25, 0xb7, 0x40,
+                       0x61, 0x6a, 0x39, 0x5a, 0x6a, 0xb5, 0x31, 0xab,
+                       0x97, 0x8a, 0x19, 0x89, 0x44, 0x40, 0xc0, 0xa6,
+                       0xb4, 0x4e, 0x30, 0x32, 0x7b, 0x13, 0xe7, 0x67,
+                       0xa9, 0x8b, 0x57, 0x04, 0xc2, 0x01, 0xa6, 0xf4,
+                       0x28, 0x99, 0xad, 0x2c, 0x76, 0xa3, 0x78, 0xc2,
+                       0x4a, 0xe6, 0xca, 0x5c, 0x50, 0x6a, 0xc1, 0xb0,
+                       0x62, 0x4b, 0x10, 0x8e, 0x7c, 0x17, 0x43, 0xb3,
+                       0x17, 0x66, 0x1c, 0x3e, 0x8d, 0x69, 0xf0, 0x5a,
+                       0x71, 0xf5, 0x97, 0xdc, 0xd1, 0x45, 0xdd, 0x28,
+                       0xf3, 0x5d, 0xdf, 0x53, 0x7b, 0x11, 0xe5, 0xbc,
+                       0x4c, 0xdb, 0x1b, 0x51, 0x6b, 0xe9, 0xfb, 0x3d,
+                       0xc1, 0xc3, 0x2c, 0xb9, 0x71, 0xf5, 0xb6, 0xb2,
+                       0x13, 0x36, 0x79, 0x80, 0x53, 0xe8, 0xd3, 0xa6,
+                       0x0a, 0xaf, 0xfd, 0x56, 0x97, 0xf7, 0x40, 0x8e,
+                       0x45, 0xce, 0xf8, 0xb0, 0x9e, 0x5c, 0x33, 0x82,
+                       0xb0, 0x44, 0x56, 0xfc, 0x05, 0x09, 0xe9, 0x2a,
+                       0xac, 0x26, 0x80, 0x14, 0x1d, 0xc8, 0x3a, 0x35,
+                       0x4c, 0x82, 0x97, 0xfd, 0x76, 0xb7, 0xa9, 0x0a,
+                       0x35, 0x58, 0x79, 0x8e, 0x0f, 0x66, 0xea, 0xaf,
+                       0x51, 0x6c, 0x09, 0xa9, 0x6e, 0x9b, 0xcb, 0x9a,
+                       0x31, 0x47, 0xa0, 0x2f, 0x7c, 0x71, 0xb4, 0x4a,
+                       0x11, 0xaa, 0x8c, 0x66, 0xc5, 0x64, 0xe6, 0x3a,
+                       0x54, 0xda, 0x24, 0x6a, 0xc4, 0x41, 0x65, 0x46,
+                       0x82, 0xa0, 0x0a, 0x0f, 0x5f, 0xfb, 0x25, 0xd0,
+                       0x2c, 0x91, 0xa7, 0xee, 0xc4, 0x81, 0x07, 0x86,
+                       0x75, 0x5e, 0x33, 0x69, 0x97, 0xe4, 0x2c, 0xa8,
+                       0x9d, 0x9f, 0x0b, 0x6a, 0xbe, 0xad, 0x98, 0xda,
+                       0x6d, 0x94, 0x41, 0xda, 0x2c, 0x1e, 0x89, 0xc4,
+                       0xc2, 0xaf, 0x1e, 0x00, 0x05, 0x0b, 0x83, 0x60,
+                       0xbd, 0x43, 0xea, 0x15, 0x23, 0x7f, 0xb9, 0xac,
+                       0xee, 0x4f, 0x2c, 0xaf, 0x2a, 0xf3, 0xdf, 0xd0,
+                       0xf3, 0x19, 0x31, 0xbb, 0x4a, 0x74, 0x84, 0x17,
+                       0x52, 0x32, 0x2c, 0x7d, 0x61, 0xe4, 0xcb, 0xeb,
+                       0x80, 0x38, 0x15, 0x52, 0xcb, 0x6f, 0xea, 0xe5,
+                       0x73, 0x9c, 0xd9, 0x24, 0x69, 0xc6, 0x95, 0x32,
+                       0x21, 0xc8, 0x11, 0xe4, 0xdc, 0x36, 0xd7, 0x93,
+                       0x38, 0x66, 0xfb, 0xb2, 0x7f, 0x3a, 0xb9, 0xaf,
+                       0x31, 0xdd, 0x93, 0x75, 0x78, 0x8a, 0x2c, 0x94,
+                       0x87, 0x1a, 0x58, 0xec, 0x9e, 0x7d, 0x4d, 0xba,
+                       0xe1, 0xe5, 0x4d, 0xfc, 0xbc, 0xa4, 0x2a, 0x14,
+                       0xef, 0xcc, 0xa7, 0xec, 0xab, 0x43, 0x09, 0x18,
+                       0xd3, 0xab, 0x68, 0xd1, 0x07, 0x99, 0x44, 0x47,
+                       0xd6, 0x83, 0x85, 0x3b, 0x30, 0xea, 0xa9, 0x6b,
+                       0x63, 0xea, 0xc4, 0x07, 0xfb, 0x43, 0x2f, 0xa4,
+                       0xaa, 0xb0, 0xab, 0x03, 0x89, 0xce, 0x3f, 0x8c,
+                       0x02, 0x7c, 0x86, 0x54, 0xbc, 0x88, 0xaf, 0x75,
+                       0xd2, 0xdc, 0x63, 0x17, 0xd3, 0x26, 0xf6, 0x96,
+                       0xa9, 0x3c, 0xf1, 0x61, 0x8c, 0x11, 0x18, 0xcc,
+                       0xd6, 0xea, 0x5b, 0xe2, 0xcd, 0xf0, 0xf1, 0xb2,
+                       0xe5, 0x35, 0x90, 0x1f, 0x85, 0x4c, 0x76, 0x5b,
+                       0x66, 0xce, 0x44, 0xa4, 0x32, 0x9f, 0xe6, 0x7b,
+                       0x71, 0x6e, 0x9f, 0x58, 0x15, 0x67, 0x72, 0x87,
+                       0x64, 0x8e, 0x3a, 0x44, 0x45, 0xd4, 0x76, 0xfa,
+                       0xc2, 0xf6, 0xef, 0x85, 0x05, 0x18, 0x7a, 0x9b,
+                       0xba, 0x41, 0x54, 0xac, 0xf0, 0xfc, 0x59, 0x12,
+                       0x3f, 0xdf, 0xa0, 0xe5, 0x8a, 0x65, 0xfd, 0x3a,
+                       0x62, 0x8d, 0x83, 0x2c, 0x03, 0xbe, 0x05, 0x76,
+                       0x2e, 0x53, 0x49, 0x97, 0x94, 0x33, 0xae, 0x40,
+                       0x81, 0x15, 0xdb, 0x6e, 0xad, 0xaa, 0xf5, 0x4b,
+                       0xe3, 0x98, 0x70, 0xdf, 0xe0, 0x7c, 0xcd, 0xdb,
+                       0x02, 0xd4, 0x7d, 0x2f, 0xc1, 0xe6, 0xb4, 0xf3,
+                       0xd7, 0x0d, 0x7a, 0xd9, 0x23, 0x9e, 0x87, 0x2d,
+                       0xce, 0x87, 0xad, 0xcc, 0x72, 0x05, 0x00, 0x29,
+                       0xdc, 0x73, 0x7f, 0x64, 0xc1, 0x15, 0x0e, 0xc2,
+                       0xdf, 0xa7, 0x5f, 0xeb, 0x41, 0xa1, 0xcd, 0xef,
+                       0x5c, 0x50, 0x79, 0x2a, 0x56, 0x56, 0x71, 0x8c,
+                       0xac, 0xc0, 0x79, 0x50, 0x69, 0xca, 0x59, 0x32,
+                       0x65, 0xf2, 0x54, 0xe4, 0x52, 0x38, 0x76, 0xd1,
+                       0x5e, 0xde, 0x26, 0x9e, 0xfb, 0x75, 0x2e, 0x11,
+                       0xb5, 0x10, 0xf4, 0x17, 0x73, 0xf5, 0x89, 0xc7,
+                       0x4f, 0x43, 0x5c, 0x8e, 0x7c, 0xb9, 0x05, 0x52,
+                       0x24, 0x40, 0x99, 0xfe, 0x9b, 0x85, 0x0b, 0x6c,
+                       0x22, 0x3e, 0x8b, 0xae, 0x86, 0xa1, 0xd2, 0x79,
+                       0x05, 0x68, 0x6b, 0xab, 0xe3, 0x41, 0x49, 0xed,
+                       0x15, 0xa1, 0x8d, 0x40, 0x2d, 0x61, 0xdf, 0x1a,
+                       0x59, 0xc9, 0x26, 0x8b, 0xef, 0x30, 0x4c, 0x88,
+                       0x4b, 0x10, 0xf8, 0x8d, 0xa6, 0x92, 0x9f, 0x4b,
+                       0xf3, 0xc4, 0x53, 0x0b, 0x89, 0x5d, 0x28, 0x92,
+                       0xcf, 0x78, 0xb2, 0xc0, 0x5d, 0xed, 0x7e, 0xfc,
+                       0xc0, 0x12, 0x23, 0x5f, 0x5a, 0x78, 0x86, 0x43,
+                       0x6e, 0x27, 0xf7, 0x5a, 0xa7, 0x6a, 0xed, 0x19,
+                       0x04, 0xf0, 0xb3, 0x12, 0xd1, 0xbd, 0x0e, 0x89,
+                       0x6e, 0xbc, 0x96, 0xa8, 0xd8, 0x49, 0x39, 0x9f,
+                       0x7e, 0x67, 0xf0, 0x2e, 0x3e, 0x01, 0xa9, 0xba,
+                       0xec, 0x8b, 0x62, 0x8e, 0xcb, 0x4a, 0x70, 0x43,
+                       0xc7, 0xc2, 0xc4, 0xca, 0x82, 0x03, 0x73, 0xe9,
+                       0x11, 0xdf, 0xcf, 0x54, 0xea, 0xc9, 0xb0, 0x95,
+                       0x51, 0xc0, 0x13, 0x3d, 0x92, 0x05, 0xfa, 0xf4,
+                       0xa9, 0x34, 0xc8, 0xce, 0x6c, 0x3d, 0x54, 0xcc,
+                       0xc4, 0xaf, 0xf1, 0xdc, 0x11, 0x44, 0x26, 0xa2,
+                       0xaf, 0xf1, 0x85, 0x75, 0x7d, 0x03, 0x61, 0x68,
+                       0x4e, 0x78, 0xc6, 0x92, 0x7d, 0x86, 0x7d, 0x77,
+                       0xdc, 0x71, 0x72, 0xdb, 0xc6, 0xae, 0xa1, 0xcb,
+                       0x70, 0x9a, 0x0b, 0x19, 0xbe, 0x4a, 0x6c, 0x2a,
+                       0xe2, 0xba, 0x6c, 0x64, 0x9a, 0x13, 0x28, 0xdf,
+                       0x85, 0x75, 0xe6, 0x43, 0xf6, 0x87, 0x08, 0x68,
+                       0x6e, 0xba, 0x6e, 0x79, 0x9f, 0x04, 0xbc, 0x23,
+                       0x50, 0xf6, 0x33, 0x5c, 0x1f, 0x24, 0x25, 0xbe,
+                       0x33, 0x47, 0x80, 0x45, 0x56, 0xa3, 0xa7, 0xd7,
+                       0x7a, 0xb1, 0x34, 0x0b, 0x90, 0x3c, 0x9c, 0xad,
+                       0x44, 0x5f, 0x9e, 0x0e, 0x9d, 0xd4, 0xbd, 0x93,
+                       0x5e, 0xfa, 0x3c, 0xe0, 0xb0, 0xd9, 0xed, 0xf3,
+                       0xd6, 0x2e, 0xff, 0x24, 0xd8, 0x71, 0x6c, 0xed,
+                       0xaf, 0x55, 0xeb, 0x22, 0xac, 0x93, 0x68, 0x32,
+                       0x05, 0x5b, 0x47, 0xdd, 0xc6, 0x4a, 0xcb, 0xc7,
+                       0x10, 0xe1, 0x3c, 0x92, 0x1a, 0xf3, 0x23, 0x78,
+                       0x2b, 0xa1, 0xd2, 0x80, 0xf4, 0x12, 0xb1, 0x20,
+                       0x8f, 0xff, 0x26, 0x35, 0xdd, 0xfb, 0xc7, 0x4e,
+                       0x78, 0xf1, 0x2d, 0x50, 0x12, 0x77, 0xa8, 0x60,
+                       0x7c, 0x0f, 0xf5, 0x16, 0x2f, 0x63, 0x70, 0x2a,
+                       0xc0, 0x96, 0x80, 0x4e, 0x0a, 0xb4, 0x93, 0x35,
+                       0x5d, 0x1d, 0x3f, 0x56, 0xf7, 0x2f, 0xbb, 0x90,
+                       0x11, 0x16, 0x8f, 0xa2, 0xec, 0x47, 0xbe, 0xac,
+                       0x56, 0x01, 0x26, 0x56, 0xb1, 0x8c, 0xb2, 0x10,
+                       0xf9, 0x1a, 0xca, 0xf5, 0xd1, 0xb7, 0x39, 0x20,
+                       0x63, 0xf1, 0x69, 0x20, 0x4f, 0x13, 0x12, 0x1f,
+                       0x5b, 0x65, 0xfc, 0x98, 0xf7, 0xc4, 0x7a, 0xbe,
+                       0xf7, 0x26, 0x4d, 0x2b, 0x84, 0x7b, 0x42, 0xad,
+                       0xd8, 0x7a, 0x0a, 0xb4, 0xd8, 0x74, 0xbf, 0xc1,
+                       0xf0, 0x6e, 0xb4, 0x29, 0xa3, 0xbb, 0xca, 0x46,
+                       0x67, 0x70, 0x6a, 0x2d, 0xce, 0x0e, 0xa2, 0x8a,
+                       0xa9, 0x87, 0xbf, 0x05, 0xc4, 0xc1, 0x04, 0xa3,
+                       0xab, 0xd4, 0x45, 0x43, 0x8c, 0xb6, 0x02, 0xb0,
+                       0x41, 0xc8, 0xfc, 0x44, 0x3d, 0x59, 0xaa, 0x2e,
+                       0x44, 0x21, 0x2a, 0x8d, 0x88, 0x9d, 0x57, 0xf4,
+                       0xa0, 0x02, 0x77, 0xb8, 0xa6, 0xa0, 0xe6, 0x75,
+                       0x5c, 0x82, 0x65, 0x3e, 0x03, 0x5c, 0x29, 0x8f,
+                       0x38, 0x55, 0xab, 0x33, 0x26, 0xef, 0x9f, 0x43,
+                       0x52, 0xfd, 0x68, 0xaf, 0x36, 0xb4, 0xbb, 0x9a,
+                       0x58, 0x09, 0x09, 0x1b, 0xc3, 0x65, 0x46, 0x46,
+                       0x1d, 0xa7, 0x94, 0x18, 0x23, 0x50, 0x2c, 0xca,
+                       0x2c, 0x55, 0x19, 0x97, 0x01, 0x9d, 0x93, 0x3b,
+                       0x63, 0x86, 0xf2, 0x03, 0x67, 0x45, 0xd2, 0x72,
+                       0x28, 0x52, 0x6c, 0xf4, 0xe3, 0x1c, 0xb5, 0x11,
+                       0x13, 0xf1, 0xeb, 0x21, 0xc7, 0xd9, 0x56, 0x82,
+                       0x2b, 0x82, 0x39, 0xbd, 0x69, 0x54, 0xed, 0x62,
+                       0xc3, 0xe2, 0xde, 0x73, 0xd4, 0x6a, 0x12, 0xae,
+                       0x13, 0x21, 0x7f, 0x4b, 0x5b, 0xfc, 0xbf, 0xe8,
+                       0x2b, 0xbe, 0x56, 0xba, 0x68, 0x8b, 0x9a, 0xb1,
+                       0x6e, 0xfa, 0xbf, 0x7e, 0x5a, 0x4b, 0xf1, 0xac,
+                       0x98, 0x65, 0x85, 0xd1, 0x93, 0x53, 0xd3, 0x7b,
+                       0x09, 0xdd, 0x4b, 0x10, 0x6d, 0x84, 0xb0, 0x13,
+                       0x65, 0xbd, 0xcf, 0x52, 0x09, 0xc4, 0x85, 0xe2,
+                       0x84, 0x74, 0x15, 0x65, 0xb7, 0xf7, 0x51, 0xaf,
+                       0x55, 0xad, 0xa4, 0xd1, 0x22, 0x54, 0x70, 0x94,
+                       0xa0, 0x1c, 0x90, 0x41, 0xfd, 0x99, 0xd7, 0x5a,
+                       0x31, 0xef, 0xaa, 0x25, 0xd0, 0x7f, 0x4f, 0xea,
+                       0x1d, 0x55, 0x42, 0xe5, 0x49, 0xb0, 0xd0, 0x46,
+                       0x62, 0x36, 0x43, 0xb2, 0x82, 0x15, 0x75, 0x50,
+                       0xa4, 0x72, 0xeb, 0x54, 0x27, 0x1f, 0x8a, 0xe4,
+                       0x7d, 0xe9, 0x66, 0xc5, 0xf1, 0x53, 0xa4, 0xd1,
+                       0x0c, 0xeb, 0xb8, 0xf8, 0xbc, 0xd4, 0xe2, 0xe7,
+                       0xe1, 0xf8, 0x4b, 0xcb, 0xa9, 0xa1, 0xaf, 0x15,
+                       0x83, 0xcb, 0x72, 0xd0, 0x33, 0x79, 0x00, 0x2d,
+                       0x9f, 0xd7, 0xf1, 0x2e, 0x1e, 0x10, 0xe4, 0x45,
+                       0xc0, 0x75, 0x3a, 0x39, 0xea, 0x68, 0xf7, 0x5d,
+                       0x1b, 0x73, 0x8f, 0xe9, 0x8e, 0x0f, 0x72, 0x47,
+                       0xae, 0x35, 0x0a, 0x31, 0x7a, 0x14, 0x4d, 0x4a,
+                       0x6f, 0x47, 0xf7, 0x7e, 0x91, 0x6e, 0x74, 0x8b,
+                       0x26, 0x47, 0xf9, 0xc3, 0xf9, 0xde, 0x70, 0xf5,
+                       0x61, 0xab, 0xa9, 0x27, 0x9f, 0x82, 0xe4, 0x9c,
+                       0x89, 0x91, 0x3f, 0x2e, 0x6a, 0xfd, 0xb5, 0x49,
+                       0xe9, 0xfd, 0x59, 0x14, 0x36, 0x49, 0x40, 0x6d,
+                       0x32, 0xd8, 0x85, 0x42, 0xf3, 0xa5, 0xdf, 0x0c,
+                       0xa8, 0x27, 0xd7, 0x54, 0xe2, 0x63, 0x2f, 0xf2,
+                       0x7e, 0x8b, 0x8b, 0xe7, 0xf1, 0x9a, 0x95, 0x35,
+                       0x43, 0xdc, 0x3a, 0xe4, 0xb6, 0xf4, 0xd0, 0xdf,
+                       0x9c, 0xcb, 0x94, 0xf3, 0x21, 0xa0, 0x77, 0x50,
+                       0xe2, 0xc6, 0xc4, 0xc6, 0x5f, 0x09, 0x64, 0x5b,
+                       0x92, 0x90, 0xd8, 0xe1, 0xd1, 0xed, 0x4b, 0x42,
+                       0xd7, 0x37, 0xaf, 0x65, 0x3d, 0x11, 0x39, 0xb6,
+                       0x24, 0x8a, 0x60, 0xae, 0xd6, 0x1e, 0xbf, 0x0e,
+                       0x0d, 0xd7, 0xdc, 0x96, 0x0e, 0x65, 0x75, 0x4e,
+                       0x29, 0x06, 0x9d, 0xa4, 0x51, 0x3a, 0x10, 0x63,
+                       0x8f, 0x17, 0x07, 0xd5, 0x8e, 0x3c, 0xf4, 0x28,
+                       0x00, 0x5a, 0x5b, 0x05, 0x19, 0xd8, 0xc0, 0x6c,
+                       0xe5, 0x15, 0xe4, 0x9c, 0x9d, 0x71, 0x9d, 0x5e,
+                       0x94, 0x29, 0x1a, 0xa7, 0x80, 0xfa, 0x0e, 0x33,
+                       0x03, 0xdd, 0xb7, 0x3e, 0x9a, 0xa9, 0x26, 0x18,
+                       0x37, 0xa9, 0x64, 0x08, 0x4d, 0x94, 0x5a, 0x88,
+                       0xca, 0x35, 0xce, 0x81, 0x02, 0xe3, 0x1f, 0x1b,
+                       0x89, 0x1a, 0x77, 0x85, 0xe3, 0x41, 0x6d, 0x32,
+                       0x42, 0x19, 0x23, 0x7d, 0xc8, 0x73, 0xee, 0x25,
+                       0x85, 0x0d, 0xf8, 0x31, 0x25, 0x79, 0x1b, 0x6f,
+                       0x79, 0x25, 0xd2, 0xd8, 0xd4, 0x23, 0xfd, 0xf7,
+                       0x82, 0x36, 0x6a, 0x0c, 0x46, 0x22, 0x15, 0xe9,
+                       0xff, 0x72, 0x41, 0x91, 0x91, 0x7d, 0x3a, 0xb7,
+                       0xdd, 0x65, 0x99, 0x70, 0xf6, 0x8d, 0x84, 0xf8,
+                       0x67, 0x15, 0x20, 0x11, 0xd6, 0xb2, 0x55, 0x7b,
+                       0xdb, 0x87, 0xee, 0xef, 0x55, 0x89, 0x2a, 0x59,
+                       0x2b, 0x07, 0x8f, 0x43, 0x8a, 0x59, 0x3c, 0x01,
+                       0x8b, 0x65, 0x54, 0xa1, 0x66, 0xd5, 0x38, 0xbd,
+                       0xc6, 0x30, 0xa9, 0xcc, 0x49, 0xb6, 0xa8, 0x1b,
+                       0xb8, 0xc0, 0x0e, 0xe3, 0x45, 0x28, 0xe2, 0xff,
+                       0x41, 0x9f, 0x7e, 0x7c, 0xd1, 0xae, 0x9e, 0x25,
+                       0x3f, 0x4c, 0x7c, 0x7c, 0xf4, 0xa8, 0x26, 0x4d,
+                       0x5c, 0xfd, 0x4b, 0x27, 0x18, 0xf9, 0x61, 0x76,
+                       0x48, 0xba, 0x0c, 0x6b, 0xa9, 0x4d, 0xfc, 0xf5,
+                       0x3b, 0x35, 0x7e, 0x2f, 0x4a, 0xa9, 0xc2, 0x9a,
+                       0xae, 0xab, 0x86, 0x09, 0x89, 0xc9, 0xc2, 0x40,
+                       0x39, 0x2c, 0x81, 0xb3, 0xb8, 0x17, 0x67, 0xc2,
+                       0x0d, 0x32, 0x4a, 0x3a, 0x67, 0x81, 0xd7, 0x1a,
+                       0x34, 0x52, 0xc5, 0xdb, 0x0a, 0xf5, 0x63, 0x39,
+                       0xea, 0x1f, 0xe1, 0x7c, 0xa1, 0x9e, 0xc1, 0x35,
+                       0xe3, 0xb1, 0x18, 0x45, 0x67, 0xf9, 0x22, 0x38,
+                       0x95, 0xd9, 0x34, 0x34, 0x86, 0xc6, 0x41, 0x94,
+                       0x15, 0xf9, 0x5b, 0x41, 0xa6, 0x87, 0x8b, 0xf8,
+                       0xd5, 0xe1, 0x1b, 0xe2, 0x5b, 0xf3, 0x86, 0x10,
+                       0xff, 0xe6, 0xae, 0x69, 0x76, 0xbc, 0x0d, 0xb4,
+                       0x09, 0x90, 0x0c, 0xa2, 0x65, 0x0c, 0xad, 0x74,
+                       0xf5, 0xd7, 0xff, 0xda, 0xc1, 0xce, 0x85, 0xbe,
+                       0x00, 0xa7, 0xff, 0x4d, 0x2f, 0x65, 0xd3, 0x8c,
+                       0x86, 0x2d, 0x05, 0xe8, 0xed, 0x3e, 0x6b, 0x8b,
+                       0x0f, 0x3d, 0x83, 0x8c, 0xf1, 0x1d, 0x5b, 0x96,
+                       0x2e, 0xb1, 0x9c, 0xc2, 0x98, 0xe1, 0x70, 0xb9,
+                       0xba, 0x5c, 0x8a, 0x43, 0xd6, 0x34, 0xa7, 0x2d,
+                       0xc9, 0x92, 0xae, 0xf2, 0xa5, 0x7b, 0x05, 0x49,
+                       0xa7, 0x33, 0x34, 0x86, 0xca, 0xe4, 0x96, 0x23,
+                       0x76, 0x5b, 0xf2, 0xc6, 0xf1, 0x51, 0x28, 0x42,
+                       0x7b, 0xcc, 0x76, 0x8f, 0xfa, 0xa2, 0xad, 0x31,
+                       0xd4, 0xd6, 0x7a, 0x6d, 0x25, 0x25, 0x54, 0xe4,
+                       0x3f, 0x50, 0x59, 0xe1, 0x5c, 0x05, 0xb7, 0x27,
+                       0x48, 0xbf, 0x07, 0xec, 0x1b, 0x13, 0xbe, 0x2b,
+                       0xa1, 0x57, 0x2b, 0xd5, 0xab, 0xd7, 0xd0, 0x4c,
+                       0x1e, 0xcb, 0x71, 0x9b, 0xc5, 0x90, 0x85, 0xd3,
+                       0xde, 0x59, 0xec, 0x71, 0xeb, 0x89, 0xbb, 0xd0,
+                       0x09, 0x50, 0xe1, 0x16, 0x3f, 0xfd, 0x1c, 0x34,
+                       0xc3, 0x1c, 0xa1, 0x10, 0x77, 0x53, 0x98, 0xef,
+                       0xf2, 0xfd, 0xa5, 0x01, 0x59, 0xc2, 0x9b, 0x26,
+                       0xc7, 0x42, 0xd9, 0x49, 0xda, 0x58, 0x2b, 0x6e,
+                       0x9f, 0x53, 0x19, 0x76, 0x7e, 0xd9, 0xc9, 0x0e,
+                       0x68, 0xc8, 0x7f, 0x51, 0x22, 0x42, 0xef, 0x49,
+                       0xa4, 0x55, 0xb6, 0x36, 0xac, 0x09, 0xc7, 0x31,
+                       0x88, 0x15, 0x4b, 0x2e, 0x8f, 0x3a, 0x08, 0xf7,
+                       0xd8, 0xf7, 0xa8, 0xc5, 0xa9, 0x33, 0xa6, 0x45,
+                       0xe4, 0xc4, 0x94, 0x76, 0xf3, 0x0d, 0x8f, 0x7e,
+                       0xc8, 0xf6, 0xbc, 0x23, 0x0a, 0xb6, 0x4c, 0xd3,
+                       0x6a, 0xcd, 0x36, 0xc2, 0x90, 0x5c, 0x5c, 0x3c,
+                       0x65, 0x7b, 0xc2, 0xd6, 0xcc, 0xe6, 0x0d, 0x87,
+                       0x73, 0x2e, 0x71, 0x79, 0x16, 0x06, 0x63, 0x28,
+                       0x09, 0x15, 0xd8, 0x89, 0x38, 0x38, 0x3d, 0xb5,
+                       0x42, 0x1c, 0x08, 0x24, 0xf7, 0x2a, 0xd2, 0x9d,
+                       0xc8, 0xca, 0xef, 0xf9, 0x27, 0xd8, 0x07, 0x86,
+                       0xf7, 0x43, 0x0b, 0x55, 0x15, 0x3f, 0x9f, 0x83,
+                       0xef, 0xdc, 0x49, 0x9d, 0x2a, 0xc1, 0x54, 0x62,
+                       0xbd, 0x9b, 0x66, 0x55, 0x9f, 0xb7, 0x12, 0xf3,
+                       0x1b, 0x4d, 0x9d, 0x2a, 0x5c, 0xed, 0x87, 0x75,
+                       0x87, 0x26, 0xec, 0x61, 0x2c, 0xb4, 0x0f, 0x89,
+                       0xb0, 0xfb, 0x2e, 0x68, 0x5d, 0x15, 0xc7, 0x8d,
+                       0x2e, 0xc0, 0xd9, 0xec, 0xaf, 0x4f, 0xd2, 0x25,
+                       0x29, 0xe8, 0xd2, 0x26, 0x2b, 0x67, 0xe9, 0xfc,
+                       0x2b, 0xa8, 0x67, 0x96, 0x12, 0x1f, 0x5b, 0x96,
+                       0xc6, 0x14, 0x53, 0xaf, 0x44, 0xea, 0xd6, 0xe2,
+                       0x94, 0x98, 0xe4, 0x12, 0x93, 0x4c, 0x92, 0xe0,
+                       0x18, 0xa5, 0x8d, 0x2d, 0xe4, 0x71, 0x3c, 0x47,
+                       0x4c, 0xf7, 0xe6, 0x47, 0x9e, 0xc0, 0x68, 0xdf,
+                       0xd4, 0xf5, 0x5a, 0x74, 0xb1, 0x2b, 0x29, 0x03,
+                       0x19, 0x07, 0xaf, 0x90, 0x62, 0x5c, 0x68, 0x98,
+                       0x48, 0x16, 0x11, 0x02, 0x9d, 0xee, 0xb4, 0x9b,
+                       0xe5, 0x42, 0x7f, 0x08, 0xfd, 0x16, 0x32, 0x0b,
+                       0xd0, 0xb3, 0xfa, 0x2b, 0xb7, 0x99, 0xf9, 0x29,
+                       0xcd, 0x20, 0x45, 0x9f, 0xb3, 0x1a, 0x5d, 0xa2,
+                       0xaf, 0x4d, 0xe0, 0xbd, 0x42, 0x0d, 0xbc, 0x74,
+                       0x99, 0x9c, 0x8e, 0x53, 0x1a, 0xb4, 0x3e, 0xbd,
+                       0xa2, 0x9a, 0x2d, 0xf7, 0xf8, 0x39, 0x0f, 0x67,
+                       0x63, 0xfc, 0x6b, 0xc0, 0xaf, 0xb3, 0x4b, 0x4f,
+                       0x55, 0xc4, 0xcf, 0xa7, 0xc8, 0x04, 0x11, 0x3e,
+                       0x14, 0x32, 0xbb, 0x1b, 0x38, 0x77, 0xd6, 0x7f,
+                       0x54, 0x4c, 0xdf, 0x75, 0xf3, 0x07, 0x2d, 0x33,
+                       0x9b, 0xa8, 0x20, 0xe1, 0x7b, 0x12, 0xb5, 0xf3,
+                       0xef, 0x2f, 0xce, 0x72, 0xe5, 0x24, 0x60, 0xc1,
+                       0x30, 0xe2, 0xab, 0xa1, 0x8e, 0x11, 0x09, 0xa8,
+                       0x21, 0x33, 0x44, 0xfe, 0x7f, 0x35, 0x32, 0x93,
+                       0x39, 0xa7, 0xad, 0x8b, 0x79, 0x06, 0xb2, 0xcb,
+                       0x4e, 0xa9, 0x5f, 0xc7, 0xba, 0x74, 0x29, 0xec,
+                       0x93, 0xa0, 0x4e, 0x54, 0x93, 0xc0, 0xbc, 0x55,
+                       0x64, 0xf0, 0x48, 0xe5, 0x57, 0x99, 0xee, 0x75,
+                       0xd6, 0x79, 0x0f, 0x66, 0xb7, 0xc6, 0x57, 0x76,
+                       0xf7, 0xb7, 0xf3, 0x9c, 0xc5, 0x60, 0xe8, 0x7f,
+                       0x83, 0x76, 0xd6, 0x0e, 0xaa, 0xe6, 0x90, 0x39,
+                       0x1d, 0xa6, 0x32, 0x6a, 0x34, 0xe3, 0x55, 0xf8,
+                       0x58, 0xa0, 0x58, 0x7d, 0x33, 0xe0, 0x22, 0x39,
+                       0x44, 0x64, 0x87, 0x86, 0x5a, 0x2f, 0xa7, 0x7e,
+                       0x0f, 0x38, 0xea, 0xb0, 0x30, 0xcc, 0x61, 0xa5,
+                       0x6a, 0x32, 0xae, 0x1e, 0xf7, 0xe9, 0xd0, 0xa9,
+                       0x0c, 0x32, 0x4b, 0xb5, 0x49, 0x28, 0xab, 0x85,
+                       0x2f, 0x8e, 0x01, 0x36, 0x38, 0x52, 0xd0, 0xba,
+                       0xd6, 0x02, 0x78, 0xf8, 0x0e, 0x3e, 0x9c, 0x8b,
+                       0x6b, 0x45, 0x99, 0x3f, 0x5c, 0xfe, 0x58, 0xf1,
+                       0x5c, 0x94, 0x04, 0xe1, 0xf5, 0x18, 0x6d, 0x51,
+                       0xb2, 0x5d, 0x18, 0x20, 0xb6, 0xc2, 0x9a, 0x42,
+                       0x1d, 0xb3, 0xab, 0x3c, 0xb6, 0x3a, 0x13, 0x03,
+                       0xb2, 0x46, 0x82, 0x4f, 0xfc, 0x64, 0xbc, 0x4f,
+                       0xca, 0xfa, 0x9c, 0xc0, 0xd5, 0xa7, 0xbd, 0x11,
+                       0xb7, 0xe4, 0x5a, 0xf6, 0x6f, 0x4d, 0x4d, 0x54,
+                       0xea, 0xa4, 0x98, 0x66, 0xd4, 0x22, 0x3b, 0xd3,
+                       0x8f, 0x34, 0x47, 0xd9, 0x7c, 0xf4, 0x72, 0x3b,
+                       0x4d, 0x02, 0x77, 0xf6, 0xd6, 0xdd, 0x08, 0x0a,
+                       0x81, 0xe1, 0x86, 0x89, 0x3e, 0x56, 0x10, 0x3c,
+                       0xba, 0xd7, 0x81, 0x8c, 0x08, 0xbc, 0x8b, 0xe2,
+                       0x53, 0xec, 0xa7, 0x89, 0xee, 0xc8, 0x56, 0xb5,
+                       0x36, 0x2c, 0xb2, 0x03, 0xba, 0x99, 0xdd, 0x7c,
+                       0x48, 0xa0, 0xb0, 0xbc, 0x91, 0x33, 0xe9, 0xa8,
+                       0xcb, 0xcd, 0xcf, 0x59, 0x5f, 0x1f, 0x15, 0xe2,
+                       0x56, 0xf5, 0x4e, 0x01, 0x35, 0x27, 0x45, 0x77,
+                       0x47, 0xc8, 0xbc, 0xcb, 0x7e, 0x39, 0xc1, 0x97,
+                       0x28, 0xd3, 0x84, 0xfc, 0x2c, 0x3e, 0xc8, 0xad,
+                       0x9c, 0xf8, 0x8a, 0x61, 0x9c, 0x28, 0xaa, 0xc5,
+                       0x99, 0x20, 0x43, 0x85, 0x9d, 0xa5, 0xe2, 0x8b,
+                       0xb8, 0xae, 0xeb, 0xd0, 0x32, 0x0d, 0x52, 0x78,
+                       0x09, 0x56, 0x3f, 0xc7, 0xd8, 0x7e, 0x26, 0xfc,
+                       0x37, 0xfb, 0x6f, 0x04, 0xfc, 0xfa, 0x92, 0x10,
+                       0xac, 0xf8, 0x3e, 0x21, 0xdc, 0x8c, 0x21, 0x16,
+                       0x7d, 0x67, 0x6e, 0xf6, 0xcd, 0xda, 0xb6, 0x98,
+                       0x23, 0xab, 0x23, 0x3c, 0xb2, 0x10, 0xa0, 0x53,
+                       0x5a, 0x56, 0x9f, 0xc5, 0xd0, 0xff, 0xbb, 0xe4,
+                       0x98, 0x3c, 0x69, 0x1e, 0xdb, 0x38, 0x8f, 0x7e,
+                       0x0f, 0xd2, 0x98, 0x88, 0x81, 0x8b, 0x45, 0x67,
+                       0xea, 0x33, 0xf1, 0xeb, 0xe9, 0x97, 0x55, 0x2e,
+                       0xd9, 0xaa, 0xeb, 0x5a, 0xec, 0xda, 0xe1, 0x68,
+                       0xa8, 0x9d, 0x3c, 0x84, 0x7c, 0x05, 0x3d, 0x62,
+                       0x87, 0x8f, 0x03, 0x21, 0x28, 0x95, 0x0c, 0x89,
+                       0x25, 0x22, 0x4a, 0xb0, 0x93, 0xa9, 0x50, 0xa2,
+                       0x2f, 0x57, 0x6e, 0x18, 0x42, 0x19, 0x54, 0x0c,
+                       0x55, 0x67, 0xc6, 0x11, 0x49, 0xf4, 0x5c, 0xd2,
+                       0xe9, 0x3d, 0xdd, 0x8b, 0x48, 0x71, 0x21, 0x00,
+                       0xc3, 0x9a, 0x6c, 0x85, 0x74, 0x28, 0x83, 0x4a,
+                       0x1b, 0x31, 0x05, 0xe1, 0x06, 0x92, 0xe7, 0xda,
+                       0x85, 0x73, 0x78, 0x45, 0x20, 0x7f, 0xae, 0x13,
+                       0x7c, 0x33, 0x06, 0x22, 0xf4, 0x83, 0xf9, 0x35,
+                       0x3f, 0x6c, 0x71, 0xa8, 0x4e, 0x48, 0xbe, 0x9b,
+                       0xce, 0x8a, 0xba, 0xda, 0xbe, 0x28, 0x08, 0xf7,
+                       0xe2, 0x14, 0x8c, 0x71, 0xea, 0x72, 0xf9, 0x33,
+                       0xf2, 0x88, 0x3f, 0xd7, 0xbb, 0x69, 0x6c, 0x29,
+                       0x19, 0xdc, 0x84, 0xce, 0x1f, 0x12, 0x4f, 0xc8,
+                       0xaf, 0xa5, 0x04, 0xba, 0x5a, 0xab, 0xb0, 0xd9,
+                       0x14, 0x1f, 0x6c, 0x68, 0x98, 0x39, 0x89, 0x7a,
+                       0xd9, 0xd8, 0x2f, 0xdf, 0xa8, 0x47, 0x4a, 0x25,
+                       0xe2, 0xfb, 0x33, 0xf4, 0x59, 0x78, 0xe1, 0x68,
+                       0x85, 0xcf, 0xfe, 0x59, 0x20, 0xd4, 0x05, 0x1d,
+                       0x80, 0x99, 0xae, 0xbc, 0xca, 0xae, 0x0f, 0x2f,
+                       0x65, 0x43, 0x34, 0x8e, 0x7e, 0xac, 0xd3, 0x93,
+                       0x2f, 0xac, 0x6d, 0x14, 0x3d, 0x02, 0x07, 0x70,
+                       0x9d, 0xa4, 0xf3, 0x1b, 0x5c, 0x36, 0xfc, 0x01,
+                       0x73, 0x34, 0x85, 0x0c, 0x6c, 0xd6, 0xf1, 0xbd,
+                       0x3f, 0xdf, 0xee, 0xf5, 0xd9, 0xba, 0x56, 0xef,
+                       0xf4, 0x9b, 0x6b, 0xee, 0x9f, 0x5a, 0x78, 0x6d,
+                       0x32, 0x19, 0xf4, 0xf7, 0xf8, 0x4c, 0x69, 0x0b,
+                       0x4b, 0xbc, 0xbb, 0xb7, 0xf2, 0x85, 0xaf, 0x70,
+                       0x75, 0x24, 0x6c, 0x54, 0xa7, 0x0e, 0x4d, 0x1d,
+                       0x01, 0xbf, 0x08, 0xac, 0xcf, 0x7f, 0x2c, 0xe3,
+                       0x14, 0x89, 0x5e, 0x70, 0x5a, 0x99, 0x92, 0xcd,
+                       0x01, 0x84, 0xc8, 0xd2, 0xab, 0xe5, 0x4f, 0x58,
+                       0xe7, 0x0f, 0x2f, 0x0e, 0xff, 0x68, 0xea, 0xfd,
+                       0x15, 0xb3, 0x17, 0xe6, 0xb0, 0xe7, 0x85, 0xd8,
+                       0x23, 0x2e, 0x05, 0xc7, 0xc9, 0xc4, 0x46, 0x1f,
+                       0xe1, 0x9e, 0x49, 0x20, 0x23, 0x24, 0x4d, 0x7e,
+                       0x29, 0x65, 0xff, 0xf4, 0xb6, 0xfd, 0x1a, 0x85,
+                       0xc4, 0x16, 0xec, 0xfc, 0xea, 0x7b, 0xd6, 0x2c,
+                       0x43, 0xf8, 0xb7, 0xbf, 0x79, 0xc0, 0x85, 0xcd,
+                       0xef, 0xe1, 0x98, 0xd3, 0xa5, 0xf7, 0x90, 0x8c,
+                       0xe9, 0x7f, 0x80, 0x6b, 0xd2, 0xac, 0x4c, 0x30,
+                       0xa7, 0xc6, 0x61, 0x6c, 0xd2, 0xf9, 0x2c, 0xff,
+                       0x30, 0xbc, 0x22, 0x81, 0x7d, 0x93, 0x12, 0xe4,
+                       0x0a, 0xcd, 0xaf, 0xdd, 0xe8, 0xab, 0x0a, 0x1e,
+                       0x13, 0xa4, 0x27, 0xc3, 0x5f, 0xf7, 0x4b, 0xbb,
+                       0x37, 0x09, 0x4b, 0x91, 0x6f, 0x92, 0x4f, 0xaf,
+                       0x52, 0xee, 0xdf, 0xef, 0x09, 0x6f, 0xf7, 0x5c,
+                       0x6e, 0x12, 0x17, 0x72, 0x63, 0x57, 0xc7, 0xba,
+                       0x3b, 0x6b, 0x38, 0x32, 0x73, 0x1b, 0x9c, 0x80,
+                       0xc1, 0x7a, 0xc6, 0xcf, 0xcd, 0x35, 0xc0, 0x6b,
+                       0x31, 0x1a, 0x6b, 0xe9, 0xd8, 0x2c, 0x29, 0x3f,
+                       0x96, 0xfb, 0xb6, 0xcd, 0x13, 0x91, 0x3b, 0xc2,
+                       0xd2, 0xa3, 0x31, 0x8d, 0xa4, 0xcd, 0x57, 0xcd,
+                       0x13, 0x3d, 0x64, 0xfd, 0x06, 0xce, 0xe6, 0xdc,
+                       0x0c, 0x24, 0x43, 0x31, 0x40, 0x57, 0xf1, 0x72,
+                       0x17, 0xe3, 0x3a, 0x63, 0x6d, 0x35, 0xcf, 0x5d,
+                       0x97, 0x40, 0x59, 0xdd, 0xf7, 0x3c, 0x02, 0xf7,
+                       0x1c, 0x7e, 0x05, 0xbb, 0xa9, 0x0d, 0x01, 0xb1,
+                       0x8e, 0xc0, 0x30, 0xa9, 0x53, 0x24, 0xc9, 0x89,
+                       0x84, 0x6d, 0xaa, 0xd0, 0xcd, 0x91, 0xc2, 0x4d,
+                       0x91, 0xb0, 0x89, 0xe2, 0xbf, 0x83, 0x44, 0xaa,
+                       0x28, 0x72, 0x23, 0xa0, 0xc2, 0xad, 0xad, 0x1c,
+                       0xfc, 0x3f, 0x09, 0x7a, 0x0b, 0xdc, 0xc5, 0x1b,
+                       0x87, 0x13, 0xc6, 0x5b, 0x59, 0x8d, 0xf2, 0xc8,
+                       0xaf, 0xdf, 0x11, 0x95,
+               },
+               .rlen = 4100,
+       },
+};
+
 /*
  * Compression stuff.
  */
@@ -4407,6 +7720,88 @@ static struct comp_testvec deflate_decomp_tv_template[] = {
        },
 };
 
+/*
+ * LZO test vectors (null-terminated strings).
+ */
+#define LZO_COMP_TEST_VECTORS 2
+#define LZO_DECOMP_TEST_VECTORS 2
+
+static struct comp_testvec lzo_comp_tv_template[] = {
+       {
+               .inlen  = 70,
+               .outlen = 46,
+               .input  = "Join us now and share the software "
+                         "Join us now and share the software ",
+               .output = {  0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75,
+                            0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e,
+                            0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20,
+                            0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74,
+                            0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e,
+                            0x3d, 0x88, 0x00, 0x11, 0x00, 0x00 },
+       }, {
+               .inlen  = 159,
+               .outlen = 133,
+               .input  = "This document describes a compression method based on the LZO "
+                         "compression algorithm.  This document defines the application of "
+                         "the LZO algorithm used in UBIFS.",
+               .output = { 0x00, 0x2b, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64,
+                           0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20,
+                           0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65,
+                           0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70,
+                           0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20,
+                           0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x20, 0x62,
+                           0x61, 0x73, 0x65, 0x64, 0x20, 0x6f, 0x6e, 0x20,
+                           0x74, 0x68, 0x65, 0x20, 0x4c, 0x5a, 0x4f, 0x2b,
+                           0x8c, 0x00, 0x0d, 0x61, 0x6c, 0x67, 0x6f, 0x72,
+                           0x69, 0x74, 0x68, 0x6d, 0x2e, 0x20, 0x20, 0x54,
+                           0x68, 0x69, 0x73, 0x2a, 0x54, 0x01, 0x02, 0x66,
+                           0x69, 0x6e, 0x65, 0x73, 0x94, 0x06, 0x05, 0x61,
+                           0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x76,
+                           0x0a, 0x6f, 0x66, 0x88, 0x02, 0x60, 0x09, 0x27,
+                           0xf0, 0x00, 0x0c, 0x20, 0x75, 0x73, 0x65, 0x64,
+                           0x20, 0x69, 0x6e, 0x20, 0x55, 0x42, 0x49, 0x46,
+                           0x53, 0x2e, 0x11, 0x00, 0x00 },
+       },
+};
+
+static struct comp_testvec lzo_decomp_tv_template[] = {
+       {
+               .inlen  = 133,
+               .outlen = 159,
+               .input  = { 0x00, 0x2b, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64,
+                           0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20,
+                           0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65,
+                           0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70,
+                           0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20,
+                           0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x20, 0x62,
+                           0x61, 0x73, 0x65, 0x64, 0x20, 0x6f, 0x6e, 0x20,
+                           0x74, 0x68, 0x65, 0x20, 0x4c, 0x5a, 0x4f, 0x2b,
+                           0x8c, 0x00, 0x0d, 0x61, 0x6c, 0x67, 0x6f, 0x72,
+                           0x69, 0x74, 0x68, 0x6d, 0x2e, 0x20, 0x20, 0x54,
+                           0x68, 0x69, 0x73, 0x2a, 0x54, 0x01, 0x02, 0x66,
+                           0x69, 0x6e, 0x65, 0x73, 0x94, 0x06, 0x05, 0x61,
+                           0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x76,
+                           0x0a, 0x6f, 0x66, 0x88, 0x02, 0x60, 0x09, 0x27,
+                           0xf0, 0x00, 0x0c, 0x20, 0x75, 0x73, 0x65, 0x64,
+                           0x20, 0x69, 0x6e, 0x20, 0x55, 0x42, 0x49, 0x46,
+                           0x53, 0x2e, 0x11, 0x00, 0x00 },
+               .output = "This document describes a compression method based on the LZO "
+                         "compression algorithm.  This document defines the application of "
+                         "the LZO algorithm used in UBIFS.",
+       }, {
+               .inlen  = 46,
+               .outlen = 70,
+               .input  = { 0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75,
+                           0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e,
+                           0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20,
+                           0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74,
+                           0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e,
+                           0x3d, 0x88, 0x00, 0x11, 0x00, 0x00 },
+               .output = "Join us now and share the software "
+                         "Join us now and share the software ",
+       },
+};
+
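For orientation, a minimal sketch (not part of the patch) of how a vector such as lzo_comp_tv_template[0] can be round-tripped through the comp API; the scratch-buffer size and the listed includes are assumptions, not the exact tcrypt code:

/*
 * Hedged sketch, illustration only: round-trip one LZO vector from the
 * tables above through the comp API.  The 512-byte scratch buffer is an
 * arbitrary illustrative size; assumes <linux/crypto.h>, <linux/err.h>
 * and <linux/string.h>.
 */
static int check_lzo_comp_vector(const struct comp_testvec *tv)
{
        struct crypto_comp *tfm;
        u8 result[512];
        unsigned int dlen = sizeof(result);
        int ret;

        tfm = crypto_alloc_comp("lzo", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_comp_compress(tfm, (const u8 *)tv->input, tv->inlen,
                                   result, &dlen);
        /* A mismatch in length or bytes means the vector failed. */
        if (!ret && (dlen != tv->outlen || memcmp(result, tv->output, dlen)))
                ret = -EINVAL;

        crypto_free_comp(tfm);
        return ret;
}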
 /*
  * Michael MIC test vectors from IEEE 802.11i
  */
@@ -4812,4 +8207,20 @@ static struct cipher_speed camellia_speed_template[] = {
       {  .klen = 0, .blen = 0, }
 };
 
+static struct cipher_speed salsa20_speed_template[] = {
+      { .klen = 16, .blen = 16, },
+      { .klen = 16, .blen = 64, },
+      { .klen = 16, .blen = 256, },
+      { .klen = 16, .blen = 1024, },
+      { .klen = 16, .blen = 8192, },
+      { .klen = 32, .blen = 16, },
+      { .klen = 32, .blen = 64, },
+      { .klen = 32, .blen = 256, },
+      { .klen = 32, .blen = 1024, },
+      { .klen = 32, .blen = 8192, },
+
+      /* End marker */
+      {  .klen = 0, .blen = 0, }
+};
+
 #endif /* _CRYPTO_TCRYPT_H */
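The all-zero entry above is the sentinel the speed tests stop on; a hedged sketch of that walk, with the cipher_speed field types assumed from the initializers in this header:

/*
 * Hedged sketch, not part of the patch: iterating a cipher_speed table
 * such as salsa20_speed_template.  The { .klen = 0, .blen = 0 } entry
 * terminates the walk; klen/blen are assumed to be unsigned int.
 */
static void walk_cipher_speed(const struct cipher_speed *tmpl)
{
        unsigned int i;

        for (i = 0; tmpl[i].klen != 0; i++)
                printk(KERN_INFO "klen %u blen %u\n",
                       tmpl[i].klen, tmpl[i].blen);
}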
index b4b9c0c..0af216c 100644 (file)
@@ -655,84 +655,48 @@ int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
                        CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
                }
 
-               /* Calculate whitening and round subkeys.  The constants are
-                * indices of subkeys, preprocessed through q0 and q1. */
-               CALC_K256 (w, 0, 0xA9, 0x75, 0x67, 0xF3);
-               CALC_K256 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
-               CALC_K256 (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
-               CALC_K256 (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
-               CALC_K256 (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
-               CALC_K256 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
-               CALC_K256 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
-               CALC_K256 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
-               CALC_K256 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
-               CALC_K256 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
-               CALC_K256 (k, 12, 0x18, 0x37, 0xF7, 0x71);
-               CALC_K256 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
-               CALC_K256 (k, 16, 0x43, 0x30, 0x75, 0x0F);
-               CALC_K256 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
-               CALC_K256 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
-               CALC_K256 (k, 22, 0x94, 0x06, 0x48, 0x3F);
-               CALC_K256 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
-               CALC_K256 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
-               CALC_K256 (k, 28, 0x84, 0x8A, 0x54, 0x00);
-               CALC_K256 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
+               /* CALC_K256/CALC_K192/CALC_K loops were unrolled.
+                * Unrolling produced x2.5 more code (+18k on i386),
+                * and speeded up key setup by 7%:
+                * unrolled: twofish_setkey/sec: 41128
+                *     loop: twofish_setkey/sec: 38148
+                * CALC_K256: ~100 insns each
+                * CALC_K192: ~90 insns
+                *    CALC_K: ~70 insns
+                */
+               /* Calculate whitening and round subkeys */
+               for ( i = 0; i < 8; i += 2 ) {
+                       CALC_K256 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]);
+               }
+               for ( i = 0; i < 32; i += 2 ) {
+                       CALC_K256 (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]);
+               }
        } else if (key_len == 24) { /* 192-bit key */
                /* Compute the S-boxes. */
                for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
                        CALC_SB192_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
                }
 
-               /* Calculate whitening and round subkeys.  The constants are
-                * indices of subkeys, preprocessed through q0 and q1. */
-               CALC_K192 (w, 0, 0xA9, 0x75, 0x67, 0xF3);
-               CALC_K192 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
-               CALC_K192 (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
-               CALC_K192 (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
-               CALC_K192 (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
-               CALC_K192 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
-               CALC_K192 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
-               CALC_K192 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
-               CALC_K192 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
-               CALC_K192 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
-               CALC_K192 (k, 12, 0x18, 0x37, 0xF7, 0x71);
-               CALC_K192 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
-               CALC_K192 (k, 16, 0x43, 0x30, 0x75, 0x0F);
-               CALC_K192 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
-               CALC_K192 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
-               CALC_K192 (k, 22, 0x94, 0x06, 0x48, 0x3F);
-               CALC_K192 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
-               CALC_K192 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
-               CALC_K192 (k, 28, 0x84, 0x8A, 0x54, 0x00);
-               CALC_K192 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
+               /* Calculate whitening and round subkeys */
+               for ( i = 0; i < 8; i += 2 ) {
+                       CALC_K192 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]);
+               }
+               for ( i = 0; i < 32; i += 2 ) {
+                       CALC_K192 (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]);
+               }
        } else { /* 128-bit key */
                /* Compute the S-boxes. */
                for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
                        CALC_SB_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
                }
 
-               /* Calculate whitening and round subkeys.  The constants are
-                * indices of subkeys, preprocessed through q0 and q1. */
-               CALC_K (w, 0, 0xA9, 0x75, 0x67, 0xF3);
-               CALC_K (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
-               CALC_K (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
-               CALC_K (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
-               CALC_K (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
-               CALC_K (k, 2, 0x80, 0xE6, 0x78, 0x6B);
-               CALC_K (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
-               CALC_K (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
-               CALC_K (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
-               CALC_K (k, 10, 0x35, 0xD8, 0x98, 0xFD);
-               CALC_K (k, 12, 0x18, 0x37, 0xF7, 0x71);
-               CALC_K (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
-               CALC_K (k, 16, 0x43, 0x30, 0x75, 0x0F);
-               CALC_K (k, 18, 0x37, 0xF8, 0x26, 0x1B);
-               CALC_K (k, 20, 0xFA, 0x87, 0x13, 0xFA);
-               CALC_K (k, 22, 0x94, 0x06, 0x48, 0x3F);
-               CALC_K (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
-               CALC_K (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
-               CALC_K (k, 28, 0x84, 0x8A, 0x54, 0x00);
-               CALC_K (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
+               /* Calculate whitening and round subkeys */
+               for ( i = 0; i < 8; i += 2 ) {
+                       CALC_K (w, i, q0[i], q1[i], q0[i+1], q1[i+1]);
+               }
+               for ( i = 0; i < 32; i += 2 ) {
+                       CALC_K (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]);
+               }
        }
 
        return 0;
index ac68f3b..a82959d 100644 (file)
@@ -19,6 +19,7 @@
  *     Kazunori Miyazawa <miyazawa@linux-ipv6.org>
  */
 
+#include <crypto/scatterwalk.h>
 #include <linux/crypto.h>
 #include <linux/err.h>
 #include <linux/hardirq.h>
@@ -27,7 +28,6 @@
 #include <linux/rtnetlink.h>
 #include <linux/slab.h>
 #include <linux/scatterlist.h>
-#include "internal.h"
 
 static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
                           0x02020202, 0x02020202, 0x02020202, 0x02020202,
@@ -307,7 +307,8 @@ static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
        case 16:
                break;
        default:
-               return ERR_PTR(PTR_ERR(alg));
+               inst = ERR_PTR(-EINVAL);
+               goto out_put_alg;
        }
 
        inst = crypto_alloc_instance("xcbc", alg);
@@ -320,10 +321,7 @@ static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
        inst->alg.cra_alignmask = alg->cra_alignmask;
        inst->alg.cra_type = &crypto_hash_type;
 
-       inst->alg.cra_hash.digestsize =
-               (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-               CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
-                                      alg->cra_blocksize;
+       inst->alg.cra_hash.digestsize = alg->cra_blocksize;
        inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
                                ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
        inst->alg.cra_init = xcbc_init_tfm;
index 556fd81..c422e87 100644 (file)
@@ -28,6 +28,7 @@
 #include <linux/kernel.h>
 #include <linux/pci.h>
 #include <linux/hw_random.h>
+#include <linux/delay.h>
 #include <asm/io.h>
 
 
@@ -52,11 +53,18 @@ MODULE_DEVICE_TABLE(pci, pci_tbl);
 static struct pci_dev *amd_pdev;
 
 
-static int amd_rng_data_present(struct hwrng *rng)
+static int amd_rng_data_present(struct hwrng *rng, int wait)
 {
        u32 pmbase = (u32)rng->priv;
+       int data, i;
 
-       return !!(inl(pmbase + 0xF4) & 1);
+       for (i = 0; i < 20; i++) {
+               data = !!(inl(pmbase + 0xF4) & 1);
+               if (data || !wait)
+                       break;
+               udelay(10);
+       }
+       return data;
 }
 
 static int amd_rng_data_read(struct hwrng *rng, u32 *data)
index 26a860a..0118b98 100644 (file)
@@ -66,11 +66,11 @@ static inline void hwrng_cleanup(struct hwrng *rng)
                rng->cleanup(rng);
 }
 
-static inline int hwrng_data_present(struct hwrng *rng)
+static inline int hwrng_data_present(struct hwrng *rng, int wait)
 {
        if (!rng->data_present)
                return 1;
-       return rng->data_present(rng);
+       return rng->data_present(rng, wait);
 }
 
 static inline int hwrng_data_read(struct hwrng *rng, u32 *data)
@@ -94,8 +94,7 @@ static ssize_t rng_dev_read(struct file *filp, char __user *buf,
 {
        u32 data;
        ssize_t ret = 0;
-       int i, err = 0;
-       int data_present;
+       int err = 0;
        int bytes_read;
 
        while (size) {
@@ -107,21 +106,10 @@ static ssize_t rng_dev_read(struct file *filp, char __user *buf,
                        err = -ENODEV;
                        goto out;
                }
-               if (filp->f_flags & O_NONBLOCK) {
-                       data_present = hwrng_data_present(current_rng);
-               } else {
-                       /* Some RNG require some time between data_reads to gather
-                        * new entropy. Poll it.
-                        */
-                       for (i = 0; i < 20; i++) {
-                               data_present = hwrng_data_present(current_rng);
-                               if (data_present)
-                                       break;
-                               udelay(10);
-                       }
-               }
+
                bytes_read = 0;
-               if (data_present)
+               if (hwrng_data_present(current_rng,
+                                      !(filp->f_flags & O_NONBLOCK)))
                        bytes_read = hwrng_data_read(current_rng, &data);
                mutex_unlock(&rng_mutex);
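With the wait flag pushed down from the core, each data_present() callback below open-codes the same bounded poll; a hypothetical helper showing the shared shape (illustration only, not part of the patch):

/*
 * Hypothetical helper, illustration only: the bounded poll that the
 * amd/geode/intel/omap/pasemi/via data_present() callbacks below each
 * open-code.  Try the readiness check up to 20 times, pausing 10us
 * between attempts when the caller is willing to wait.  Assumes
 * <linux/hw_random.h> and <linux/delay.h>.
 */
static int hwrng_poll_present(int (*present)(struct hwrng *rng),
                              struct hwrng *rng, int wait)
{
        int data, i;

        for (i = 0; i < 20; i++) {
                data = present(rng);
                if (data || !wait)
                        break;
                udelay(10);
        }
        return data;
}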
 
index 8e8658d..fed4ef5 100644 (file)
@@ -28,6 +28,7 @@
 #include <linux/kernel.h>
 #include <linux/pci.h>
 #include <linux/hw_random.h>
+#include <linux/delay.h>
 #include <asm/io.h>
 
 
@@ -61,11 +62,18 @@ static int geode_rng_data_read(struct hwrng *rng, u32 *data)
        return 4;
 }
 
-static int geode_rng_data_present(struct hwrng *rng)
+static int geode_rng_data_present(struct hwrng *rng, int wait)
 {
        void __iomem *mem = (void __iomem *)rng->priv;
+       int data, i;
 
-       return !!(readl(mem + GEODE_RNG_STATUS_REG));
+       for (i = 0; i < 20; i++) {
+               data = !!(readl(mem + GEODE_RNG_STATUS_REG));
+               if (data || !wait)
+                       break;
+               udelay(10);
+       }
+       return data;
 }
 
 
index 753f460..5cc651e 100644 (file)
@@ -29,6 +29,7 @@
 #include <linux/module.h>
 #include <linux/pci.h>
 #include <linux/stop_machine.h>
+#include <linux/delay.h>
 #include <asm/io.h>
 
 
@@ -162,11 +163,19 @@ static inline u8 hwstatus_set(void __iomem *mem,
        return hwstatus_get(mem);
 }
 
-static int intel_rng_data_present(struct hwrng *rng)
+static int intel_rng_data_present(struct hwrng *rng, int wait)
 {
        void __iomem *mem = (void __iomem *)rng->priv;
-
-       return !!(readb(mem + INTEL_RNG_STATUS) & INTEL_RNG_DATA_PRESENT);
+       int data, i;
+
+       for (i = 0; i < 20; i++) {
+               data = !!(readb(mem + INTEL_RNG_STATUS) &
+                         INTEL_RNG_DATA_PRESENT);
+               if (data || !wait)
+                       break;
+               udelay(10);
+       }
+       return data;
 }
 
 static int intel_rng_data_read(struct hwrng *rng, u32 *data)
index 3f35a1c..7e31995 100644 (file)
@@ -29,6 +29,7 @@
 #include <linux/err.h>
 #include <linux/platform_device.h>
 #include <linux/hw_random.h>
+#include <linux/delay.h>
 
 #include <asm/io.h>
 
@@ -65,9 +66,17 @@ static void omap_rng_write_reg(int reg, u32 val)
 }
 
 /* REVISIT: Does the status bit really work on 16xx? */
-static int omap_rng_data_present(struct hwrng *rng)
+static int omap_rng_data_present(struct hwrng *rng, int wait)
 {
-       return omap_rng_read_reg(RNG_STAT_REG) ? 0 : 1;
+       int data, i;
+
+       for (i = 0; i < 20; i++) {
+               data = omap_rng_read_reg(RNG_STAT_REG) ? 0 : 1;
+               if (data || !wait)
+                       break;
+               udelay(10);
+       }
+       return data;
 }
 
 static int omap_rng_data_read(struct hwrng *rng, u32 *data)
index fa6040b..e2ea210 100644 (file)
@@ -23,6 +23,7 @@
 #include <linux/kernel.h>
 #include <linux/platform_device.h>
 #include <linux/hw_random.h>
+#include <linux/delay.h>
 #include <asm/of_platform.h>
 #include <asm/io.h>
 
 
 #define MODULE_NAME "pasemi_rng"
 
-static int pasemi_rng_data_present(struct hwrng *rng)
+static int pasemi_rng_data_present(struct hwrng *rng, int wait)
 {
        void __iomem *rng_regs = (void __iomem *)rng->priv;
-
-       return (in_le32(rng_regs + SDCRNG_CTL_REG)
-               & SDCRNG_CTL_FVLD_M) ? 1 : 0;
+       int data, i;
+
+       for (i = 0; i < 20; i++) {
+               data = (in_le32(rng_regs + SDCRNG_CTL_REG)
+                       & SDCRNG_CTL_FVLD_M) ? 1 : 0;
+               if (data || !wait)
+                       break;
+               udelay(10);
+       }
+       return data;
 }
 
 static int pasemi_rng_data_read(struct hwrng *rng, u32 *data)
index ec435cb..868e39f 100644 (file)
@@ -27,6 +27,7 @@
 #include <linux/module.h>
 #include <linux/kernel.h>
 #include <linux/hw_random.h>
+#include <linux/delay.h>
 #include <asm/io.h>
 #include <asm/msr.h>
 #include <asm/cpufeature.h>
@@ -77,10 +78,11 @@ static inline u32 xstore(u32 *addr, u32 edx_in)
        return eax_out;
 }
 
-static int via_rng_data_present(struct hwrng *rng)
+static int via_rng_data_present(struct hwrng *rng, int wait)
 {
        u32 bytes_out;
        u32 *via_rng_datum = (u32 *)(&rng->priv);
+       int i;
 
        /* We choose the recommended 1-byte-per-instruction RNG rate,
         * for greater randomness at the expense of speed.  Larger
@@ -95,12 +97,15 @@ static int via_rng_data_present(struct hwrng *rng)
         * completes.
         */
 
-       *via_rng_datum = 0; /* paranoia, not really necessary */
-       bytes_out = xstore(via_rng_datum, VIA_RNG_CHUNK_1);
-       bytes_out &= VIA_XSTORE_CNT_MASK;
-       if (bytes_out == 0)
-               return 0;
-       return 1;
+       for (i = 0; i < 20; i++) {
+               *via_rng_datum = 0; /* paranoia, not really necessary */
+               bytes_out = xstore(via_rng_datum, VIA_RNG_CHUNK_1);
+               bytes_out &= VIA_XSTORE_CNT_MASK;
+               if (bytes_out || !wait)
+                       break;
+               udelay(10);
+       }
+       return bytes_out ? 1 : 0;
 }
 
 static int via_rng_data_read(struct hwrng *rng, u32 *data)
index ddd3a25..74bd599 100644 (file)
@@ -83,4 +83,15 @@ config ZCRYPT_MONOLITHIC
          that contains all parts of the crypto device driver (ap bus,
          request router and all the card drivers).
 
+config CRYPTO_DEV_HIFN_795X
+       tristate "Driver for HIFN 795x crypto accelerator chips"
+       select CRYPTO_DES
+       select CRYPTO_ALGAPI
+       select CRYPTO_BLKCIPHER
+       depends on PCI
+       help
+         This option enables support for the HIFN 795x crypto adapters.
+
 endif # CRYPTO_HW
index d070030..c0327f0 100644 (file)
@@ -1,3 +1,4 @@
 obj-$(CONFIG_CRYPTO_DEV_PADLOCK_AES) += padlock-aes.o
 obj-$(CONFIG_CRYPTO_DEV_PADLOCK_SHA) += padlock-sha.o
 obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o
+obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o
index 711e246..4801162 100644 (file)
 #include <linux/crypto.h>
 #include <linux/spinlock.h>
 #include <crypto/algapi.h>
+#include <crypto/aes.h>
 
 #include <asm/io.h>
 #include <asm/delay.h>
 
 #include "geode-aes.h"
 
-/* Register definitions */
-
-#define AES_CTRLA_REG  0x0000
-
-#define AES_CTRL_START     0x01
-#define AES_CTRL_DECRYPT   0x00
-#define AES_CTRL_ENCRYPT   0x02
-#define AES_CTRL_WRKEY     0x04
-#define AES_CTRL_DCA       0x08
-#define AES_CTRL_SCA       0x10
-#define AES_CTRL_CBC       0x20
-
-#define AES_INTR_REG  0x0008
-
-#define AES_INTRA_PENDING (1 << 16)
-#define AES_INTRB_PENDING (1 << 17)
-
-#define AES_INTR_PENDING  (AES_INTRA_PENDING | AES_INTRB_PENDING)
-#define AES_INTR_MASK     0x07
-
-#define AES_SOURCEA_REG   0x0010
-#define AES_DSTA_REG      0x0014
-#define AES_LENA_REG      0x0018
-#define AES_WRITEKEY0_REG 0x0030
-#define AES_WRITEIV0_REG  0x0040
-
-/*  A very large counter that is used to gracefully bail out of an
- *  operation in case of trouble
- */
-
-#define AES_OP_TIMEOUT    0x50000
-
 /* Static structures */
 
 static void __iomem * _iobase;
@@ -87,9 +56,10 @@ do_crypt(void *src, void *dst, int len, u32 flags)
        /* Start the operation */
        iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);
 
-       do
+       do {
                status = ioread32(_iobase + AES_INTR_REG);
-       while(!(status & AES_INTRA_PENDING) && --counter);
+               cpu_relax();
+       } while(!(status & AES_INTRA_PENDING) && --counter);
 
        /* Clear the event */
        iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
@@ -101,6 +71,7 @@ geode_aes_crypt(struct geode_aes_op *op)
 {
        u32 flags = 0;
        unsigned long iflags;
+       int ret;
 
        if (op->len == 0)
                return 0;
@@ -129,7 +100,8 @@ geode_aes_crypt(struct geode_aes_op *op)
                _writefield(AES_WRITEKEY0_REG, op->key);
        }
 
-       do_crypt(op->src, op->dst, op->len, flags);
+       ret = do_crypt(op->src, op->dst, op->len, flags);
+       BUG_ON(ret);
 
        if (op->mode == AES_MODE_CBC)
                _readfield(AES_WRITEIV0_REG, op->iv);
@@ -141,18 +113,103 @@ geode_aes_crypt(struct geode_aes_op *op)
 
 /* CRYPTO-API Functions */
 
-static int
-geode_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int len)
+static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
+               unsigned int len)
 {
        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+       unsigned int ret;
 
-       if (len != AES_KEY_LENGTH) {
+       op->keylen = len;
+
+       if (len == AES_KEYSIZE_128) {
+               memcpy(op->key, key, len);
+               return 0;
+       }
+
+       if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
+               /* not supported at all */
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
 
-       memcpy(op->key, key, len);
-       return 0;
+       /*
+        * The requested key size is not supported by HW, do a fallback
+        */
+       op->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+       op->fallback.cip->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
+
+       ret = crypto_cipher_setkey(op->fallback.cip, key, len);
+       if (ret) {
+               tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+               tfm->crt_flags |= (op->fallback.cip->base.crt_flags & CRYPTO_TFM_RES_MASK);
+       }
+       return ret;
+}
+
+static int geode_setkey_blk(struct crypto_tfm *tfm, const u8 *key,
+               unsigned int len)
+{
+       struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+       unsigned int ret;
+
+       op->keylen = len;
+
+       if (len == AES_KEYSIZE_128) {
+               memcpy(op->key, key, len);
+               return 0;
+       }
+
+       if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
+               /* not supported at all */
+               tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+               return -EINVAL;
+       }
+
+       /*
+        * The requested key size is not supported by HW, do a fallback
+        */
+       op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+       op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
+
+       ret = crypto_blkcipher_setkey(op->fallback.blk, key, len);
+       if (ret) {
+               tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+               tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
+       }
+       return ret;
+}
+
+static int fallback_blk_dec(struct blkcipher_desc *desc,
+               struct scatterlist *dst, struct scatterlist *src,
+               unsigned int nbytes)
+{
+       unsigned int ret;
+       struct crypto_blkcipher *tfm;
+       struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
+
+       tfm = desc->tfm;
+       desc->tfm = op->fallback.blk;
+
+       ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+
+       desc->tfm = tfm;
+       return ret;
+}
+static int fallback_blk_enc(struct blkcipher_desc *desc,
+               struct scatterlist *dst, struct scatterlist *src,
+               unsigned int nbytes)
+{
+       unsigned int ret;
+       struct crypto_blkcipher *tfm;
+       struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
+
+       tfm = desc->tfm;
+       desc->tfm = op->fallback.blk;
+
+       ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+
+       desc->tfm = tfm;
+       return ret;
 }
 
 static void
@@ -160,8 +217,10 @@ geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
 
-       if ((out == NULL) || (in == NULL))
+       if (unlikely(op->keylen != AES_KEYSIZE_128)) {
+               crypto_cipher_encrypt_one(op->fallback.cip, out, in);
                return;
+       }
 
        op->src = (void *) in;
        op->dst = (void *) out;
@@ -179,8 +238,10 @@ geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
 
-       if ((out == NULL) || (in == NULL))
+       if (unlikely(op->keylen != AES_KEYSIZE_128)) {
+               crypto_cipher_decrypt_one(op->fallback.cip, out, in);
                return;
+       }
 
        op->src = (void *) in;
        op->dst = (void *) out;
@@ -192,24 +253,50 @@ geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
        geode_aes_crypt(op);
 }
 
+static int fallback_init_cip(struct crypto_tfm *tfm)
+{
+       const char *name = tfm->__crt_alg->cra_name;
+       struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+       op->fallback.cip = crypto_alloc_cipher(name, 0,
+                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+       if (IS_ERR(op->fallback.cip)) {
+               printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+               return PTR_ERR(op->fallback.cip);
+       }
+
+       return 0;
+}
+
+static void fallback_exit_cip(struct crypto_tfm *tfm)
+{
+       struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+       crypto_free_cipher(op->fallback.cip);
+       op->fallback.cip = NULL;
+}
 
 static struct crypto_alg geode_alg = {
-       .cra_name               =       "aes",
-       .cra_driver_name        =       "geode-aes-128",
-       .cra_priority           =       300,
-       .cra_alignmask          =       15,
-       .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
+       .cra_name                       =       "aes",
+       .cra_driver_name        =       "geode-aes",
+       .cra_priority           =       300,
+       .cra_alignmask          =       15,
+       .cra_flags                      =       CRYPTO_ALG_TYPE_CIPHER |
+                                                       CRYPTO_ALG_NEED_FALLBACK,
+       .cra_init                       =       fallback_init_cip,
+       .cra_exit                       =       fallback_exit_cip,
        .cra_blocksize          =       AES_MIN_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct geode_aes_op),
-       .cra_module             =       THIS_MODULE,
-       .cra_list               =       LIST_HEAD_INIT(geode_alg.cra_list),
-       .cra_u                  =       {
-               .cipher = {
-                       .cia_min_keysize        =  AES_KEY_LENGTH,
-                       .cia_max_keysize        =  AES_KEY_LENGTH,
-                       .cia_setkey             =  geode_setkey,
-                       .cia_encrypt            =  geode_encrypt,
-                       .cia_decrypt            =  geode_decrypt
+       .cra_module                     =       THIS_MODULE,
+       .cra_list                       =       LIST_HEAD_INIT(geode_alg.cra_list),
+       .cra_u                          =       {
+               .cipher =       {
+                       .cia_min_keysize        =       AES_MIN_KEY_SIZE,
+                       .cia_max_keysize        =       AES_MAX_KEY_SIZE,
+                       .cia_setkey                     =       geode_setkey_cip,
+                       .cia_encrypt            =       geode_encrypt,
+                       .cia_decrypt            =       geode_decrypt
                }
        }
 };
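With the fallback this driver installs, consumers of "aes" and "cbc(aes)" can use 192- and 256-bit keys on Geode without caring where the work lands; a hedged caller-side sketch using the synchronous blkcipher API of this era (buffer setup elided):

/*
 * Hedged illustration only, not part of the patch: a consumer asking for
 * cbc(aes) with a 256-bit key.  On Geode this ends up in the software
 * fallback; the caller never notices.  Assumes <linux/crypto.h>,
 * <linux/err.h> and scatterlists prepared elsewhere.
 */
static int example_cbc_aes256(struct scatterlist *dst, struct scatterlist *src,
                              unsigned int nbytes, const u8 *key, const u8 *iv)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        int err;

        tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_blkcipher_setkey(tfm, key, 32);
        if (!err) {
                desc.tfm = tfm;
                desc.flags = 0;
                crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
                err = crypto_blkcipher_encrypt(&desc, dst, src, nbytes);
        }

        crypto_free_blkcipher(tfm);
        return err;
}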
@@ -223,8 +310,12 @@ geode_cbc_decrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err, ret;
 
+       if (unlikely(op->keylen != AES_KEYSIZE_128))
+               return fallback_blk_dec(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
+       op->iv = walk.iv;
 
        while((nbytes = walk.nbytes)) {
                op->src = walk.src.virt.addr,
@@ -233,13 +324,9 @@ geode_cbc_decrypt(struct blkcipher_desc *desc,
                op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
                op->dir = AES_DIR_DECRYPT;
 
-               memcpy(op->iv, walk.iv, AES_IV_LENGTH);
-
                ret = geode_aes_crypt(op);
 
-               memcpy(walk.iv, op->iv, AES_IV_LENGTH);
                nbytes -= ret;
-
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
 
@@ -255,8 +342,12 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err, ret;
 
+       if (unlikely(op->keylen != AES_KEYSIZE_128))
+               return fallback_blk_enc(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
+       op->iv = walk.iv;
 
        while((nbytes = walk.nbytes)) {
                op->src = walk.src.virt.addr,
@@ -265,8 +356,6 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
                op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
                op->dir = AES_DIR_ENCRYPT;
 
-               memcpy(op->iv, walk.iv, AES_IV_LENGTH);
-
                ret = geode_aes_crypt(op);
                nbytes -= ret;
                err = blkcipher_walk_done(desc, &walk, nbytes);
@@ -275,22 +364,49 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
        return err;
 }
 
+static int fallback_init_blk(struct crypto_tfm *tfm)
+{
+       const char *name = tfm->__crt_alg->cra_name;
+       struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+       op->fallback.blk = crypto_alloc_blkcipher(name, 0,
+                       CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+       if (IS_ERR(op->fallback.blk)) {
+               printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+               return PTR_ERR(op->fallback.blk);
+       }
+
+       return 0;
+}
+
+static void fallback_exit_blk(struct crypto_tfm *tfm)
+{
+       struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+       crypto_free_blkcipher(op->fallback.blk);
+       op->fallback.blk = NULL;
+}
+
 static struct crypto_alg geode_cbc_alg = {
        .cra_name               =       "cbc(aes)",
-       .cra_driver_name        =       "cbc-aes-geode-128",
+       .cra_driver_name        =       "cbc-aes-geode",
        .cra_priority           =       400,
-       .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
+       .cra_flags                      =       CRYPTO_ALG_TYPE_BLKCIPHER |
+                                                       CRYPTO_ALG_NEED_FALLBACK,
+       .cra_init                       =       fallback_init_blk,
+       .cra_exit                       =       fallback_exit_blk,
        .cra_blocksize          =       AES_MIN_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct geode_aes_op),
        .cra_alignmask          =       15,
-       .cra_type               =       &crypto_blkcipher_type,
-       .cra_module             =       THIS_MODULE,
-       .cra_list               =       LIST_HEAD_INIT(geode_cbc_alg.cra_list),
-       .cra_u                  =       {
-               .blkcipher = {
-                       .min_keysize            =       AES_KEY_LENGTH,
-                       .max_keysize            =       AES_KEY_LENGTH,
-                       .setkey                 =       geode_setkey,
+       .cra_type                       =       &crypto_blkcipher_type,
+       .cra_module                     =       THIS_MODULE,
+       .cra_list                       =       LIST_HEAD_INIT(geode_cbc_alg.cra_list),
+       .cra_u                          =       {
+               .blkcipher      =       {
+                       .min_keysize    =       AES_MIN_KEY_SIZE,
+                       .max_keysize    =       AES_MAX_KEY_SIZE,
+                       .setkey                 =       geode_setkey_blk,
                        .encrypt                =       geode_cbc_encrypt,
                        .decrypt                =       geode_cbc_decrypt,
                        .ivsize                 =       AES_IV_LENGTH,
@@ -307,6 +423,9 @@ geode_ecb_decrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err, ret;
 
+       if (unlikely(op->keylen != AES_KEYSIZE_128))
+               return fallback_blk_dec(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
 
@@ -334,6 +453,9 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err, ret;
 
+       if (unlikely(op->keylen != AES_KEYSIZE_128))
+               return fallback_blk_enc(desc, dst, src, nbytes);
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
 
@@ -353,28 +475,31 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
 }
 
 static struct crypto_alg geode_ecb_alg = {
-       .cra_name               =       "ecb(aes)",
-       .cra_driver_name        =       "ecb-aes-geode-128",
+       .cra_name                       =       "ecb(aes)",
+       .cra_driver_name        =       "ecb-aes-geode",
        .cra_priority           =       400,
-       .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
+       .cra_flags                      =       CRYPTO_ALG_TYPE_BLKCIPHER |
+                                                       CRYPTO_ALG_NEED_FALLBACK,
+       .cra_init                       =       fallback_init_blk,
+       .cra_exit                       =       fallback_exit_blk,
        .cra_blocksize          =       AES_MIN_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct geode_aes_op),
        .cra_alignmask          =       15,
-       .cra_type               =       &crypto_blkcipher_type,
-       .cra_module             =       THIS_MODULE,
-       .cra_list               =       LIST_HEAD_INIT(geode_ecb_alg.cra_list),
-       .cra_u                  =       {
-               .blkcipher = {
-                       .min_keysize            =       AES_KEY_LENGTH,
-                       .max_keysize            =       AES_KEY_LENGTH,
-                       .setkey                 =       geode_setkey,
+       .cra_type                       =       &crypto_blkcipher_type,
+       .cra_module                     =       THIS_MODULE,
+       .cra_list                       =       LIST_HEAD_INIT(geode_ecb_alg.cra_list),
+       .cra_u                          =       {
+               .blkcipher      =       {
+                       .min_keysize    =       AES_MIN_KEY_SIZE,
+                       .max_keysize    =       AES_MAX_KEY_SIZE,
+                       .setkey                 =       geode_setkey_blk,
                        .encrypt                =       geode_ecb_encrypt,
                        .decrypt                =       geode_ecb_decrypt,
                }
        }
 };
 
-static void
+static void __devexit
 geode_aes_remove(struct pci_dev *dev)
 {
        crypto_unregister_alg(&geode_alg);
@@ -389,7 +514,7 @@ geode_aes_remove(struct pci_dev *dev)
 }
 
 
-static int
+static int __devinit
 geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
 {
        int ret;
@@ -397,7 +522,7 @@ geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
        if ((ret = pci_enable_device(dev)))
                return ret;
 
-       if ((ret = pci_request_regions(dev, "geode-aes-128")))
+       if ((ret = pci_request_regions(dev, "geode-aes")))
                goto eenable;
 
        _iobase = pci_iomap(dev, 0, 0);
@@ -472,7 +597,6 @@ geode_aes_exit(void)
 MODULE_AUTHOR("Advanced Micro Devices, Inc.");
 MODULE_DESCRIPTION("Geode LX Hardware AES driver");
 MODULE_LICENSE("GPL");
-MODULE_ALIAS("aes");
 
 module_init(geode_aes_init);
 module_exit(geode_aes_exit);
index f479686..f1855b5 100644 (file)
@@ -9,9 +9,9 @@
 #ifndef _GEODE_AES_H_
 #define _GEODE_AES_H_
 
-#define AES_KEY_LENGTH 16
+/* driver logic flags */
 #define AES_IV_LENGTH  16
-
+#define AES_KEY_LENGTH 16
 #define AES_MIN_BLOCK_SIZE 16
 
 #define AES_MODE_ECB 0
 
 #define AES_FLAGS_HIDDENKEY (1 << 0)
 
+/* Register definitions */
+
+#define AES_CTRLA_REG  0x0000
+
+#define AES_CTRL_START     0x01
+#define AES_CTRL_DECRYPT   0x00
+#define AES_CTRL_ENCRYPT   0x02
+#define AES_CTRL_WRKEY     0x04
+#define AES_CTRL_DCA       0x08
+#define AES_CTRL_SCA       0x10
+#define AES_CTRL_CBC       0x20
+
+#define AES_INTR_REG  0x0008
+
+#define AES_INTRA_PENDING (1 << 16)
+#define AES_INTRB_PENDING (1 << 17)
+
+#define AES_INTR_PENDING  (AES_INTRA_PENDING | AES_INTRB_PENDING)
+#define AES_INTR_MASK     0x07
+
+#define AES_SOURCEA_REG   0x0010
+#define AES_DSTA_REG      0x0014
+#define AES_LENA_REG      0x0018
+#define AES_WRITEKEY0_REG 0x0030
+#define AES_WRITEIV0_REG  0x0040
+
+/*  A very large counter that is used to gracefully bail out of an
+ *  operation in case of trouble
+ */
+
+#define AES_OP_TIMEOUT    0x50000
+
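AES_OP_TIMEOUT exists so the driver can bail out of a hung operation instead of spinning forever. A minimal sketch of the bounded poll this enables (the real loop lives in geode-aes.c, not in this header; `flags` and `_iobase` are taken as given here):

	u32 status;
	u32 counter = 0;

	/* Kick the engine, then poll the interrupt register until the
	 * pending bit shows up or AES_OP_TIMEOUT iterations have elapsed. */
	iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);
	do {
		status = ioread32(_iobase + AES_INTR_REG);
		counter++;
	} while (!(status & AES_INTRA_PENDING) && counter < AES_OP_TIMEOUT);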
 struct geode_aes_op {
 
        void *src;
@@ -33,7 +65,13 @@ struct geode_aes_op {
        int len;
 
        u8 key[AES_KEY_LENGTH];
-       u8 iv[AES_IV_LENGTH];
+       u8 *iv;
+
+       union {
+               struct crypto_blkcipher *blk;
+               struct crypto_cipher *cip;
+       } fallback;
+       u32 keylen;
 };
 
 #endif
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
new file mode 100644 (file)
index 0000000..16413e5
--- /dev/null
@@ -0,0 +1,2838 @@
+/*
+ * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
+ * All rights reserved.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ */
+
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/moduleparam.h>
+#include <linux/mod_devicetable.h>
+#include <linux/interrupt.h>
+#include <linux/pci.h>
+#include <linux/slab.h>
+#include <linux/delay.h>
+#include <linux/mm.h>
+#include <linux/dma-mapping.h>
+#include <linux/scatterlist.h>
+#include <linux/highmem.h>
+#include <linux/interrupt.h>
+#include <linux/crypto.h>
+#include <linux/hw_random.h>
+#include <linux/ktime.h>
+
+#include <crypto/algapi.h>
+#include <crypto/des.h>
+
+#include <asm/kmap_types.h>
+
+#undef dprintk
+
+#define HIFN_TEST
+//#define HIFN_DEBUG
+
+#ifdef HIFN_DEBUG
+#define dprintk(f, a...)       printk(f, ##a)
+#else
+#define dprintk(f, a...)       do {} while (0)
+#endif
+
+static char hifn_pll_ref[sizeof("extNNN")] = "ext";
+module_param_string(hifn_pll_ref, hifn_pll_ref, sizeof(hifn_pll_ref), 0444);
+MODULE_PARM_DESC(hifn_pll_ref,
+                "PLL reference clock (pci[freq] or ext[freq], default ext)");
+
+static atomic_t hifn_dev_number;
+
+#define ACRYPTO_OP_DECRYPT     0
+#define ACRYPTO_OP_ENCRYPT     1
+#define ACRYPTO_OP_HMAC                2
+#define ACRYPTO_OP_RNG         3
+
+#define ACRYPTO_MODE_ECB               0
+#define ACRYPTO_MODE_CBC               1
+#define ACRYPTO_MODE_CFB               2
+#define ACRYPTO_MODE_OFB               3
+
+#define ACRYPTO_TYPE_AES_128   0
+#define ACRYPTO_TYPE_AES_192   1
+#define ACRYPTO_TYPE_AES_256   2
+#define ACRYPTO_TYPE_3DES      3
+#define ACRYPTO_TYPE_DES       4
+
+#define PCI_VENDOR_ID_HIFN             0x13A3
+#define PCI_DEVICE_ID_HIFN_7955                0x0020
+#define        PCI_DEVICE_ID_HIFN_7956         0x001d
+
+/* I/O region sizes */
+
+#define HIFN_BAR0_SIZE                 0x1000
+#define HIFN_BAR1_SIZE                 0x2000
+#define HIFN_BAR2_SIZE                 0x8000
+
+/* DMA registers */
+
+#define HIFN_DMA_CRA                   0x0C    /* DMA Command Ring Address */
+#define HIFN_DMA_SDRA                  0x1C    /* DMA Source Data Ring Address */
+#define HIFN_DMA_RRA                   0x2C    /* DMA Result Ring Address */
+#define HIFN_DMA_DDRA                  0x3C    /* DMA Destination Data Ring Address */
+#define HIFN_DMA_STCTL                 0x40    /* DMA Status and Control */
+#define HIFN_DMA_INTREN                0x44    /* DMA Interrupt Enable */
+#define HIFN_DMA_CFG1                  0x48    /* DMA Configuration #1 */
+#define HIFN_DMA_CFG2                  0x6C    /* DMA Configuration #2 */
+#define HIFN_CHIP_ID                   0x98    /* Chip ID */
+
+/*
+ * Processing Unit Registers (offset from BASEREG0)
+ */
+#define        HIFN_0_PUDATA           0x00    /* Processing Unit Data */
+#define        HIFN_0_PUCTRL           0x04    /* Processing Unit Control */
+#define        HIFN_0_PUISR            0x08    /* Processing Unit Interrupt Status */
+#define        HIFN_0_PUCNFG           0x0c    /* Processing Unit Configuration */
+#define        HIFN_0_PUIER            0x10    /* Processing Unit Interrupt Enable */
+#define        HIFN_0_PUSTAT           0x14    /* Processing Unit Status/Chip ID */
+#define        HIFN_0_FIFOSTAT         0x18    /* FIFO Status */
+#define        HIFN_0_FIFOCNFG         0x1c    /* FIFO Configuration */
+#define        HIFN_0_SPACESIZE        0x20    /* Register space size */
+
+/* Processing Unit Control Register (HIFN_0_PUCTRL) */
+#define        HIFN_PUCTRL_CLRSRCFIFO  0x0010  /* clear source fifo */
+#define        HIFN_PUCTRL_STOP        0x0008  /* stop pu */
+#define        HIFN_PUCTRL_LOCKRAM     0x0004  /* lock ram */
+#define        HIFN_PUCTRL_DMAENA      0x0002  /* enable dma */
+#define        HIFN_PUCTRL_RESET       0x0001  /* Reset processing unit */
+
+/* Processing Unit Interrupt Status Register (HIFN_0_PUISR) */
+#define        HIFN_PUISR_CMDINVAL     0x8000  /* Invalid command interrupt */
+#define        HIFN_PUISR_DATAERR      0x4000  /* Data error interrupt */
+#define        HIFN_PUISR_SRCFIFO      0x2000  /* Source FIFO ready interrupt */
+#define        HIFN_PUISR_DSTFIFO      0x1000  /* Destination FIFO ready interrupt */
+#define        HIFN_PUISR_DSTOVER      0x0200  /* Destination overrun interrupt */
+#define        HIFN_PUISR_SRCCMD       0x0080  /* Source command interrupt */
+#define        HIFN_PUISR_SRCCTX       0x0040  /* Source context interrupt */
+#define        HIFN_PUISR_SRCDATA      0x0020  /* Source data interrupt */
+#define        HIFN_PUISR_DSTDATA      0x0010  /* Destination data interrupt */
+#define        HIFN_PUISR_DSTRESULT    0x0004  /* Destination result interrupt */
+
+/* Processing Unit Configuration Register (HIFN_0_PUCNFG) */
+#define        HIFN_PUCNFG_DRAMMASK    0xe000  /* DRAM size mask */
+#define        HIFN_PUCNFG_DSZ_256K    0x0000  /* 256k dram */
+#define        HIFN_PUCNFG_DSZ_512K    0x2000  /* 512k dram */
+#define        HIFN_PUCNFG_DSZ_1M      0x4000  /* 1m dram */
+#define        HIFN_PUCNFG_DSZ_2M      0x6000  /* 2m dram */
+#define        HIFN_PUCNFG_DSZ_4M      0x8000  /* 4m dram */
+#define        HIFN_PUCNFG_DSZ_8M      0xa000  /* 8m dram */
+#define        HIFN_PUNCFG_DSZ_16M     0xc000  /* 16m dram */
+#define        HIFN_PUCNFG_DSZ_32M     0xe000  /* 32m dram */
+#define        HIFN_PUCNFG_DRAMREFRESH 0x1800  /* DRAM refresh rate mask */
+#define        HIFN_PUCNFG_DRFR_512    0x0000  /* 512 divisor of ECLK */
+#define        HIFN_PUCNFG_DRFR_256    0x0800  /* 256 divisor of ECLK */
+#define        HIFN_PUCNFG_DRFR_128    0x1000  /* 128 divisor of ECLK */
+#define        HIFN_PUCNFG_TCALLPHASES 0x0200  /* your guess is as good as mine... */
+#define        HIFN_PUCNFG_TCDRVTOTEM  0x0100  /* your guess is as good as mine... */
+#define        HIFN_PUCNFG_BIGENDIAN   0x0080  /* DMA big endian mode */
+#define        HIFN_PUCNFG_BUS32       0x0040  /* Bus width 32bits */
+#define        HIFN_PUCNFG_BUS16       0x0000  /* Bus width 16 bits */
+#define        HIFN_PUCNFG_CHIPID      0x0020  /* Allow chipid from PUSTAT */
+#define        HIFN_PUCNFG_DRAM        0x0010  /* Context RAM is DRAM */
+#define        HIFN_PUCNFG_SRAM        0x0000  /* Context RAM is SRAM */
+#define        HIFN_PUCNFG_COMPSING    0x0004  /* Enable single compression context */
+#define        HIFN_PUCNFG_ENCCNFG     0x0002  /* Encryption configuration */
+
+/* Processing Unit Interrupt Enable Register (HIFN_0_PUIER) */
+#define        HIFN_PUIER_CMDINVAL     0x8000  /* Invalid command interrupt */
+#define        HIFN_PUIER_DATAERR      0x4000  /* Data error interrupt */
+#define        HIFN_PUIER_SRCFIFO      0x2000  /* Source FIFO ready interrupt */
+#define        HIFN_PUIER_DSTFIFO      0x1000  /* Destination FIFO ready interrupt */
+#define        HIFN_PUIER_DSTOVER      0x0200  /* Destination overrun interrupt */
+#define        HIFN_PUIER_SRCCMD       0x0080  /* Source command interrupt */
+#define        HIFN_PUIER_SRCCTX       0x0040  /* Source context interrupt */
+#define        HIFN_PUIER_SRCDATA      0x0020  /* Source data interrupt */
+#define        HIFN_PUIER_DSTDATA      0x0010  /* Destination data interrupt */
+#define        HIFN_PUIER_DSTRESULT    0x0004  /* Destination result interrupt */
+
+/* Processing Unit Status Register/Chip ID (HIFN_0_PUSTAT) */
+#define        HIFN_PUSTAT_CMDINVAL    0x8000  /* Invalid command interrupt */
+#define        HIFN_PUSTAT_DATAERR     0x4000  /* Data error interrupt */
+#define        HIFN_PUSTAT_SRCFIFO     0x2000  /* Source FIFO ready interrupt */
+#define        HIFN_PUSTAT_DSTFIFO     0x1000  /* Destination FIFO ready interrupt */
+#define        HIFN_PUSTAT_DSTOVER     0x0200  /* Destination overrun interrupt */
+#define        HIFN_PUSTAT_SRCCMD      0x0080  /* Source command interrupt */
+#define        HIFN_PUSTAT_SRCCTX      0x0040  /* Source context interrupt */
+#define        HIFN_PUSTAT_SRCDATA     0x0020  /* Source data interrupt */
+#define        HIFN_PUSTAT_DSTDATA     0x0010  /* Destination data interrupt */
+#define        HIFN_PUSTAT_DSTRESULT   0x0004  /* Destination result interrupt */
+#define        HIFN_PUSTAT_CHIPREV     0x00ff  /* Chip revision mask */
+#define        HIFN_PUSTAT_CHIPENA     0xff00  /* Chip enabled mask */
+#define        HIFN_PUSTAT_ENA_2       0x1100  /* Level 2 enabled */
+#define        HIFN_PUSTAT_ENA_1       0x1000  /* Level 1 enabled */
+#define        HIFN_PUSTAT_ENA_0       0x3000  /* Level 0 enabled */
+#define        HIFN_PUSTAT_REV_2       0x0020  /* 7751 PT6/2 */
+#define        HIFN_PUSTAT_REV_3       0x0030  /* 7751 PT6/3 */
+
+/* FIFO Status Register (HIFN_0_FIFOSTAT) */
+#define        HIFN_FIFOSTAT_SRC       0x7f00  /* Source FIFO available */
+#define        HIFN_FIFOSTAT_DST       0x007f  /* Destination FIFO available */
+
+/* FIFO Configuration Register (HIFN_0_FIFOCNFG) */
+#define        HIFN_FIFOCNFG_THRESHOLD 0x0400  /* must be written as 1 */
+
+/*
+ * DMA Interface Registers (offset from BASEREG1)
+ */
+#define        HIFN_1_DMA_CRAR         0x0c    /* DMA Command Ring Address */
+#define        HIFN_1_DMA_SRAR         0x1c    /* DMA Source Ring Address */
+#define        HIFN_1_DMA_RRAR         0x2c    /* DMA Result Ring Address */
+#define        HIFN_1_DMA_DRAR         0x3c    /* DMA Destination Ring Address */
+#define        HIFN_1_DMA_CSR          0x40    /* DMA Status and Control */
+#define        HIFN_1_DMA_IER          0x44    /* DMA Interrupt Enable */
+#define        HIFN_1_DMA_CNFG         0x48    /* DMA Configuration */
+#define        HIFN_1_PLL              0x4c    /* 795x: PLL config */
+#define        HIFN_1_7811_RNGENA      0x60    /* 7811: rng enable */
+#define        HIFN_1_7811_RNGCFG      0x64    /* 7811: rng config */
+#define        HIFN_1_7811_RNGDAT      0x68    /* 7811: rng data */
+#define        HIFN_1_7811_RNGSTS      0x6c    /* 7811: rng status */
+#define        HIFN_1_7811_MIPSRST     0x94    /* 7811: MIPS reset */
+#define        HIFN_1_REVID            0x98    /* Revision ID */
+#define        HIFN_1_UNLOCK_SECRET1   0xf4
+#define        HIFN_1_UNLOCK_SECRET2   0xfc
+#define        HIFN_1_PUB_RESET        0x204   /* Public/RNG Reset */
+#define        HIFN_1_PUB_BASE         0x300   /* Public Base Address */
+#define        HIFN_1_PUB_OPLEN        0x304   /* Public Operand Length */
+#define        HIFN_1_PUB_OP           0x308   /* Public Operand */
+#define        HIFN_1_PUB_STATUS       0x30c   /* Public Status */
+#define        HIFN_1_PUB_IEN          0x310   /* Public Interrupt enable */
+#define        HIFN_1_RNG_CONFIG       0x314   /* RNG config */
+#define        HIFN_1_RNG_DATA         0x318   /* RNG data */
+#define        HIFN_1_PUB_MEM          0x400   /* start of Public key memory */
+#define        HIFN_1_PUB_MEMEND       0xbff   /* end of Public key memory */
+
+/* DMA Status and Control Register (HIFN_1_DMA_CSR) */
+#define        HIFN_DMACSR_D_CTRLMASK  0xc0000000      /* Destination Ring Control */
+#define        HIFN_DMACSR_D_CTRL_NOP  0x00000000      /* Dest. Control: no-op */
+#define        HIFN_DMACSR_D_CTRL_DIS  0x40000000      /* Dest. Control: disable */
+#define        HIFN_DMACSR_D_CTRL_ENA  0x80000000      /* Dest. Control: enable */
+#define        HIFN_DMACSR_D_ABORT     0x20000000      /* Destination Ring PCIAbort */
+#define        HIFN_DMACSR_D_DONE      0x10000000      /* Destination Ring Done */
+#define        HIFN_DMACSR_D_LAST      0x08000000      /* Destination Ring Last */
+#define        HIFN_DMACSR_D_WAIT      0x04000000      /* Destination Ring Waiting */
+#define        HIFN_DMACSR_D_OVER      0x02000000      /* Destination Ring Overflow */
+#define        HIFN_DMACSR_R_CTRL      0x00c00000      /* Result Ring Control */
+#define        HIFN_DMACSR_R_CTRL_NOP  0x00000000      /* Result Control: no-op */
+#define        HIFN_DMACSR_R_CTRL_DIS  0x00400000      /* Result Control: disable */
+#define        HIFN_DMACSR_R_CTRL_ENA  0x00800000      /* Result Control: enable */
+#define        HIFN_DMACSR_R_ABORT     0x00200000      /* Result Ring PCI Abort */
+#define        HIFN_DMACSR_R_DONE      0x00100000      /* Result Ring Done */
+#define        HIFN_DMACSR_R_LAST      0x00080000      /* Result Ring Last */
+#define        HIFN_DMACSR_R_WAIT      0x00040000      /* Result Ring Waiting */
+#define        HIFN_DMACSR_R_OVER      0x00020000      /* Result Ring Overflow */
+#define        HIFN_DMACSR_S_CTRL      0x0000c000      /* Source Ring Control */
+#define        HIFN_DMACSR_S_CTRL_NOP  0x00000000      /* Source Control: no-op */
+#define        HIFN_DMACSR_S_CTRL_DIS  0x00004000      /* Source Control: disable */
+#define        HIFN_DMACSR_S_CTRL_ENA  0x00008000      /* Source Control: enable */
+#define        HIFN_DMACSR_S_ABORT     0x00002000      /* Source Ring PCI Abort */
+#define        HIFN_DMACSR_S_DONE      0x00001000      /* Source Ring Done */
+#define        HIFN_DMACSR_S_LAST      0x00000800      /* Source Ring Last */
+#define        HIFN_DMACSR_S_WAIT      0x00000400      /* Source Ring Waiting */
+#define        HIFN_DMACSR_ILLW        0x00000200      /* Illegal write (7811 only) */
+#define        HIFN_DMACSR_ILLR        0x00000100      /* Illegal read (7811 only) */
+#define        HIFN_DMACSR_C_CTRL      0x000000c0      /* Command Ring Control */
+#define        HIFN_DMACSR_C_CTRL_NOP  0x00000000      /* Command Control: no-op */
+#define        HIFN_DMACSR_C_CTRL_DIS  0x00000040      /* Command Control: disable */
+#define        HIFN_DMACSR_C_CTRL_ENA  0x00000080      /* Command Control: enable */
+#define        HIFN_DMACSR_C_ABORT     0x00000020      /* Command Ring PCI Abort */
+#define        HIFN_DMACSR_C_DONE      0x00000010      /* Command Ring Done */
+#define        HIFN_DMACSR_C_LAST      0x00000008      /* Command Ring Last */
+#define        HIFN_DMACSR_C_WAIT      0x00000004      /* Command Ring Waiting */
+#define        HIFN_DMACSR_PUBDONE     0x00000002      /* Public op done (7951 only) */
+#define        HIFN_DMACSR_ENGINE      0x00000001      /* Command Ring Engine IRQ */
+
+/* DMA Interrupt Enable Register (HIFN_1_DMA_IER) */
+#define        HIFN_DMAIER_D_ABORT     0x20000000      /* Destination Ring PCIAbort */
+#define        HIFN_DMAIER_D_DONE      0x10000000      /* Destination Ring Done */
+#define        HIFN_DMAIER_D_LAST      0x08000000      /* Destination Ring Last */
+#define        HIFN_DMAIER_D_WAIT      0x04000000      /* Destination Ring Waiting */
+#define        HIFN_DMAIER_D_OVER      0x02000000      /* Destination Ring Overflow */
+#define        HIFN_DMAIER_R_ABORT     0x00200000      /* Result Ring PCI Abort */
+#define        HIFN_DMAIER_R_DONE      0x00100000      /* Result Ring Done */
+#define        HIFN_DMAIER_R_LAST      0x00080000      /* Result Ring Last */
+#define        HIFN_DMAIER_R_WAIT      0x00040000      /* Result Ring Waiting */
+#define        HIFN_DMAIER_R_OVER      0x00020000      /* Result Ring Overflow */
+#define        HIFN_DMAIER_S_ABORT     0x00002000      /* Source Ring PCI Abort */
+#define        HIFN_DMAIER_S_DONE      0x00001000      /* Source Ring Done */
+#define        HIFN_DMAIER_S_LAST      0x00000800      /* Source Ring Last */
+#define        HIFN_DMAIER_S_WAIT      0x00000400      /* Source Ring Waiting */
+#define        HIFN_DMAIER_ILLW        0x00000200      /* Illegal write (7811 only) */
+#define        HIFN_DMAIER_ILLR        0x00000100      /* Illegal read (7811 only) */
+#define        HIFN_DMAIER_C_ABORT     0x00000020      /* Command Ring PCI Abort */
+#define        HIFN_DMAIER_C_DONE      0x00000010      /* Command Ring Done */
+#define        HIFN_DMAIER_C_LAST      0x00000008      /* Command Ring Last */
+#define        HIFN_DMAIER_C_WAIT      0x00000004      /* Command Ring Waiting */
+#define        HIFN_DMAIER_PUBDONE     0x00000002      /* public op done (7951 only) */
+#define        HIFN_DMAIER_ENGINE      0x00000001      /* Engine IRQ */
+
+/* DMA Configuration Register (HIFN_1_DMA_CNFG) */
+#define        HIFN_DMACNFG_BIGENDIAN  0x10000000      /* big endian mode */
+#define        HIFN_DMACNFG_POLLFREQ   0x00ff0000      /* Poll frequency mask */
+#define        HIFN_DMACNFG_UNLOCK     0x00000800
+#define        HIFN_DMACNFG_POLLINVAL  0x00000700      /* Invalid Poll Scalar */
+#define        HIFN_DMACNFG_LAST       0x00000010      /* Host control LAST bit */
+#define        HIFN_DMACNFG_MODE       0x00000004      /* DMA mode */
+#define        HIFN_DMACNFG_DMARESET   0x00000002      /* DMA Reset # */
+#define        HIFN_DMACNFG_MSTRESET   0x00000001      /* Master Reset # */
+
+/* PLL configuration register */
+#define HIFN_PLL_REF_CLK_HBI   0x00000000      /* HBI reference clock */
+#define HIFN_PLL_REF_CLK_PLL   0x00000001      /* PLL reference clock */
+#define HIFN_PLL_BP            0x00000002      /* Reference clock bypass */
+#define HIFN_PLL_PK_CLK_HBI    0x00000000      /* PK engine HBI clock */
+#define HIFN_PLL_PK_CLK_PLL    0x00000008      /* PK engine PLL clock */
+#define HIFN_PLL_PE_CLK_HBI    0x00000000      /* PE engine HBI clock */
+#define HIFN_PLL_PE_CLK_PLL    0x00000010      /* PE engine PLL clock */
+#define HIFN_PLL_RESERVED_1    0x00000400      /* Reserved bit, must be 1 */
+#define HIFN_PLL_ND_SHIFT      11              /* Clock multiplier shift */
+#define HIFN_PLL_ND_MULT_2     0x00000000      /* PLL clock multiplier 2 */
+#define HIFN_PLL_ND_MULT_4     0x00000800      /* PLL clock multiplier 4 */
+#define HIFN_PLL_ND_MULT_6     0x00001000      /* PLL clock multiplier 6 */
+#define HIFN_PLL_ND_MULT_8     0x00001800      /* PLL clock multiplier 8 */
+#define HIFN_PLL_ND_MULT_10    0x00002000      /* PLL clock multiplier 10 */
+#define HIFN_PLL_ND_MULT_12    0x00002800      /* PLL clock multiplier 12 */
+#define HIFN_PLL_IS_1_8                0x00000000      /* charge pump (mult. 1-8) */
+#define HIFN_PLL_IS_9_12       0x00010000      /* charge pump (mult. 9-12) */
+
+#define HIFN_PLL_FCK_MAX       266             /* Maximum PLL frequency */
+
+/* Public key reset register (HIFN_1_PUB_RESET) */
+#define        HIFN_PUBRST_RESET       0x00000001      /* reset public/rng unit */
+
+/* Public base address register (HIFN_1_PUB_BASE) */
+#define        HIFN_PUBBASE_ADDR       0x00003fff      /* base address */
+
+/* Public operand length register (HIFN_1_PUB_OPLEN) */
+#define        HIFN_PUBOPLEN_MOD_M     0x0000007f      /* modulus length mask */
+#define        HIFN_PUBOPLEN_MOD_S     0               /* modulus length shift */
+#define        HIFN_PUBOPLEN_EXP_M     0x0003ff80      /* exponent length mask */
+#define        HIFN_PUBOPLEN_EXP_S     7               /* exponent length shift */
+#define        HIFN_PUBOPLEN_RED_M     0x003c0000      /* reducend length mask */
+#define        HIFN_PUBOPLEN_RED_S     18              /* reducend length shift */
+
+/* Public operation register (HIFN_1_PUB_OP) */
+#define        HIFN_PUBOP_AOFFSET_M    0x0000007f      /* A offset mask */
+#define        HIFN_PUBOP_AOFFSET_S    0               /* A offset shift */
+#define        HIFN_PUBOP_BOFFSET_M    0x00000f80      /* B offset mask */
+#define        HIFN_PUBOP_BOFFSET_S    7               /* B offset shift */
+#define        HIFN_PUBOP_MOFFSET_M    0x0003f000      /* M offset mask */
+#define        HIFN_PUBOP_MOFFSET_S    12              /* M offset shift */
+#define        HIFN_PUBOP_OP_MASK      0x003c0000      /* Opcode: */
+#define        HIFN_PUBOP_OP_NOP       0x00000000      /*  NOP */
+#define        HIFN_PUBOP_OP_ADD       0x00040000      /*  ADD */
+#define        HIFN_PUBOP_OP_ADDC      0x00080000      /*  ADD w/carry */
+#define        HIFN_PUBOP_OP_SUB       0x000c0000      /*  SUB */
+#define        HIFN_PUBOP_OP_SUBC      0x00100000      /*  SUB w/carry */
+#define        HIFN_PUBOP_OP_MODADD    0x00140000      /*  Modular ADD */
+#define        HIFN_PUBOP_OP_MODSUB    0x00180000      /*  Modular SUB */
+#define        HIFN_PUBOP_OP_INCA      0x001c0000      /*  INC A */
+#define        HIFN_PUBOP_OP_DECA      0x00200000      /*  DEC A */
+#define        HIFN_PUBOP_OP_MULT      0x00240000      /*  MULT */
+#define        HIFN_PUBOP_OP_MODMULT   0x00280000      /*  Modular MULT */
+#define        HIFN_PUBOP_OP_MODRED    0x002c0000      /*  Modular RED */
+#define        HIFN_PUBOP_OP_MODEXP    0x00300000      /*  Modular EXP */
+
+/* Public status register (HIFN_1_PUB_STATUS) */
+#define        HIFN_PUBSTS_DONE        0x00000001      /* operation done */
+#define        HIFN_PUBSTS_CARRY       0x00000002      /* carry */
+
+/* Public interrupt enable register (HIFN_1_PUB_IEN) */
+#define        HIFN_PUBIEN_DONE        0x00000001      /* operation done interrupt */
+
+/* Random number generator config register (HIFN_1_RNG_CONFIG) */
+#define        HIFN_RNGCFG_ENA         0x00000001      /* enable rng */
+
+#define HIFN_NAMESIZE                  32
+#define HIFN_MAX_RESULT_ORDER          5
+
+#define        HIFN_D_CMD_RSIZE                24*4
+#define        HIFN_D_SRC_RSIZE                80*4
+#define        HIFN_D_DST_RSIZE                80*4
+#define        HIFN_D_RES_RSIZE                24*4
+
+#define HIFN_QUEUE_LENGTH              HIFN_D_CMD_RSIZE-5
+
+#define AES_MIN_KEY_SIZE               16
+#define AES_MAX_KEY_SIZE               32
+
+#define HIFN_DES_KEY_LENGTH            8
+#define HIFN_3DES_KEY_LENGTH           24
+#define HIFN_MAX_CRYPT_KEY_LENGTH      AES_MAX_KEY_SIZE
+#define HIFN_IV_LENGTH                 8
+#define HIFN_AES_IV_LENGTH             16
+#define        HIFN_MAX_IV_LENGTH              HIFN_AES_IV_LENGTH
+
+#define HIFN_MAC_KEY_LENGTH            64
+#define HIFN_MD5_LENGTH                        16
+#define HIFN_SHA1_LENGTH               20
+#define HIFN_MAC_TRUNC_LENGTH          12
+
+#define        HIFN_MAX_COMMAND                (8 + 8 + 8 + 64 + 260)
+#define        HIFN_MAX_RESULT                 (8 + 4 + 4 + 20 + 4)
+#define HIFN_USED_RESULT               12
+
+struct hifn_desc
+{
+       volatile u32            l;
+       volatile u32            p;
+};
+
+struct hifn_dma {
+       struct hifn_desc        cmdr[HIFN_D_CMD_RSIZE+1];
+       struct hifn_desc        srcr[HIFN_D_SRC_RSIZE+1];
+       struct hifn_desc        dstr[HIFN_D_DST_RSIZE+1];
+       struct hifn_desc        resr[HIFN_D_RES_RSIZE+1];
+
+       u8                      command_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_COMMAND];
+       u8                      result_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_RESULT];
+
+       u64                     test_src, test_dst;
+
+       /*
+        *  Our current positions for insertion and removal from the descriptor
+        *  rings.
+        */
+       volatile int            cmdi, srci, dsti, resi;
+       volatile int            cmdu, srcu, dstu, resu;
+       int                     cmdk, srck, dstk, resk;
+};
+
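cmdi/srci/dsti/resi are the producer positions into each ring, the *u fields count in-flight entries, and the *k fields track the cleanup side. Each ring reserves its last slot for a JUMP descriptor back to entry 0; a sketch of the wrap-around step that the setup helpers further down open-code (ring_advance is a hypothetical helper, not part of the driver; the HIFN_D_* bits are defined just below):

static inline int ring_advance(struct hifn_desc *ring, int idx, int size)
{
	if (++idx == size) {
		/* Terminal slot: a valid JUMP descriptor that sends the
		 * engine back to the start of the ring. */
		ring[idx].l = __cpu_to_le32(HIFN_D_VALID | HIFN_D_JUMP |
					    HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
		idx = 0;
	}
	return idx;
}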
+#define HIFN_FLAG_CMD_BUSY     (1<<0)
+#define HIFN_FLAG_SRC_BUSY     (1<<1)
+#define HIFN_FLAG_DST_BUSY     (1<<2)
+#define HIFN_FLAG_RES_BUSY     (1<<3)
+#define HIFN_FLAG_OLD_KEY      (1<<4)
+
+#define HIFN_DEFAULT_ACTIVE_NUM        5
+
+struct hifn_device
+{
+       char                    name[HIFN_NAMESIZE];
+
+       int                     irq;
+
+       struct pci_dev          *pdev;
+       void __iomem            *bar[3];
+
+       unsigned long           result_mem;
+       dma_addr_t              dst;
+
+       void                    *desc_virt;
+       dma_addr_t              desc_dma;
+
+       u32                     dmareg;
+
+       void                    *sa[HIFN_D_RES_RSIZE];
+
+       spinlock_t              lock;
+
+       void                    *priv;
+
+       u32                     flags;
+       int                     active, started;
+       struct delayed_work     work;
+       unsigned long           reset;
+       unsigned long           success;
+       unsigned long           prev_success;
+
+       u8                      snum;
+
+       struct tasklet_struct   tasklet;
+
+       struct crypto_queue     queue;
+       struct list_head        alg_list;
+
+       unsigned int            pk_clk_freq;
+
+#if defined(CONFIG_HW_RANDOM) || defined(CONFIG_HW_RANDOM_MODULE)
+       unsigned int            rng_wait_time;
+       ktime_t                 rngtime;
+       struct hwrng            rng;
+#endif
+};
+
+#define        HIFN_D_LENGTH                   0x0000ffff
+#define        HIFN_D_NOINVALID                0x01000000
+#define        HIFN_D_MASKDONEIRQ              0x02000000
+#define        HIFN_D_DESTOVER                 0x04000000
+#define        HIFN_D_OVER                     0x08000000
+#define        HIFN_D_LAST                     0x20000000
+#define        HIFN_D_JUMP                     0x40000000
+#define        HIFN_D_VALID                    0x80000000
+
+struct hifn_base_command
+{
+       volatile u16            masks;
+       volatile u16            session_num;
+       volatile u16            total_source_count;
+       volatile u16            total_dest_count;
+};
+
+#define        HIFN_BASE_CMD_COMP              0x0100  /* enable compression engine */
+#define        HIFN_BASE_CMD_PAD               0x0200  /* enable padding engine */
+#define        HIFN_BASE_CMD_MAC               0x0400  /* enable MAC engine */
+#define        HIFN_BASE_CMD_CRYPT             0x0800  /* enable crypt engine */
+#define        HIFN_BASE_CMD_DECODE            0x2000
+#define        HIFN_BASE_CMD_SRCLEN_M          0xc000
+#define        HIFN_BASE_CMD_SRCLEN_S          14
+#define        HIFN_BASE_CMD_DSTLEN_M          0x3000
+#define        HIFN_BASE_CMD_DSTLEN_S          12
+#define        HIFN_BASE_CMD_LENMASK_HI        0x30000
+#define        HIFN_BASE_CMD_LENMASK_LO        0x0ffff
+
+/*
+ * Structure to help build up the command data structure.
+ */
+struct hifn_crypt_command
+{
+       volatile u16            masks;
+       volatile u16            header_skip;
+       volatile u16            source_count;
+       volatile u16            reserved;
+};
+
+#define        HIFN_CRYPT_CMD_ALG_MASK         0x0003          /* algorithm: */
+#define        HIFN_CRYPT_CMD_ALG_DES          0x0000          /*   DES */
+#define        HIFN_CRYPT_CMD_ALG_3DES         0x0001          /*   3DES */
+#define        HIFN_CRYPT_CMD_ALG_RC4          0x0002          /*   RC4 */
+#define        HIFN_CRYPT_CMD_ALG_AES          0x0003          /*   AES */
+#define        HIFN_CRYPT_CMD_MODE_MASK        0x0018          /* Encrypt mode: */
+#define        HIFN_CRYPT_CMD_MODE_ECB         0x0000          /*   ECB */
+#define        HIFN_CRYPT_CMD_MODE_CBC         0x0008          /*   CBC */
+#define        HIFN_CRYPT_CMD_MODE_CFB         0x0010          /*   CFB */
+#define        HIFN_CRYPT_CMD_MODE_OFB         0x0018          /*   OFB */
+#define        HIFN_CRYPT_CMD_CLR_CTX          0x0040          /* clear context */
+#define        HIFN_CRYPT_CMD_KSZ_MASK         0x0600          /* AES key size: */
+#define        HIFN_CRYPT_CMD_KSZ_128          0x0000          /*  128 bit */
+#define        HIFN_CRYPT_CMD_KSZ_192          0x0200          /*  192 bit */
+#define        HIFN_CRYPT_CMD_KSZ_256          0x0400          /*  256 bit */
+#define        HIFN_CRYPT_CMD_NEW_KEY          0x0800          /* expect new key */
+#define        HIFN_CRYPT_CMD_NEW_IV           0x1000          /* expect new iv */
+#define        HIFN_CRYPT_CMD_SRCLEN_M         0xc000
+#define        HIFN_CRYPT_CMD_SRCLEN_S         14
+
+/*
+ * Structure to help build up the command data structure.
+ */
+struct hifn_mac_command
+{
+       volatile u16            masks;
+       volatile u16            header_skip;
+       volatile u16            source_count;
+       volatile u16            reserved;
+};
+
+#define        HIFN_MAC_CMD_ALG_MASK           0x0001
+#define        HIFN_MAC_CMD_ALG_SHA1           0x0000
+#define        HIFN_MAC_CMD_ALG_MD5            0x0001
+#define        HIFN_MAC_CMD_MODE_MASK          0x000c
+#define        HIFN_MAC_CMD_MODE_HMAC          0x0000
+#define        HIFN_MAC_CMD_MODE_SSL_MAC       0x0004
+#define        HIFN_MAC_CMD_MODE_HASH          0x0008
+#define        HIFN_MAC_CMD_MODE_FULL          0x0004
+#define        HIFN_MAC_CMD_TRUNC              0x0010
+#define        HIFN_MAC_CMD_RESULT             0x0020
+#define        HIFN_MAC_CMD_APPEND             0x0040
+#define        HIFN_MAC_CMD_SRCLEN_M           0xc000
+#define        HIFN_MAC_CMD_SRCLEN_S           14
+
+/*
+ * MAC POS IPsec initiates authentication after encryption on encodes
+ * and before decryption on decodes.
+ */
+#define        HIFN_MAC_CMD_POS_IPSEC          0x0200
+#define        HIFN_MAC_CMD_NEW_KEY            0x0800
+
+struct hifn_comp_command
+{
+       volatile u16            masks;
+       volatile u16            header_skip;
+       volatile u16            source_count;
+       volatile u16            reserved;
+};
+
+#define        HIFN_COMP_CMD_SRCLEN_M          0xc000
+#define        HIFN_COMP_CMD_SRCLEN_S          14
+#define        HIFN_COMP_CMD_ONE               0x0100  /* must be one */
+#define        HIFN_COMP_CMD_CLEARHIST         0x0010  /* clear history */
+#define        HIFN_COMP_CMD_UPDATEHIST        0x0008  /* update history */
+#define        HIFN_COMP_CMD_LZS_STRIP0        0x0004  /* LZS: strip zero */
+#define        HIFN_COMP_CMD_MPPC_RESTART      0x0004  /* MPPC: restart */
+#define        HIFN_COMP_CMD_ALG_MASK          0x0001  /* compression mode: */
+#define        HIFN_COMP_CMD_ALG_MPPC          0x0001  /*   MPPC */
+#define        HIFN_COMP_CMD_ALG_LZS           0x0000  /*   LZS */
+
+struct hifn_base_result
+{
+       volatile u16            flags;
+       volatile u16            session;
+       volatile u16            src_cnt;                /* 15:0 of source count */
+       volatile u16            dst_cnt;                /* 15:0 of dest count */
+};
+
+#define        HIFN_BASE_RES_DSTOVERRUN        0x0200  /* destination overrun */
+#define        HIFN_BASE_RES_SRCLEN_M          0xc000  /* 17:16 of source count */
+#define        HIFN_BASE_RES_SRCLEN_S          14
+#define        HIFN_BASE_RES_DSTLEN_M          0x3000  /* 17:16 of dest count */
+#define        HIFN_BASE_RES_DSTLEN_S          12
+
+struct hifn_comp_result
+{
+       volatile u16            flags;
+       volatile u16            crc;
+};
+
+#define        HIFN_COMP_RES_LCB_M             0xff00  /* longitudinal check byte */
+#define        HIFN_COMP_RES_LCB_S             8
+#define        HIFN_COMP_RES_RESTART           0x0004  /* MPPC: restart */
+#define        HIFN_COMP_RES_ENDMARKER         0x0002  /* LZS: end marker seen */
+#define        HIFN_COMP_RES_SRC_NOTZERO       0x0001  /* source expired */
+
+struct hifn_mac_result
+{
+       volatile u16            flags;
+       volatile u16            reserved;
+       /* followed by 0, 6, 8, or 10 u16's of the MAC, then crypt */
+};
+
+#define        HIFN_MAC_RES_MISCOMPARE         0x0002  /* compare failed */
+#define        HIFN_MAC_RES_SRC_NOTZERO        0x0001  /* source expired */
+
+struct hifn_crypt_result
+{
+       volatile u16            flags;
+       volatile u16            reserved;
+};
+
+#define        HIFN_CRYPT_RES_SRC_NOTZERO      0x0001  /* source expired */
+
+#ifndef HIFN_POLL_FREQUENCY
+#define        HIFN_POLL_FREQUENCY     0x1
+#endif
+
+#ifndef HIFN_POLL_SCALAR
+#define        HIFN_POLL_SCALAR        0x0
+#endif
+
+#define        HIFN_MAX_SEGLEN         0xffff          /* maximum dma segment len */
+#define        HIFN_MAX_DMALEN         0x3ffff         /* maximum dma length */
+
+struct hifn_crypto_alg
+{
+       struct list_head        entry;
+       struct crypto_alg       alg;
+       struct hifn_device      *dev;
+};
+
+#define ASYNC_SCATTERLIST_CACHE        16
+
+#define ASYNC_FLAGS_MISALIGNED (1<<0)
+
+struct ablkcipher_walk
+{
+       struct scatterlist      cache[ASYNC_SCATTERLIST_CACHE];
+       u32                     flags;
+       int                     num;
+};
+
+struct hifn_context
+{
+       u8                      key[HIFN_MAX_CRYPT_KEY_LENGTH], *iv;
+       struct hifn_device      *dev;
+       unsigned int            keysize, ivsize;
+       u8                      op, type, mode, unused;
+       struct ablkcipher_walk  walk;
+       atomic_t                sg_num;
+};
+
+#define crypto_alg_to_hifn(a)  container_of(a, struct hifn_crypto_alg, alg)
+
+static inline u32 hifn_read_0(struct hifn_device *dev, u32 reg)
+{
+       u32 ret;
+
+       ret = readl((char *)(dev->bar[0]) + reg);
+
+       return ret;
+}
+
+static inline u32 hifn_read_1(struct hifn_device *dev, u32 reg)
+{
+       u32 ret;
+
+       ret = readl((char *)(dev->bar[1]) + reg);
+
+       return ret;
+}
+
+static inline void hifn_write_0(struct hifn_device *dev, u32 reg, u32 val)
+{
+       writel(val, (char *)(dev->bar[0]) + reg);
+}
+
+static inline void hifn_write_1(struct hifn_device *dev, u32 reg, u32 val)
+{
+       writel(val, (char *)(dev->bar[1]) + reg);
+}
+
+static void hifn_wait_puc(struct hifn_device *dev)
+{
+       int i;
+       u32 ret;
+
+       for (i=10000; i > 0; --i) {
+               ret = hifn_read_0(dev, HIFN_0_PUCTRL);
+               if (!(ret & HIFN_PUCTRL_RESET))
+                       break;
+
+               udelay(1);
+       }
+
+       if (!i)
+               dprintk("%s: Failed to reset PUC unit.\n", dev->name);
+}
+
+static void hifn_reset_puc(struct hifn_device *dev)
+{
+       hifn_write_0(dev, HIFN_0_PUCTRL, HIFN_PUCTRL_DMAENA);
+       hifn_wait_puc(dev);
+}
+
+static void hifn_stop_device(struct hifn_device *dev)
+{
+       hifn_write_1(dev, HIFN_1_DMA_CSR,
+               HIFN_DMACSR_D_CTRL_DIS | HIFN_DMACSR_R_CTRL_DIS |
+               HIFN_DMACSR_S_CTRL_DIS | HIFN_DMACSR_C_CTRL_DIS);
+       hifn_write_0(dev, HIFN_0_PUIER, 0);
+       hifn_write_1(dev, HIFN_1_DMA_IER, 0);
+}
+
+static void hifn_reset_dma(struct hifn_device *dev, int full)
+{
+       hifn_stop_device(dev);
+
+       /*
+        * Setting poll frequency and others to 0.
+        */
+       hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MSTRESET |
+                       HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE);
+       mdelay(1);
+
+       /*
+        * Reset DMA.
+        */
+       if (full) {
+               hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MODE);
+               mdelay(1);
+       } else {
+               hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MODE |
+                               HIFN_DMACNFG_MSTRESET);
+               hifn_reset_puc(dev);
+       }
+
+       hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MSTRESET |
+                       HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE);
+
+       hifn_reset_puc(dev);
+}
+
+static u32 hifn_next_signature(u_int32_t a, u_int cnt)
+{
+       int i;
+       u32 v;
+
+       for (i = 0; i < cnt; i++) {
+
+               /* get the parity */
+               v = a & 0x80080125;
+               v ^= v >> 16;
+               v ^= v >> 8;
+               v ^= v >> 4;
+               v ^= v >> 2;
+               v ^= v >> 1;
+
+               a = (v & 1) ^ (a << 1);
+       }
+
+       return a;
+}
+
+static struct pci2id {
+       u_short         pci_vendor;
+       u_short         pci_prod;
+       char            card_id[13];
+} pci2id[] = {
+       {
+               PCI_VENDOR_ID_HIFN,
+               PCI_DEVICE_ID_HIFN_7955,
+               { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00 }
+       },
+       {
+               PCI_VENDOR_ID_HIFN,
+               PCI_DEVICE_ID_HIFN_7956,
+               { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00 }
+       }
+};
+
+#if defined(CONFIG_HW_RANDOM) || defined(CONFIG_HW_RANDOM_MODULE)
+static int hifn_rng_data_present(struct hwrng *rng, int wait)
+{
+       struct hifn_device *dev = (struct hifn_device *)rng->priv;
+       s64 nsec;
+
+       nsec = ktime_to_ns(ktime_sub(ktime_get(), dev->rngtime));
+       nsec -= dev->rng_wait_time;
+       if (nsec <= 0)
+               return 1;
+       if (!wait)
+               return 0;
+       ndelay(nsec);
+       return 1;
+}
+
+static int hifn_rng_data_read(struct hwrng *rng, u32 *data)
+{
+       struct hifn_device *dev = (struct hifn_device *)rng->priv;
+
+       *data = hifn_read_1(dev, HIFN_1_RNG_DATA);
+       dev->rngtime = ktime_get();
+       return 4;
+}
+
+static int hifn_register_rng(struct hifn_device *dev)
+{
+       /*
+        * We must wait at least 256 Pk_clk cycles between two reads of the rng.
+        */
+       dev->rng_wait_time      = DIV_ROUND_UP(NSEC_PER_SEC, dev->pk_clk_freq) *
+                                 256;
+
+       dev->rng.name           = dev->name;
+       dev->rng.data_present   = hifn_rng_data_present,
+       dev->rng.data_read      = hifn_rng_data_read,
+       dev->rng.priv           = (unsigned long)dev;
+
+       return hwrng_register(&dev->rng);
+}
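+
The 256 Pk_clk cycle spacing above turns into a concrete nanosecond budget once the PLL is configured. As a worked example, assuming the default 66MHz external reference, for which hifn_init_pll() below yields pk_clk_freq = 134MHz:

	/* DIV_ROUND_UP(1,000,000,000 ns, 134,000,000 Hz) = 8 ns per Pk_clk
	 * cycle, so rng_wait_time = 8 * 256 = 2048 ns between RNG reads. */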
+
+static void hifn_unregister_rng(struct hifn_device *dev)
+{
+       hwrng_unregister(&dev->rng);
+}
+#else
+#define hifn_register_rng(dev)         0
+#define hifn_unregister_rng(dev)
+#endif
+
+static int hifn_init_pubrng(struct hifn_device *dev)
+{
+       int i;
+
+       hifn_write_1(dev, HIFN_1_PUB_RESET, hifn_read_1(dev, HIFN_1_PUB_RESET) |
+                       HIFN_PUBRST_RESET);
+
+       for (i=100; i > 0; --i) {
+               mdelay(1);
+
+               if ((hifn_read_1(dev, HIFN_1_PUB_RESET) & HIFN_PUBRST_RESET) == 0)
+                       break;
+       }
+
+       if (!i)
+               dprintk("Chip %s: Failed to initialise public key engine.\n",
+                               dev->name);
+       else {
+               hifn_write_1(dev, HIFN_1_PUB_IEN, HIFN_PUBIEN_DONE);
+               dev->dmareg |= HIFN_DMAIER_PUBDONE;
+               hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
+
+               dprintk("Chip %s: Public key engine has been successfully "
+                               "initialised.\n", dev->name);
+       }
+
+       /*
+        * Enable RNG engine.
+        */
+
+       hifn_write_1(dev, HIFN_1_RNG_CONFIG,
+                       hifn_read_1(dev, HIFN_1_RNG_CONFIG) | HIFN_RNGCFG_ENA);
+       dprintk("Chip %s: RNG engine has been successfully initialised.\n",
+                       dev->name);
+
+#if defined(CONFIG_HW_RANDOM) || defined(CONFIG_HW_RANDOM_MODULE)
+       /* First value must be discarded */
+       hifn_read_1(dev, HIFN_1_RNG_DATA);
+       dev->rngtime = ktime_get();
+#endif
+       return 0;
+}
+
+static int hifn_enable_crypto(struct hifn_device *dev)
+{
+       u32 dmacfg, addr;
+       char *offtbl = NULL;
+       int i;
+
+       for (i = 0; i < sizeof(pci2id)/sizeof(pci2id[0]); i++) {
+               if (pci2id[i].pci_vendor == dev->pdev->vendor &&
+                               pci2id[i].pci_prod == dev->pdev->device) {
+                       offtbl = pci2id[i].card_id;
+                       break;
+               }
+       }
+
+       if (offtbl == NULL) {
+               dprintk("Chip %s: Unknown card!\n", dev->name);
+               return -ENODEV;
+       }
+
+       dmacfg = hifn_read_1(dev, HIFN_1_DMA_CNFG);
+
+       hifn_write_1(dev, HIFN_1_DMA_CNFG,
+                       HIFN_DMACNFG_UNLOCK | HIFN_DMACNFG_MSTRESET |
+                       HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE);
+       mdelay(1);
+       addr = hifn_read_1(dev, HIFN_1_UNLOCK_SECRET1);
+       mdelay(1);
+       hifn_write_1(dev, HIFN_1_UNLOCK_SECRET2, 0);
+       mdelay(1);
+
+       for (i=0; i<12; ++i) {
+               addr = hifn_next_signature(addr, offtbl[i] + 0x101);
+               hifn_write_1(dev, HIFN_1_UNLOCK_SECRET2, addr);
+
+               mdelay(1);
+       }
+       hifn_write_1(dev, HIFN_1_DMA_CNFG, dmacfg);
+
+       dprintk("Chip %s: %s.\n", dev->name, pci_name(dev->pdev));
+
+       return 0;
+}
+
+static void hifn_init_dma(struct hifn_device *dev)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       u32 dptr = dev->desc_dma;
+       int i;
+
+       for (i=0; i<HIFN_D_CMD_RSIZE; ++i)
+               dma->cmdr[i].p = __cpu_to_le32(dptr +
+                               offsetof(struct hifn_dma, command_bufs[i][0]));
+       for (i=0; i<HIFN_D_RES_RSIZE; ++i)
+               dma->resr[i].p = __cpu_to_le32(dptr +
+                               offsetof(struct hifn_dma, result_bufs[i][0]));
+
+       /*
+        * Setup LAST descriptors.
+        */
+       dma->cmdr[HIFN_D_CMD_RSIZE].p = __cpu_to_le32(dptr +
+                       offsetof(struct hifn_dma, cmdr[0]));
+       dma->srcr[HIFN_D_SRC_RSIZE].p = __cpu_to_le32(dptr +
+                       offsetof(struct hifn_dma, srcr[0]));
+       dma->dstr[HIFN_D_DST_RSIZE].p = __cpu_to_le32(dptr +
+                       offsetof(struct hifn_dma, dstr[0]));
+       dma->resr[HIFN_D_RES_RSIZE].p = __cpu_to_le32(dptr +
+                       offsetof(struct hifn_dma, resr[0]));
+
+       dma->cmdu = dma->srcu = dma->dstu = dma->resu = 0;
+       dma->cmdi = dma->srci = dma->dsti = dma->resi = 0;
+       dma->cmdk = dma->srck = dma->dstk = dma->resk = 0;
+}
+
+/*
+ * Initialize the PLL. We need to know the frequency of the reference clock
+ * to calculate the optimal multiplier. For PCI we assume 66MHz, since that
+ * allows us to operate without the risk of overclocking the chip. If it
+ * actually uses 33MHz, the chip will operate at half the speed; this can be
+ * overridden by specifying the frequency as a module parameter (pci33).
+ *
+ * Unfortunately the PCI clock is not very suitable since the HIFN needs a
+ * stable clock and the PCI clock frequency may vary, so the default is the
+ * external clock. There is no way to find out its frequency, so we default to
+ * 66MHz since according to Mike Ham of HiFn, almost every board in existence
+ * has an external crystal populated at 66MHz.
+ */
+static void hifn_init_pll(struct hifn_device *dev)
+{
+       unsigned int freq, m;
+       u32 pllcfg;
+
+       pllcfg = HIFN_1_PLL | HIFN_PLL_RESERVED_1;
+
+       if (strncmp(hifn_pll_ref, "ext", 3) == 0)
+               pllcfg |= HIFN_PLL_REF_CLK_PLL;
+       else
+               pllcfg |= HIFN_PLL_REF_CLK_HBI;
+
+       if (hifn_pll_ref[3] != '\0')
+               freq = simple_strtoul(hifn_pll_ref + 3, NULL, 10);
+       else {
+               freq = 66;
+               printk(KERN_INFO "hifn795x: assuming %uMHz clock speed, "
+                                "override with hifn_pll_ref=%.3s<frequency>\n",
+                      freq, hifn_pll_ref);
+       }
+
+       m = HIFN_PLL_FCK_MAX / freq;
+
+       pllcfg |= (m / 2 - 1) << HIFN_PLL_ND_SHIFT;
+       if (m <= 8)
+               pllcfg |= HIFN_PLL_IS_1_8;
+       else
+               pllcfg |= HIFN_PLL_IS_9_12;
+
+       /* Select clock source and enable clock bypass */
+       hifn_write_1(dev, HIFN_1_PLL, pllcfg |
+                    HIFN_PLL_PK_CLK_HBI | HIFN_PLL_PE_CLK_HBI | HIFN_PLL_BP);
+
+       /* Let the chip lock to the input clock */
+       mdelay(10);
+
+       /* Disable clock bypass */
+       hifn_write_1(dev, HIFN_1_PLL, pllcfg |
+                    HIFN_PLL_PK_CLK_HBI | HIFN_PLL_PE_CLK_HBI);
+
+       /* Switch the engines to the PLL */
+       hifn_write_1(dev, HIFN_1_PLL, pllcfg |
+                    HIFN_PLL_PK_CLK_PLL | HIFN_PLL_PE_CLK_PLL);
+
+       /*
+        * The Fpk_clk runs at half the total speed. Its frequency is needed to
+        * calculate the minimum time between two reads of the rng. Since 33MHz
+        * is actually 33.333... we overestimate the frequency here, resulting
+        * in slightly larger intervals.
+        */
+       dev->pk_clk_freq = 1000000 * (freq + 1) * m / 2;
+}
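+
A worked example of the configuration above, derived directly from the code for the default "ext" reference with no frequency suffix (i.e. freq = 66):

	m = HIFN_PLL_FCK_MAX / freq;          /* 266 / 66 = 4 (integer division) */
	pllcfg |= (m / 2 - 1) << HIFN_PLL_ND_SHIFT;
	                                      /* (4/2 - 1) << 11 = 0x800 = HIFN_PLL_ND_MULT_4 */
	pllcfg |= HIFN_PLL_IS_1_8;            /* m <= 8, low charge-pump range */
	dev->pk_clk_freq = 1000000 * (freq + 1) * m / 2;
	                                      /* 1e6 * 67 * 4 / 2 = 134MHz */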
+
+static void hifn_init_registers(struct hifn_device *dev)
+{
+       u32 dptr = dev->desc_dma;
+
+       /* Initialization magic... */
+       hifn_write_0(dev, HIFN_0_PUCTRL, HIFN_PUCTRL_DMAENA);
+       hifn_write_0(dev, HIFN_0_FIFOCNFG, HIFN_FIFOCNFG_THRESHOLD);
+       hifn_write_0(dev, HIFN_0_PUIER, HIFN_PUIER_DSTOVER);
+
+       /* write all 4 ring address registers */
+       hifn_write_1(dev, HIFN_1_DMA_CRAR, __cpu_to_le32(dptr +
+                               offsetof(struct hifn_dma, cmdr[0])));
+       hifn_write_1(dev, HIFN_1_DMA_SRAR, __cpu_to_le32(dptr +
+                               offsetof(struct hifn_dma, srcr[0])));
+       hifn_write_1(dev, HIFN_1_DMA_DRAR, __cpu_to_le32(dptr +
+                               offsetof(struct hifn_dma, dstr[0])));
+       hifn_write_1(dev, HIFN_1_DMA_RRAR, __cpu_to_le32(dptr +
+                               offsetof(struct hifn_dma, resr[0])));
+
+       mdelay(2);
+#if 0
+       hifn_write_1(dev, HIFN_1_DMA_CSR,
+           HIFN_DMACSR_D_CTRL_DIS | HIFN_DMACSR_R_CTRL_DIS |
+           HIFN_DMACSR_S_CTRL_DIS | HIFN_DMACSR_C_CTRL_DIS |
+           HIFN_DMACSR_D_ABORT | HIFN_DMACSR_D_DONE | HIFN_DMACSR_D_LAST |
+           HIFN_DMACSR_D_WAIT | HIFN_DMACSR_D_OVER |
+           HIFN_DMACSR_R_ABORT | HIFN_DMACSR_R_DONE | HIFN_DMACSR_R_LAST |
+           HIFN_DMACSR_R_WAIT | HIFN_DMACSR_R_OVER |
+           HIFN_DMACSR_S_ABORT | HIFN_DMACSR_S_DONE | HIFN_DMACSR_S_LAST |
+           HIFN_DMACSR_S_WAIT |
+           HIFN_DMACSR_C_ABORT | HIFN_DMACSR_C_DONE | HIFN_DMACSR_C_LAST |
+           HIFN_DMACSR_C_WAIT |
+           HIFN_DMACSR_ENGINE |
+           HIFN_DMACSR_PUBDONE);
+#else
+       hifn_write_1(dev, HIFN_1_DMA_CSR,
+           HIFN_DMACSR_C_CTRL_ENA | HIFN_DMACSR_S_CTRL_ENA |
+           HIFN_DMACSR_D_CTRL_ENA | HIFN_DMACSR_R_CTRL_ENA |
+           HIFN_DMACSR_D_ABORT | HIFN_DMACSR_D_DONE | HIFN_DMACSR_D_LAST |
+           HIFN_DMACSR_D_WAIT | HIFN_DMACSR_D_OVER |
+           HIFN_DMACSR_R_ABORT | HIFN_DMACSR_R_DONE | HIFN_DMACSR_R_LAST |
+           HIFN_DMACSR_R_WAIT | HIFN_DMACSR_R_OVER |
+           HIFN_DMACSR_S_ABORT | HIFN_DMACSR_S_DONE | HIFN_DMACSR_S_LAST |
+           HIFN_DMACSR_S_WAIT |
+           HIFN_DMACSR_C_ABORT | HIFN_DMACSR_C_DONE | HIFN_DMACSR_C_LAST |
+           HIFN_DMACSR_C_WAIT |
+           HIFN_DMACSR_ENGINE |
+           HIFN_DMACSR_PUBDONE);
+#endif
+       hifn_read_1(dev, HIFN_1_DMA_CSR);
+
+       dev->dmareg |= HIFN_DMAIER_R_DONE | HIFN_DMAIER_C_ABORT |
+           HIFN_DMAIER_D_OVER | HIFN_DMAIER_R_OVER |
+           HIFN_DMAIER_S_ABORT | HIFN_DMAIER_D_ABORT | HIFN_DMAIER_R_ABORT |
+           HIFN_DMAIER_ENGINE;
+       dev->dmareg &= ~HIFN_DMAIER_C_WAIT;
+
+       hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
+       hifn_read_1(dev, HIFN_1_DMA_IER);
+#if 0
+       hifn_write_0(dev, HIFN_0_PUCNFG, HIFN_PUCNFG_ENCCNFG |
+                   HIFN_PUCNFG_DRFR_128 | HIFN_PUCNFG_TCALLPHASES |
+                   HIFN_PUCNFG_TCDRVTOTEM | HIFN_PUCNFG_BUS32 |
+                   HIFN_PUCNFG_DRAM);
+#else
+       hifn_write_0(dev, HIFN_0_PUCNFG, 0x10342);
+#endif
+       hifn_init_pll(dev);
+
+       hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER);
+       hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MSTRESET |
+           HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE | HIFN_DMACNFG_LAST |
+           ((HIFN_POLL_FREQUENCY << 16 ) & HIFN_DMACNFG_POLLFREQ) |
+           ((HIFN_POLL_SCALAR << 8) & HIFN_DMACNFG_POLLINVAL));
+}
+
+static int hifn_setup_base_command(struct hifn_device *dev, u8 *buf,
+               unsigned dlen, unsigned slen, u16 mask, u8 snum)
+{
+       struct hifn_base_command *base_cmd;
+       u8 *buf_pos = buf;
+
+       base_cmd = (struct hifn_base_command *)buf_pos;
+       base_cmd->masks = __cpu_to_le16(mask);
+       base_cmd->total_source_count =
+               __cpu_to_le16(slen & HIFN_BASE_CMD_LENMASK_LO);
+       base_cmd->total_dest_count =
+               __cpu_to_le16(dlen & HIFN_BASE_CMD_LENMASK_LO);
+
+       dlen >>= 16;
+       slen >>= 16;
+       base_cmd->session_num = __cpu_to_le16(snum |
+           ((slen << HIFN_BASE_CMD_SRCLEN_S) & HIFN_BASE_CMD_SRCLEN_M) |
+           ((dlen << HIFN_BASE_CMD_DSTLEN_S) & HIFN_BASE_CMD_DSTLEN_M));
+
+       return sizeof(struct hifn_base_command);
+}
+
+static int hifn_setup_crypto_command(struct hifn_device *dev,
+               u8 *buf, unsigned dlen, unsigned slen,
+               u8 *key, int keylen, u8 *iv, int ivsize, u16 mode)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       struct hifn_crypt_command *cry_cmd;
+       u8 *buf_pos = buf;
+       u16 cmd_len;
+
+       cry_cmd = (struct hifn_crypt_command *)buf_pos;
+
+       cry_cmd->source_count = __cpu_to_le16(dlen & 0xffff);
+       dlen >>= 16;
+       cry_cmd->masks = __cpu_to_le16(mode |
+                       ((dlen << HIFN_CRYPT_CMD_SRCLEN_S) &
+                        HIFN_CRYPT_CMD_SRCLEN_M));
+       cry_cmd->header_skip = 0;
+       cry_cmd->reserved = 0;
+
+       buf_pos += sizeof(struct hifn_crypt_command);
+
+       dma->cmdu++;
+       if (dma->cmdu > 1) {
+               dev->dmareg |= HIFN_DMAIER_C_WAIT;
+               hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
+       }
+
+       if (keylen) {
+               memcpy(buf_pos, key, keylen);
+               buf_pos += keylen;
+       }
+       if (ivsize) {
+               memcpy(buf_pos, iv, ivsize);
+               buf_pos += ivsize;
+       }
+
+       cmd_len = buf_pos - buf;
+
+       return cmd_len;
+}
+
+static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
+               unsigned int offset, unsigned int size)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       int idx;
+       dma_addr_t addr;
+
+       addr = pci_map_page(dev->pdev, page, offset, size, PCI_DMA_TODEVICE);
+
+       idx = dma->srci;
+
+       dma->srcr[idx].p = __cpu_to_le32(addr);
+       dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
+                       HIFN_D_MASKDONEIRQ | HIFN_D_NOINVALID | HIFN_D_LAST);
+
+       if (++idx == HIFN_D_SRC_RSIZE) {
+               dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID |
+                               HIFN_D_JUMP |
+                               HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
+               idx = 0;
+       }
+
+       dma->srci = idx;
+       dma->srcu++;
+
+       if (!(dev->flags & HIFN_FLAG_SRC_BUSY)) {
+               hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_S_CTRL_ENA);
+               dev->flags |= HIFN_FLAG_SRC_BUSY;
+       }
+
+       return size;
+}
+
+static void hifn_setup_res_desc(struct hifn_device *dev)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+
+       dma->resr[dma->resi].l = __cpu_to_le32(HIFN_USED_RESULT |
+                       HIFN_D_VALID | HIFN_D_LAST);
+       /*
+        * dma->resr[dma->resi].l = __cpu_to_le32(HIFN_MAX_RESULT | HIFN_D_VALID |
+        *                                      HIFN_D_LAST | HIFN_D_NOINVALID);
+        */
+
+       if (++dma->resi == HIFN_D_RES_RSIZE) {
+               dma->resr[HIFN_D_RES_RSIZE].l = __cpu_to_le32(HIFN_D_VALID |
+                               HIFN_D_JUMP | HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
+               dma->resi = 0;
+       }
+
+       dma->resu++;
+
+       if (!(dev->flags & HIFN_FLAG_RES_BUSY)) {
+               hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_R_CTRL_ENA);
+               dev->flags |= HIFN_FLAG_RES_BUSY;
+       }
+}
+
+static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
+               unsigned offset, unsigned size)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       int idx;
+       dma_addr_t addr;
+
+       addr = pci_map_page(dev->pdev, page, offset, size, PCI_DMA_FROMDEVICE);
+
+       idx = dma->dsti;
+       dma->dstr[idx].p = __cpu_to_le32(addr);
+       dma->dstr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
+                       HIFN_D_MASKDONEIRQ | HIFN_D_NOINVALID | HIFN_D_LAST);
+
+       if (++idx == HIFN_D_DST_RSIZE) {
+               dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID |
+                               HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
+                               HIFN_D_LAST | HIFN_D_NOINVALID);
+               idx = 0;
+       }
+       dma->dsti = idx;
+       dma->dstu++;
+
+       if (!(dev->flags & HIFN_FLAG_DST_BUSY)) {
+               hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_D_CTRL_ENA);
+               dev->flags |= HIFN_FLAG_DST_BUSY;
+       }
+}
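+
+/*
+ * Ring convention used by the command/source/destination/result setup
+ * helpers above: each ring provides RSIZE data descriptors plus one trailing
+ * slot that is armed with HIFN_D_JUMP when the producer index wraps, so the
+ * engine jumps back to the start of the ring and processing continues
+ * circularly.
+ */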
+
+static int hifn_setup_dma(struct hifn_device *dev, struct page *spage, unsigned int soff,
+               struct page *dpage, unsigned int doff, unsigned int nbytes, void *priv,
+               struct hifn_context *ctx)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       int cmd_len, sa_idx;
+       u8 *buf, *buf_pos;
+       u16 mask;
+
+       dprintk("%s: spage: %p, soffset: %u, dpage: %p, doffset: %u, nbytes: %u, priv: %p, ctx: %p.\n",
+                       dev->name, spage, soff, dpage, doff, nbytes, priv, ctx);
+
+       sa_idx = dma->resi;
+
+       hifn_setup_src_desc(dev, spage, soff, nbytes);
+
+       buf_pos = buf = dma->command_bufs[dma->cmdi];
+
+       mask = 0;
+       switch (ctx->op) {
+               case ACRYPTO_OP_DECRYPT:
+                       mask = HIFN_BASE_CMD_CRYPT | HIFN_BASE_CMD_DECODE;
+                       break;
+               case ACRYPTO_OP_ENCRYPT:
+                       mask = HIFN_BASE_CMD_CRYPT;
+                       break;
+               case ACRYPTO_OP_HMAC:
+                       mask = HIFN_BASE_CMD_MAC;
+                       break;
+               default:
+                       goto err_out;
+       }
+
+       buf_pos += hifn_setup_base_command(dev, buf_pos, nbytes,
+                       nbytes, mask, dev->snum);
+
+       if (ctx->op == ACRYPTO_OP_ENCRYPT || ctx->op == ACRYPTO_OP_DECRYPT) {
+               u16 md = 0;
+
+               if (ctx->keysize)
+                       md |= HIFN_CRYPT_CMD_NEW_KEY;
+               if (ctx->iv && ctx->mode != ACRYPTO_MODE_ECB)
+                       md |= HIFN_CRYPT_CMD_NEW_IV;
+
+               switch (ctx->mode) {
+                       case ACRYPTO_MODE_ECB:
+                               md |= HIFN_CRYPT_CMD_MODE_ECB;
+                               break;
+                       case ACRYPTO_MODE_CBC:
+                               md |= HIFN_CRYPT_CMD_MODE_CBC;
+                               break;
+                       case ACRYPTO_MODE_CFB:
+                               md |= HIFN_CRYPT_CMD_MODE_CFB;
+                               break;
+                       case ACRYPTO_MODE_OFB:
+                               md |= HIFN_CRYPT_CMD_MODE_OFB;
+                               break;
+                       default:
+                               goto err_out;
+               }
+
+               switch (ctx->type) {
+                       case ACRYPTO_TYPE_AES_128:
+                               if (ctx->keysize != 16)
+                                       goto err_out;
+                               md |= HIFN_CRYPT_CMD_KSZ_128 |
+                                       HIFN_CRYPT_CMD_ALG_AES;
+                               break;
+                       case ACRYPTO_TYPE_AES_192:
+                               if (ctx->keysize != 24)
+                                       goto err_out;
+                               md |= HIFN_CRYPT_CMD_KSZ_192 |
+                                       HIFN_CRYPT_CMD_ALG_AES;
+                               break;
+                       case ACRYPTO_TYPE_AES_256:
+                               if (ctx->keysize != 32)
+                                       goto err_out;
+                               md |= HIFN_CRYPT_CMD_KSZ_256 |
+                                       HIFN_CRYPT_CMD_ALG_AES;
+                               break;
+                       case ACRYPTO_TYPE_3DES:
+                               if (ctx->keysize != 24)
+                                       goto err_out;
+                               md |= HIFN_CRYPT_CMD_ALG_3DES;
+                               break;
+                       case ACRYPTO_TYPE_DES:
+                               if (ctx->keysize != 8)
+                                       goto err_out;
+                               md |= HIFN_CRYPT_CMD_ALG_DES;
+                               break;
+                       default:
+                               goto err_out;
+               }
+
+               buf_pos += hifn_setup_crypto_command(dev, buf_pos,
+                               nbytes, nbytes, ctx->key, ctx->keysize,
+                               ctx->iv, ctx->ivsize, md);
+       }
+
+       dev->sa[sa_idx] = priv;
+
+       cmd_len = buf_pos - buf;
+       dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID |
+                       HIFN_D_LAST | HIFN_D_MASKDONEIRQ);
+
+       if (++dma->cmdi == HIFN_D_CMD_RSIZE) {
+               dma->cmdr[dma->cmdi].l = __cpu_to_le32(HIFN_MAX_COMMAND |
+                       HIFN_D_VALID | HIFN_D_LAST |
+                       HIFN_D_MASKDONEIRQ | HIFN_D_JUMP);
+               dma->cmdi = 0;
+       } else
+               dma->cmdr[dma->cmdi-1].l |= __cpu_to_le32(HIFN_D_VALID);
+
+       if (!(dev->flags & HIFN_FLAG_CMD_BUSY)) {
+               hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_C_CTRL_ENA);
+               dev->flags |= HIFN_FLAG_CMD_BUSY;
+       }
+
+       hifn_setup_dst_desc(dev, dpage, doff, nbytes);
+       hifn_setup_res_desc(dev);
+
+       return 0;
+
+err_out:
+       return -EINVAL;
+}
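+
+/*
+ * Command buffer layout assembled by hifn_setup_dma() (sketch derived from
+ * the code above):
+ *
+ *     [hifn_base_command][hifn_crypt_command][key][iv]
+ *
+ * The total length is written into the command ring descriptor, and matching
+ * source, destination and result descriptors are queued so that one request
+ * consumes one entry in each ring.
+ */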
+
+static int ablkcipher_walk_init(struct ablkcipher_walk *w,
+               int num, gfp_t gfp_flags)
+{
+       int i;
+
+       num = min(ASYNC_SCATTERLIST_CACHE, num);
+       sg_init_table(w->cache, num);
+
+       w->num = 0;
+       for (i=0; i<num; ++i) {
+               struct page *page = alloc_page(gfp_flags);
+               struct scatterlist *s;
+
+               if (!page)
+                       break;
+
+               s = &w->cache[i];
+
+               sg_set_page(s, page, PAGE_SIZE, 0);
+               w->num++;
+       }
+
+       return i;
+}
+
+static void ablkcipher_walk_exit(struct ablkcipher_walk *w)
+{
+       int i;
+
+       for (i=0; i<w->num; ++i) {
+               struct scatterlist *s = &w->cache[i];
+
+               __free_page(sg_page(s));
+
+               s->length = 0;
+       }
+
+       w->num = 0;
+}
+
+static int ablkcipher_add(void *daddr, unsigned int *drestp, struct scatterlist *src,
+               unsigned int size, unsigned int *nbytesp)
+{
+       unsigned int copy, drest = *drestp, nbytes = *nbytesp;
+       int idx = 0;
+       void *saddr;
+
+       if (drest < size || size > nbytes)
+               return -EINVAL;
+
+       while (size) {
+               copy = min(drest, src->length);
+
+               saddr = kmap_atomic(sg_page(src), KM_SOFTIRQ1);
+               memcpy(daddr, saddr + src->offset, copy);
+               kunmap_atomic(saddr, KM_SOFTIRQ1);
+
+               size -= copy;
+               drest -= copy;
+               nbytes -= copy;
+               daddr += copy;
+
+               dprintk("%s: copy: %u, size: %u, drest: %u, nbytes: %u.\n",
+                               __func__, copy, size, drest, nbytes);
+
+               src++;
+               idx++;
+       }
+
+       *nbytesp = nbytes;
+       *drestp = drest;
+
+       return idx;
+}
+
+static int ablkcipher_walk(struct ablkcipher_request *req,
+               struct ablkcipher_walk *w)
+{
+       unsigned blocksize =
+               crypto_ablkcipher_blocksize(crypto_ablkcipher_reqtfm(req));
+       unsigned alignmask =
+               crypto_ablkcipher_alignmask(crypto_ablkcipher_reqtfm(req));
+       struct scatterlist *src, *dst, *t;
+       void *daddr;
+       unsigned int nbytes = req->nbytes, offset, copy, diff;
+       int idx, tidx, err;
+
+       tidx = idx = 0;
+       offset = 0;
+       while (nbytes) {
+               if (idx >= w->num && (w->flags & ASYNC_FLAGS_MISALIGNED))
+                       return -EINVAL;
+
+               src = &req->src[idx];
+               dst = &req->dst[idx];
+
+               dprintk("\n%s: slen: %u, dlen: %u, soff: %u, doff: %u, offset: %u, "
+                               "blocksize: %u, nbytes: %u.\n",
+                               __func__, src->length, dst->length, src->offset,
+                               dst->offset, offset, blocksize, nbytes);
+
+               if (src->length & (blocksize - 1) ||
+                               src->offset & (alignmask - 1) ||
+                               dst->length & (blocksize - 1) ||
+                               dst->offset & (alignmask - 1) ||
+                               offset) {
+                       unsigned slen = src->length - offset;
+                       unsigned dlen = PAGE_SIZE;
+
+                       t = &w->cache[idx];
+
+                       daddr = kmap_atomic(sg_page(t), KM_SOFTIRQ0);
+                       err = ablkcipher_add(daddr, &dlen, src, slen, &nbytes);
+                       if (err < 0)
+                               goto err_out_unmap;
+
+                       idx += err;
+
+                       copy = slen & ~(blocksize - 1);
+                       diff = slen & (blocksize - 1);
+
+                       if (dlen < nbytes) {
+                               /*
+                                * The destination page does not have enough
+                                * space for an additional block-sized chunk,
+                                * so mark this page as containing only
+                                * blocksize-aligned chunks:
+                                *      t->length = (slen & ~(blocksize - 1));
+                                * and increase the number of bytes to be
+                                * processed in the next chunk:
+                                *      nbytes += diff;
+                                */
+                               nbytes += diff;
+
+                               /*
+                                * Temporary, of course... this path should
+                                * never be hit; please report it to the
+                                * author if it is.
+                                */
+                               printk(KERN_ERR "%s: dlen: %u, nbytes: %u, "
+                                       "slen: %u, offset: %u.\n",
+                                       __func__, dlen, nbytes, slen, offset);
+                               printk(KERN_ERR "%s: please contact the author "
+                                       "to fix this issue; this path should "
+                                       "not be hit under any normal use of "
+                                       "the crypto API.\n", __func__);
+                               BUG();
+                       } else {
+                               copy += diff + nbytes;
+
+                               src = &req->src[idx];
+
+                               err = ablkcipher_add(daddr + slen, &dlen, src, nbytes, &nbytes);
+                               if (err < 0)
+                                       goto err_out_unmap;
+
+                               idx += err;
+                       }
+
+                       t->length = copy;
+                       t->offset = offset;
+
+                       kunmap_atomic(daddr, KM_SOFTIRQ0);
+               } else {
+                       nbytes -= src->length;
+                       idx++;
+               }
+
+               tidx++;
+       }
+
+       return tidx;
+
+err_out_unmap:
+       kunmap_atomic(daddr, KM_SOFTIRQ0);
+       return err;
+}
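+
+/*
+ * Summary of the walk above: scatterlist entries whose length or offset is
+ * not aligned to the cipher block size / alignmask are copied into the
+ * page-sized bounce buffers allocated by ablkcipher_walk_init(), so that the
+ * engine only ever sees block-aligned chunks; already aligned entries are
+ * passed through untouched.
+ */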
+
+static int hifn_setup_session(struct ablkcipher_request *req)
+{
+       struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+       struct hifn_device *dev = ctx->dev;
+       struct page *spage, *dpage;
+       unsigned long soff, doff, flags;
+       unsigned int nbytes = req->nbytes, idx = 0, len;
+       int err = -EINVAL, sg_num;
+       struct scatterlist *src, *dst, *t;
+       unsigned blocksize =
+               crypto_ablkcipher_blocksize(crypto_ablkcipher_reqtfm(req));
+       unsigned alignmask =
+               crypto_ablkcipher_alignmask(crypto_ablkcipher_reqtfm(req));
+
+       if (ctx->iv && !ctx->ivsize && ctx->mode != ACRYPTO_MODE_ECB)
+               goto err_out_exit;
+
+       ctx->walk.flags = 0;
+
+       while (nbytes) {
+               src = &req->src[idx];
+               dst = &req->dst[idx];
+
+               if (src->length & (blocksize - 1) ||
+                               src->offset & (alignmask - 1) ||
+                               dst->length & (blocksize - 1) ||
+                               dst->offset & (alignmask - 1)) {
+                       ctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
+               }
+
+               nbytes -= src->length;
+               idx++;
+       }
+
+       if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
+               err = ablkcipher_walk_init(&ctx->walk, idx, GFP_ATOMIC);
+               if (err < 0)
+                       return err;
+       }
+
+       nbytes = req->nbytes;
+       idx = 0;
+
+       sg_num = ablkcipher_walk(req, &ctx->walk);
+
+       atomic_set(&ctx->sg_num, sg_num);
+
+       spin_lock_irqsave(&dev->lock, flags);
+       if (dev->started + sg_num > HIFN_QUEUE_LENGTH) {
+               err = -EAGAIN;
+               goto err_out;
+       }
+
+       dev->snum++;
+       dev->started += sg_num;
+
+       while (nbytes) {
+               src = &req->src[idx];
+               dst = &req->dst[idx];
+               t = &ctx->walk.cache[idx];
+
+               if (t->length) {
+                       spage = dpage = sg_page(t);
+                       soff = doff = 0;
+                       len = t->length;
+               } else {
+                       spage = sg_page(src);
+                       soff = src->offset;
+
+                       dpage = sg_page(dst);
+                       doff = dst->offset;
+
+                       len = dst->length;
+               }
+
+               idx++;
+
+               err = hifn_setup_dma(dev, spage, soff, dpage, doff, nbytes,
+                               req, ctx);
+               if (err)
+                       goto err_out;
+
+               nbytes -= len;
+       }
+
+       dev->active = HIFN_DEFAULT_ACTIVE_NUM;
+       spin_unlock_irqrestore(&dev->lock, flags);
+
+       return 0;
+
+err_out:
+       spin_unlock_irqrestore(&dev->lock, flags);
+err_out_exit:
+       if (err && printk_ratelimit())
+               dprintk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, "
+                               "type: %u, err: %d.\n",
+                       dev->name, ctx->iv, ctx->ivsize,
+                       ctx->key, ctx->keysize,
+                       ctx->mode, ctx->op, ctx->type, err);
+
+       return err;
+}
+
+static int hifn_test(struct hifn_device *dev, int encdec, u8 snum)
+{
+       int n, err;
+       u8 src[16];
+       struct hifn_context ctx;
+       u8 fips_aes_ecb_from_zero[16] = {
+               0x66, 0xE9, 0x4B, 0xD4,
+               0xEF, 0x8A, 0x2C, 0x3B,
+               0x88, 0x4C, 0xFA, 0x59,
+               0xCA, 0x34, 0x2B, 0x2E};
+
+       memset(src, 0, sizeof(src));
+       memset(ctx.key, 0, sizeof(ctx.key));
+
+       ctx.dev = dev;
+       ctx.keysize = 16;
+       ctx.ivsize = 0;
+       ctx.iv = NULL;
+       ctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT;
+       ctx.mode = ACRYPTO_MODE_ECB;
+       ctx.type = ACRYPTO_TYPE_AES_128;
+       atomic_set(&ctx.sg_num, 1);
+
+       err = hifn_setup_dma(dev,
+                       virt_to_page(src), offset_in_page(src),
+                       virt_to_page(src), offset_in_page(src),
+                       sizeof(src), NULL, &ctx);
+       if (err)
+               goto err_out;
+
+       msleep(200);
+
+       dprintk("%s: decoded: ", dev->name);
+       for (n=0; n<sizeof(src); ++n)
+               dprintk("%02x ", src[n]);
+       dprintk("\n");
+       dprintk("%s: FIPS   : ", dev->name);
+       for (n=0; n<sizeof(fips_aes_ecb_from_zero); ++n)
+               dprintk("%02x ", fips_aes_ecb_from_zero[n]);
+       dprintk("\n");
+
+       if (!memcmp(src, fips_aes_ecb_from_zero, sizeof(fips_aes_ecb_from_zero))) {
+               printk(KERN_INFO "%s: AES 128 ECB test passed successfully.\n",
+                               dev->name);
+               return 0;
+       }
+
+err_out:
+       printk(KERN_INFO "%s: AES 128 ECB test failed.\n", dev->name);
+       return -1;
+}
+
+static int hifn_start_device(struct hifn_device *dev)
+{
+       int err;
+
+       hifn_reset_dma(dev, 1);
+
+       err = hifn_enable_crypto(dev);
+       if (err)
+               return err;
+
+       hifn_reset_puc(dev);
+
+       hifn_init_dma(dev);
+
+       hifn_init_registers(dev);
+
+       hifn_init_pubrng(dev);
+
+       return 0;
+}
+
+static int ablkcipher_get(void *saddr, unsigned int *srestp, unsigned int offset,
+               struct scatterlist *dst, unsigned int size, unsigned int *nbytesp)
+{
+       unsigned int srest = *srestp, nbytes = *nbytesp, copy;
+       void *daddr;
+       int idx = 0;
+
+       if (srest < size || size > nbytes)
+               return -EINVAL;
+
+       while (size) {
+
+               copy = min(dst->length, srest);
+
+               daddr = kmap_atomic(sg_page(dst), KM_IRQ0);
+               memcpy(daddr + dst->offset + offset, saddr, copy);
+               kunmap_atomic(daddr, KM_IRQ0);
+
+               nbytes -= copy;
+               size -= copy;
+               srest -= copy;
+               saddr += copy;
+               offset = 0;
+
+               dprintk("%s: copy: %u, size: %u, srest: %u, nbytes: %u.\n",
+                               __func__, copy, size, srest, nbytes);
+
+               dst++;
+               idx++;
+       }
+
+       *nbytesp = nbytes;
+       *srestp = srest;
+
+       return idx;
+}
+
+static void hifn_process_ready(struct ablkcipher_request *req, int error)
+{
+       struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+       struct hifn_device *dev;
+
+       dprintk("%s: req: %p, ctx: %p.\n", __func__, req, ctx);
+
+       dev = ctx->dev;
+       dprintk("%s: req: %p, started: %d, sg_num: %d.\n",
+               __func__, req, dev->started, atomic_read(&ctx->sg_num));
+
+       if (--dev->started < 0)
+               BUG();
+
+       if (atomic_dec_and_test(&ctx->sg_num)) {
+               unsigned int nbytes = req->nbytes;
+               int idx = 0, err;
+               struct scatterlist *dst, *t;
+               void *saddr;
+
+               if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
+                       while (nbytes) {
+                               t = &ctx->walk.cache[idx];
+                               dst = &req->dst[idx];
+
+                               dprintk("\n%s: sg_page(t): %p, t->length: %u, "
+                                       "sg_page(dst): %p, dst->length: %u, "
+                                       "nbytes: %u.\n",
+                                       __func__, sg_page(t), t->length,
+                                       sg_page(dst), dst->length, nbytes);
+
+                               if (!t->length) {
+                                       nbytes -= dst->length;
+                                       idx++;
+                                       continue;
+                               }
+
+                               saddr = kmap_atomic(sg_page(t), KM_IRQ1);
+
+                               err = ablkcipher_get(saddr, &t->length, t->offset,
+                                               dst, nbytes, &nbytes);
+                               if (err < 0) {
+                                       kunmap_atomic(saddr, KM_IRQ1);
+                                       break;
+                               }
+
+                               idx += err;
+                               kunmap_atomic(saddr, KM_IRQ1);
+                       }
+
+                       ablkcipher_walk_exit(&ctx->walk);
+               }
+
+               req->base.complete(&req->base, error);
+       }
+}
+
+static void hifn_check_for_completion(struct hifn_device *dev, int error)
+{
+       int i;
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+
+       for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
+               struct hifn_desc *d = &dma->resr[i];
+
+               if (!(d->l & __cpu_to_le32(HIFN_D_VALID)) && dev->sa[i]) {
+                       dev->success++;
+                       dev->reset = 0;
+                       hifn_process_ready(dev->sa[i], error);
+                       dev->sa[i] = NULL;
+               }
+
+               if (d->l & __cpu_to_le32(HIFN_D_DESTOVER | HIFN_D_OVER))
+                       if (printk_ratelimit())
+                               printk(KERN_WARNING "%s: overflow detected [d: %u, o: %u] "
+                                               "at %d resr: l: %08x, p: %08x.\n",
+                                       dev->name,
+                                       !!(d->l & __cpu_to_le32(HIFN_D_DESTOVER)),
+                                       !!(d->l & __cpu_to_le32(HIFN_D_OVER)),
+                                       i, d->l, d->p);
+       }
+}
+
+static void hifn_clear_rings(struct hifn_device *dev)
+{
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       int i, u;
+
+       dprintk("%s: ring cleanup 1: i: %d.%d.%d.%d, u: %d.%d.%d.%d, "
+                       "k: %d.%d.%d.%d.\n",
+                       dev->name,
+                       dma->cmdi, dma->srci, dma->dsti, dma->resi,
+                       dma->cmdu, dma->srcu, dma->dstu, dma->resu,
+                       dma->cmdk, dma->srck, dma->dstk, dma->resk);
+
+       i = dma->resk; u = dma->resu;
+       while (u != 0) {
+               if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID))
+                       break;
+
+               if (i != HIFN_D_RES_RSIZE)
+                       u--;
+
+               if (++i == (HIFN_D_RES_RSIZE + 1))
+                       i = 0;
+       }
+       dma->resk = i; dma->resu = u;
+
+       i = dma->srck; u = dma->srcu;
+       while (u != 0) {
+               if (i == HIFN_D_SRC_RSIZE)
+                       i = 0;
+               if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID))
+                       break;
+               i++, u--;
+       }
+       dma->srck = i; dma->srcu = u;
+
+       i = dma->cmdk; u = dma->cmdu;
+       while (u != 0) {
+               if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID))
+                       break;
+               if (i != HIFN_D_CMD_RSIZE)
+                       u--;
+               if (++i == (HIFN_D_CMD_RSIZE + 1))
+                       i = 0;
+       }
+       dma->cmdk = i; dma->cmdu = u;
+
+       i = dma->dstk; u = dma->dstu;
+       while (u != 0) {
+               if (i == HIFN_D_DST_RSIZE)
+                       i = 0;
+               if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID))
+                       break;
+               i++, u--;
+       }
+       dma->dstk = i; dma->dstu = u;
+
+       dprintk("%s: ring cleanup 2: i: %d.%d.%d.%d, u: %d.%d.%d.%d, "
+                       "k: %d.%d.%d.%d.\n",
+                       dev->name,
+                       dma->cmdi, dma->srci, dma->dsti, dma->resi,
+                       dma->cmdu, dma->srcu, dma->dstu, dma->resu,
+                       dma->cmdk, dma->srck, dma->dstk, dma->resk);
+}
+
+static void hifn_work(struct work_struct *work)
+{
+       struct delayed_work *dw = container_of(work, struct delayed_work, work);
+       struct hifn_device *dev = container_of(dw, struct hifn_device, work);
+       unsigned long flags;
+       int reset = 0;
+       u32 r = 0;
+
+       spin_lock_irqsave(&dev->lock, flags);
+       if (dev->active == 0) {
+               struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+
+               if (dma->cmdu == 0 && (dev->flags & HIFN_FLAG_CMD_BUSY)) {
+                       dev->flags &= ~HIFN_FLAG_CMD_BUSY;
+                       r |= HIFN_DMACSR_C_CTRL_DIS;
+               }
+               if (dma->srcu == 0 && (dev->flags & HIFN_FLAG_SRC_BUSY)) {
+                       dev->flags &= ~HIFN_FLAG_SRC_BUSY;
+                       r |= HIFN_DMACSR_S_CTRL_DIS;
+               }
+               if (dma->dstu == 0 && (dev->flags & HIFN_FLAG_DST_BUSY)) {
+                       dev->flags &= ~HIFN_FLAG_DST_BUSY;
+                       r |= HIFN_DMACSR_D_CTRL_DIS;
+               }
+               if (dma->resu == 0 && (dev->flags & HIFN_FLAG_RES_BUSY)) {
+                       dev->flags &= ~HIFN_FLAG_RES_BUSY;
+                       r |= HIFN_DMACSR_R_CTRL_DIS;
+               }
+               if (r)
+                       hifn_write_1(dev, HIFN_1_DMA_CSR, r);
+       } else
+               dev->active--;
+
+       if (dev->prev_success == dev->success && dev->started)
+               reset = 1;
+       dev->prev_success = dev->success;
+       spin_unlock_irqrestore(&dev->lock, flags);
+
+       if (reset) {
+               dprintk("%s: r: %08x, active: %d, started: %d, "
+                               "success: %lu, reset: %d.\n",
+                       dev->name, r, dev->active, dev->started,
+                       dev->success, reset);
+
+               if (++dev->reset >= 5) {
+                       dprintk("%s: really hard reset.\n", dev->name);
+                       hifn_reset_dma(dev, 1);
+                       hifn_stop_device(dev);
+                       hifn_start_device(dev);
+                       dev->reset = 0;
+               }
+
+               spin_lock_irqsave(&dev->lock, flags);
+               hifn_check_for_completion(dev, -EBUSY);
+               hifn_clear_rings(dev);
+               dev->started = 0;
+               spin_unlock_irqrestore(&dev->lock, flags);
+       }
+
+       schedule_delayed_work(&dev->work, HZ);
+}
+
+static irqreturn_t hifn_interrupt(int irq, void *data)
+{
+       struct hifn_device *dev = (struct hifn_device *)data;
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       u32 dmacsr, restart;
+
+       dmacsr = hifn_read_1(dev, HIFN_1_DMA_CSR);
+
+       dprintk("%s: 1 dmacsr: %08x, dmareg: %08x, res: %08x [%d], "
+                       "i: %d.%d.%d.%d, u: %d.%d.%d.%d.\n",
+               dev->name, dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi,
+               dma->cmdi, dma->srci, dma->dsti, dma->resi,
+               dma->cmdu, dma->srcu, dma->dstu, dma->resu);
+
+       if ((dmacsr & dev->dmareg) == 0)
+               return IRQ_NONE;
+
+       hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & dev->dmareg);
+
+       if (dmacsr & HIFN_DMACSR_ENGINE)
+               hifn_write_0(dev, HIFN_0_PUISR, hifn_read_0(dev, HIFN_0_PUISR));
+       if (dmacsr & HIFN_DMACSR_PUBDONE)
+               hifn_write_1(dev, HIFN_1_PUB_STATUS,
+                       hifn_read_1(dev, HIFN_1_PUB_STATUS) | HIFN_PUBSTS_DONE);
+
+       restart = dmacsr & (HIFN_DMACSR_R_OVER | HIFN_DMACSR_D_OVER);
+       if (restart) {
+               u32 puisr = hifn_read_0(dev, HIFN_0_PUISR);
+
+               if (printk_ratelimit())
+                       printk(KERN_WARNING "%s: overflow: r: %d, d: %d, puisr: %08x, d: %u.\n",
+                               dev->name, !!(dmacsr & HIFN_DMACSR_R_OVER),
+                               !!(dmacsr & HIFN_DMACSR_D_OVER),
+                               puisr, !!(puisr & HIFN_PUISR_DSTOVER));
+               if (!!(puisr & HIFN_PUISR_DSTOVER))
+                       hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER);
+               hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & (HIFN_DMACSR_R_OVER |
+                                       HIFN_DMACSR_D_OVER));
+       }
+
+       restart = dmacsr & (HIFN_DMACSR_C_ABORT | HIFN_DMACSR_S_ABORT |
+                       HIFN_DMACSR_D_ABORT | HIFN_DMACSR_R_ABORT);
+       if (restart) {
+               if (printk_ratelimit())
+                       printk(KERN_WARNING "%s: abort: c: %d, s: %d, d: %d, r: %d.\n",
+                               dev->name, !!(dmacsr & HIFN_DMACSR_C_ABORT),
+                               !!(dmacsr & HIFN_DMACSR_S_ABORT),
+                               !!(dmacsr & HIFN_DMACSR_D_ABORT),
+                               !!(dmacsr & HIFN_DMACSR_R_ABORT));
+               hifn_reset_dma(dev, 1);
+               hifn_init_dma(dev);
+               hifn_init_registers(dev);
+       }
+
+       if ((dmacsr & HIFN_DMACSR_C_WAIT) && (dma->cmdu == 0)) {
+               dprintk("%s: wait on command.\n", dev->name);
+               dev->dmareg &= ~(HIFN_DMAIER_C_WAIT);
+               hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
+       }
+
+       tasklet_schedule(&dev->tasklet);
+       hifn_clear_rings(dev);
+
+       return IRQ_HANDLED;
+}
+
+static void hifn_flush(struct hifn_device *dev)
+{
+       unsigned long flags;
+       struct crypto_async_request *async_req;
+       struct hifn_context *ctx;
+       struct ablkcipher_request *req;
+       struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
+       int i;
+
+       spin_lock_irqsave(&dev->lock, flags);
+       for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
+               struct hifn_desc *d = &dma->resr[i];
+
+               if (dev->sa[i]) {
+                       hifn_process_ready(dev->sa[i],
+                               (d->l & __cpu_to_le32(HIFN_D_VALID))?-ENODEV:0);
+               }
+       }
+
+       while ((async_req = crypto_dequeue_request(&dev->queue))) {
+               ctx = crypto_tfm_ctx(async_req->tfm);
+               req = container_of(async_req, struct ablkcipher_request, base);
+
+               hifn_process_ready(req, -ENODEV);
+       }
+       spin_unlock_irqrestore(&dev->lock, flags);
+}
+
+static int hifn_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
+               unsigned int len)
+{
+       struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
+       struct hifn_context *ctx = crypto_tfm_ctx(tfm);
+       struct hifn_device *dev = ctx->dev;
+
+       if (len > HIFN_MAX_CRYPT_KEY_LENGTH) {
+               crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
+               return -EINVAL;
+       }
+
+       if (len == HIFN_DES_KEY_LENGTH) {
+               u32 tmp[DES_EXPKEY_WORDS];
+               int ret = des_ekey(tmp, key);
+
+               if (unlikely(ret == 0) && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
+                       tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
+                       return -EINVAL;
+               }
+       }
+
+       dev->flags &= ~HIFN_FLAG_OLD_KEY;
+
+       memcpy(ctx->key, key, len);
+       ctx->keysize = len;
+
+       return 0;
+}
+
+static int hifn_handle_req(struct ablkcipher_request *req)
+{
+       struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+       struct hifn_device *dev = ctx->dev;
+       int err = -EAGAIN;
+
+       if (dev->started + DIV_ROUND_UP(req->nbytes, PAGE_SIZE) <= HIFN_QUEUE_LENGTH)
+               err = hifn_setup_session(req);
+
+       if (err == -EAGAIN) {
+               unsigned long flags;
+
+               spin_lock_irqsave(&dev->lock, flags);
+               err = ablkcipher_enqueue_request(&dev->queue, req);
+               spin_unlock_irqrestore(&dev->lock, flags);
+       }
+
+       return err;
+}
+
+static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
+               u8 type, u8 mode)
+{
+       struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+       unsigned ivsize;
+
+       ivsize = crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req));
+
+       if (req->info && mode != ACRYPTO_MODE_ECB) {
+               if (type == ACRYPTO_TYPE_AES_128)
+                       ivsize = HIFN_AES_IV_LENGTH;
+               else if (type == ACRYPTO_TYPE_DES)
+                       ivsize = HIFN_DES_KEY_LENGTH;
+               else if (type == ACRYPTO_TYPE_3DES)
+                       ivsize = HIFN_3DES_KEY_LENGTH;
+       }
+
+       if (ctx->keysize != 16 && type == ACRYPTO_TYPE_AES_128) {
+               if (ctx->keysize == 24)
+                       type = ACRYPTO_TYPE_AES_192;
+               else if (ctx->keysize == 32)
+                       type = ACRYPTO_TYPE_AES_256;
+       }
+
+       ctx->op = op;
+       ctx->mode = mode;
+       ctx->type = type;
+       ctx->iv = req->info;
+       ctx->ivsize = ivsize;
+
+       /*
+        * HEAVY TODO: needs to kick Herbert XU to write documentation.
+        */
+
+       return hifn_handle_req(req);
+}
+
+static int hifn_process_queue(struct hifn_device *dev)
+{
+       struct crypto_async_request *async_req;
+       struct hifn_context *ctx;
+       struct ablkcipher_request *req;
+       unsigned long flags;
+       int err = 0;
+
+       while (dev->started < HIFN_QUEUE_LENGTH) {
+               spin_lock_irqsave(&dev->lock, flags);
+               async_req = crypto_dequeue_request(&dev->queue);
+               spin_unlock_irqrestore(&dev->lock, flags);
+
+               if (!async_req)
+                       break;
+
+               ctx = crypto_tfm_ctx(async_req->tfm);
+               req = container_of(async_req, struct ablkcipher_request, base);
+
+               err = hifn_handle_req(req);
+               if (err)
+                       break;
+       }
+
+       return err;
+}
+
+static int hifn_setup_crypto(struct ablkcipher_request *req, u8 op,
+               u8 type, u8 mode)
+{
+       int err;
+       struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
+       struct hifn_device *dev = ctx->dev;
+
+       err = hifn_setup_crypto_req(req, op, type, mode);
+       if (err)
+               return err;
+
+       if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen)
+               err = hifn_process_queue(dev);
+
+       return err;
+}
+
+/*
+ * AES encryption functions.
+ */
+static inline int hifn_encrypt_aes_ecb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_ECB);
+}
+static inline int hifn_encrypt_aes_cbc(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CBC);
+}
+static inline int hifn_encrypt_aes_cfb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CFB);
+}
+static inline int hifn_encrypt_aes_ofb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_OFB);
+}
+
+/*
+ * AES decryption functions.
+ */
+static inline int hifn_decrypt_aes_ecb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_ECB);
+}
+static inline int hifn_decrypt_aes_cbc(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CBC);
+}
+static inline int hifn_decrypt_aes_cfb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CFB);
+}
+static inline int hifn_decrypt_aes_ofb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_OFB);
+}
+
+/*
+ * DES encryption functions.
+ */
+static inline int hifn_encrypt_des_ecb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_ECB);
+}
+static inline int hifn_encrypt_des_cbc(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_CBC);
+}
+static inline int hifn_encrypt_des_cfb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_CFB);
+}
+static inline int hifn_encrypt_des_ofb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_OFB);
+}
+
+/*
+ * DES decryption functions.
+ */
+static inline int hifn_decrypt_des_ecb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_ECB);
+}
+static inline int hifn_decrypt_des_cbc(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_CBC);
+}
+static inline int hifn_decrypt_des_cfb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_CFB);
+}
+static inline int hifn_decrypt_des_ofb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_DES, ACRYPTO_MODE_OFB);
+}
+
+/*
+ * 3DES encryption functions.
+ */
+static inline int hifn_encrypt_3des_ecb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_ECB);
+}
+static inline int hifn_encrypt_3des_cbc(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CBC);
+}
+static inline int hifn_encrypt_3des_cfb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CFB);
+}
+static inline int hifn_encrypt_3des_ofb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_OFB);
+}
+
+/*
+ * 3DES decryption functions.
+ */
+static inline int hifn_decrypt_3des_ecb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_ECB);
+}
+static inline int hifn_decrypt_3des_cbc(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CBC);
+}
+static inline int hifn_decrypt_3des_cfb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CFB);
+}
+static inline int hifn_decrypt_3des_ofb(struct ablkcipher_request *req)
+{
+       return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
+                       ACRYPTO_TYPE_3DES, ACRYPTO_MODE_OFB);
+}
+
+struct hifn_alg_template
+{
+       char name[CRYPTO_MAX_ALG_NAME];
+       char drv_name[CRYPTO_MAX_ALG_NAME];
+       unsigned int bsize;
+       struct ablkcipher_alg ablkcipher;
+};
+
+static struct hifn_alg_template hifn_alg_templates[] = {
+       /*
+        * 3DES ECB, CBC, CFB and OFB modes.
+        */
+       {
+               .name = "cfb(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_3des_cfb,
+                       .decrypt        =       hifn_decrypt_3des_cfb,
+               },
+       },
+       {
+               .name = "ofb(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_3des_ofb,
+                       .decrypt        =       hifn_decrypt_3des_ofb,
+               },
+       },
+       {
+               .name = "cbc(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_3des_cbc,
+                       .decrypt        =       hifn_decrypt_3des_cbc,
+               },
+       },
+       {
+               .name = "ecb(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_3DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_3des_ecb,
+                       .decrypt        =       hifn_decrypt_3des_ecb,
+               },
+       },
+
+       /*
+        * DES ECB, CBC, CFB and OFB modes.
+        */
+       {
+               .name = "cfb(des)", .drv_name = "hifn-des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_des_cfb,
+                       .decrypt        =       hifn_decrypt_des_cfb,
+               },
+       },
+       {
+               .name = "ofb(des)", .drv_name = "hifn-des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_des_ofb,
+                       .decrypt        =       hifn_decrypt_des_ofb,
+               },
+       },
+       {
+               .name = "cbc(des)", .drv_name = "hifn-des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_des_cbc,
+                       .decrypt        =       hifn_decrypt_des_cbc,
+               },
+       },
+       {
+               .name = "ecb(des)", .drv_name = "hifn-des", .bsize = 8,
+               .ablkcipher = {
+                       .min_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .max_keysize    =       HIFN_DES_KEY_LENGTH,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_des_ecb,
+                       .decrypt        =       hifn_decrypt_des_ecb,
+               },
+       },
+
+       /*
+        * AES ECB, CBC, CFB and OFB modes.
+        */
+       {
+               .name = "ecb(aes)", .drv_name = "hifn-aes", .bsize = 16,
+               .ablkcipher = {
+                       .min_keysize    =       AES_MIN_KEY_SIZE,
+                       .max_keysize    =       AES_MAX_KEY_SIZE,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_aes_ecb,
+                       .decrypt        =       hifn_decrypt_aes_ecb,
+               },
+       },
+       {
+               .name = "cbc(aes)", .drv_name = "hifn-aes", .bsize = 16,
+               .ablkcipher = {
+                       .min_keysize    =       AES_MIN_KEY_SIZE,
+                       .max_keysize    =       AES_MAX_KEY_SIZE,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_aes_cbc,
+                       .decrypt        =       hifn_decrypt_aes_cbc,
+               },
+       },
+       {
+               .name = "cfb(aes)", .drv_name = "hifn-aes", .bsize = 16,
+               .ablkcipher = {
+                       .min_keysize    =       AES_MIN_KEY_SIZE,
+                       .max_keysize    =       AES_MAX_KEY_SIZE,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_aes_cfb,
+                       .decrypt        =       hifn_decrypt_aes_cfb,
+               },
+       },
+       {
+               .name = "ofb(aes)", .drv_name = "hifn-aes", .bsize = 16,
+               .ablkcipher = {
+                       .min_keysize    =       AES_MIN_KEY_SIZE,
+                       .max_keysize    =       AES_MAX_KEY_SIZE,
+                       .setkey         =       hifn_setkey,
+                       .encrypt        =       hifn_encrypt_aes_ofb,
+                       .decrypt        =       hifn_decrypt_aes_ofb,
+               },
+       },
+};
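+
+/*
+ * Usage sketch (illustrative, not part of the driver): once these templates
+ * are registered, a kernel consumer reaches the hardware implementation
+ * through the regular asynchronous block cipher API, e.g. (error handling
+ * omitted):
+ *
+ *     struct crypto_ablkcipher *tfm;
+ *     struct ablkcipher_request *req;
+ *
+ *     tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
+ *     crypto_ablkcipher_setkey(tfm, key, 16);
+ *     req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
+ *     ablkcipher_request_set_callback(req, 0, my_complete, my_priv);
+ *     ablkcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
+ *     crypto_ablkcipher_encrypt(req);
+ *
+ * "my_complete" and "my_priv" are placeholders for the caller's completion
+ * callback and private data; "key", "src_sg", "dst_sg", "nbytes" and "iv"
+ * stand for the caller's key material, scatterlists, length and IV.
+ */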
+
+static int hifn_cra_init(struct crypto_tfm *tfm)
+{
+       struct crypto_alg *alg = tfm->__crt_alg;
+       struct hifn_crypto_alg *ha = crypto_alg_to_hifn(alg);
+       struct hifn_context *ctx = crypto_tfm_ctx(tfm);
+
+       ctx->dev = ha->dev;
+
+       return 0;
+}
+
+static int hifn_alg_alloc(struct hifn_device *dev, struct hifn_alg_template *t)
+{
+       struct hifn_crypto_alg *alg;
+       int err;
+
+       alg = kzalloc(sizeof(struct hifn_crypto_alg), GFP_KERNEL);
+       if (!alg)
+               return -ENOMEM;
+
+       snprintf(alg->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s", t->name);
+       snprintf(alg->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", t->drv_name);
+
+       alg->alg.cra_priority = 300;
+       alg->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
+       alg->alg.cra_blocksize = t->bsize;
+       alg->alg.cra_ctxsize = sizeof(struct hifn_context);
+       alg->alg.cra_alignmask = 15;
+       if (t->bsize == 8)
+               alg->alg.cra_alignmask = 3;
+       alg->alg.cra_type = &crypto_ablkcipher_type;
+       alg->alg.cra_module = THIS_MODULE;
+       alg->alg.cra_u.ablkcipher = t->ablkcipher;
+       alg->alg.cra_init = hifn_cra_init;
+
+       alg->dev = dev;
+
+       list_add_tail(&alg->entry, &dev->alg_list);
+
+       err = crypto_register_alg(&alg->alg);
+       if (err) {
+               list_del(&alg->entry);
+               kfree(alg);
+       }
+
+       return err;
+}
+
+static void hifn_unregister_alg(struct hifn_device *dev)
+{
+       struct hifn_crypto_alg *a, *n;
+
+       list_for_each_entry_safe(a, n, &dev->alg_list, entry) {
+               list_del(&a->entry);
+               crypto_unregister_alg(&a->alg);
+               kfree(a);
+       }
+}
+
+static int hifn_register_alg(struct hifn_device *dev)
+{
+       int i, err;
+
+       for (i=0; i<ARRAY_SIZE(hifn_alg_templates); ++i) {
+               err = hifn_alg_alloc(dev, &hifn_alg_templates[i]);
+               if (err)
+                       goto err_out_exit;
+       }
+
+       return 0;
+
+err_out_exit:
+       hifn_unregister_alg(dev);
+       return err;
+}
+
+static void hifn_tasklet_callback(unsigned long data)
+{
+       struct hifn_device *dev = (struct hifn_device *)data;
+
+       /*
+        * It is ok to call this without the lock being held:
+        * although it modifies some parameters used in parallel
+        * (like dev->success), they are only used in process
+        * context or updated atomically (like setting dev->sa[i] to NULL).
+        */
+       hifn_check_for_completion(dev, 0);
+}
+
+static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
+{
+       int err, i;
+       struct hifn_device *dev;
+       char name[8];
+
+       err = pci_enable_device(pdev);
+       if (err)
+               return err;
+       pci_set_master(pdev);
+
+       err = pci_set_dma_mask(pdev, DMA_32BIT_MASK);
+       if (err)
+               goto err_out_disable_pci_device;
+
+       snprintf(name, sizeof(name), "hifn%d",
+                       atomic_inc_return(&hifn_dev_number)-1);
+
+       err = pci_request_regions(pdev, name);
+       if (err)
+               goto err_out_disable_pci_device;
+
+       if (pci_resource_len(pdev, 0) < HIFN_BAR0_SIZE ||
+           pci_resource_len(pdev, 1) < HIFN_BAR1_SIZE ||
+           pci_resource_len(pdev, 2) < HIFN_BAR2_SIZE) {
+               dprintk("%s: Broken hardware - I/O regions are too small.\n",
+                               pci_name(pdev));
+               err = -ENODEV;
+               goto err_out_free_regions;
+       }
+
+       dev = kzalloc(sizeof(struct hifn_device) + sizeof(struct crypto_alg),
+                       GFP_KERNEL);
+       if (!dev) {
+               err = -ENOMEM;
+               goto err_out_free_regions;
+       }
+
+       INIT_LIST_HEAD(&dev->alg_list);
+
+       snprintf(dev->name, sizeof(dev->name), "%s", name);
+       spin_lock_init(&dev->lock);
+
+       for (i=0; i<3; ++i) {
+               unsigned long addr, size;
+
+               addr = pci_resource_start(pdev, i);
+               size = pci_resource_len(pdev, i);
+
+               dev->bar[i] = ioremap_nocache(addr, size);
+               if (!dev->bar[i])
+                       goto err_out_unmap_bars;
+       }
+
+       dev->result_mem = __get_free_pages(GFP_KERNEL, HIFN_MAX_RESULT_ORDER);
+       if (!dev->result_mem) {
+               dprintk("Failed to allocate %d pages for result_mem.\n",
+                               HIFN_MAX_RESULT_ORDER);
+               goto err_out_unmap_bars;
+       }
+       memset((void *)dev->result_mem, 0, PAGE_SIZE*(1<<HIFN_MAX_RESULT_ORDER));
+
+       dev->dst = pci_map_single(pdev, (void *)dev->result_mem,
+                       PAGE_SIZE << HIFN_MAX_RESULT_ORDER, PCI_DMA_FROMDEVICE);
+
+       dev->desc_virt = pci_alloc_consistent(pdev, sizeof(struct hifn_dma),
+                       &dev->desc_dma);
+       if (!dev->desc_virt) {
+               dprintk("Failed to allocate descriptor rings.\n");
+               goto err_out_free_result_pages;
+       }
+       memset(dev->desc_virt, 0, sizeof(struct hifn_dma));
+
+       dev->pdev = pdev;
+       dev->irq = pdev->irq;
+
+       for (i=0; i<HIFN_D_RES_RSIZE; ++i)
+               dev->sa[i] = NULL;
+
+       pci_set_drvdata(pdev, dev);
+
+       tasklet_init(&dev->tasklet, hifn_tasklet_callback, (unsigned long)dev);
+
+       crypto_init_queue(&dev->queue, 1);
+
+       err = request_irq(dev->irq, hifn_interrupt, IRQF_SHARED, dev->name, dev);
+       if (err) {
+               dprintk("Failed to request IRQ%d: err: %d.\n", dev->irq, err);
+               dev->irq = 0;
+               goto err_out_free_desc;
+       }
+
+       err = hifn_start_device(dev);
+       if (err)
+               goto err_out_free_irq;
+
+       err = hifn_test(dev, 1, 0);
+       if (err)
+               goto err_out_stop_device;
+
+       err = hifn_register_rng(dev);
+       if (err)
+               goto err_out_stop_device;
+
+       err = hifn_register_alg(dev);
+       if (err)
+               goto err_out_unregister_rng;
+
+       INIT_DELAYED_WORK(&dev->work, hifn_work);
+       schedule_delayed_work(&dev->work, HZ);
+
+       dprintk("HIFN crypto accelerator card at %s has been "
+                       "successfully registered as %s.\n",
+                       pci_name(pdev), dev->name);
+
+       return 0;
+
+err_out_unregister_rng:
+       hifn_unregister_rng(dev);
+err_out_stop_device:
+       hifn_reset_dma(dev, 1);
+       hifn_stop_device(dev);
+err_out_free_irq:
+       free_irq(dev->irq, dev);
+       tasklet_kill(&dev->tasklet);
+err_out_free_desc:
+       pci_free_consistent(pdev, sizeof(struct hifn_dma),
+                       dev->desc_virt, dev->desc_dma);
+
+err_out_free_result_pages:
+       pci_unmap_single(pdev, dev->dst, PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
+                       PCI_DMA_FROMDEVICE);
+       free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
+
+err_out_unmap_bars:
+       for (i=0; i<3; ++i)
+               if (dev->bar[i])
+                       iounmap(dev->bar[i]);
+
+err_out_free_regions:
+       pci_release_regions(pdev);
+
+err_out_disable_pci_device:
+       pci_disable_device(pdev);
+
+       return err;
+}
+
+static void hifn_remove(struct pci_dev *pdev)
+{
+       int i;
+       struct hifn_device *dev;
+
+       dev = pci_get_drvdata(pdev);
+
+       if (dev) {
+               cancel_delayed_work(&dev->work);
+               flush_scheduled_work();
+
+               hifn_unregister_rng(dev);
+               hifn_unregister_alg(dev);
+               hifn_reset_dma(dev, 1);
+               hifn_stop_device(dev);
+
+               free_irq(dev->irq, dev);
+               tasklet_kill(&dev->tasklet);
+
+               hifn_flush(dev);
+
+               pci_free_consistent(pdev, sizeof(struct hifn_dma),
+                               dev->desc_virt, dev->desc_dma);
+               pci_unmap_single(pdev, dev->dst,
+                               PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
+                               PCI_DMA_FROMDEVICE);
+               free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
+               for (i=0; i<3; ++i)
+                       if (dev->bar[i])
+                               iounmap(dev->bar[i]);
+
+               kfree(dev);
+       }
+
+       pci_release_regions(pdev);
+       pci_disable_device(pdev);
+}
+
+static struct pci_device_id hifn_pci_tbl[] = {
+       { PCI_DEVICE(PCI_VENDOR_ID_HIFN, PCI_DEVICE_ID_HIFN_7955) },
+       { PCI_DEVICE(PCI_VENDOR_ID_HIFN, PCI_DEVICE_ID_HIFN_7956) },
+       { 0 }
+};
+MODULE_DEVICE_TABLE(pci, hifn_pci_tbl);
+
+static struct pci_driver hifn_pci_driver = {
+       .name     = "hifn795x",
+       .id_table = hifn_pci_tbl,
+       .probe    = hifn_probe,
+       .remove   = __devexit_p(hifn_remove),
+};
+
+static int __devinit hifn_init(void)
+{
+       unsigned int freq;
+       int err;
+
+       if (strncmp(hifn_pll_ref, "ext", 3) &&
+           strncmp(hifn_pll_ref, "pci", 3)) {
+               printk(KERN_ERR "hifn795x: invalid hifn_pll_ref clock, "
+                               "must be pci or ext.\n");
+               return -EINVAL;
+       }
+
+       /*
+        * For the 7955/7956 the reference clock frequency must be in the
+        * range of 20MHz-100MHz. For the 7954 the upper bound is 66.67MHz,
+        * but this chip is currently not supported.
+        */
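+       /*
+        * Illustrative examples (assuming the hifn_pll_ref module parameter
+        * declared earlier in this file): "hifn_pll_ref=ext33" selects an
+        * external 33 MHz reference clock, "hifn_pll_ref=pci66" the 66 MHz
+        * PCI clock; a bare "ext" or "pci" skips the range check below and
+        * leaves the frequency to the driver's default handling.
+        */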
+       if (hifn_pll_ref[3] != '\0') {
+               freq = simple_strtoul(hifn_pll_ref + 3, NULL, 10);
+               if (freq < 20 || freq > 100) {
+			printk(KERN_ERR "hifn795x: invalid hifn_pll_ref "
+					"frequency, must be in the range "
+					"of 20-100\n");
+                       return -EINVAL;
+               }
+       }
+
+       err = pci_register_driver(&hifn_pci_driver);
+       if (err < 0) {
+               dprintk("Failed to register PCI driver for %s device.\n",
+                               hifn_pci_driver.name);
+               return -ENODEV;
+       }
+
+       printk(KERN_INFO "Driver for HIFN 795x crypto accelerator chip "
+                       "has been successfully registered.\n");
+
+       return 0;
+}
+
+static void __devexit hifn_fini(void)
+{
+       pci_unregister_driver(&hifn_pci_driver);
+
+       printk(KERN_INFO "Driver for HIFN 795x crypto accelerator chip "
+                       "has been successfully unregistered.\n");
+}
+
+module_init(hifn_init);
+module_exit(hifn_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Evgeniy Polyakov <johnpol@2ka.mipt.ru>");
+MODULE_DESCRIPTION("Driver for HIFN 795x crypto accelerator chip.");
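
hifn_init() above parses the PLL reference from a module parameter string of
the form "pci" or "ext", optionally followed by the reference frequency in
MHz (e.g. "ext33"). A minimal sketch of how such a string parameter is
typically declared (the real declaration sits earlier in the file and is not
shown in this hunk; buffer size and permissions here are assumptions):

	static char hifn_pll_ref[8] = "ext";	/* assumed default: external clock */
	module_param_string(hifn_pll_ref, hifn_pll_ref, sizeof(hifn_pll_ref), 0444);
	MODULE_PARM_DESC(hifn_pll_ref,
			 "PLL reference clock, pci[freq] or ext[freq] in MHz (default ext)");
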
index 5f7e718..2f3ad3f 100644 (file)
@@ -44,6 +44,7 @@
  */
 
 #include <crypto/algapi.h>
+#include <crypto/aes.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include <linux/types.h>
@@ -53,9 +54,6 @@
 #include <asm/byteorder.h>
 #include "padlock.h"
 
-#define AES_MIN_KEY_SIZE       16      /* in uint8_t units */
-#define AES_MAX_KEY_SIZE       32      /* ditto */
-#define AES_BLOCK_SIZE         16      /* ditto */
 #define AES_EXTENDED_KEY_SIZE  64      /* in uint32_t units */
 #define AES_EXTENDED_KEY_SIZE_B        (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t))
 
@@ -419,6 +417,11 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 /* ====== Encryption/decryption routines ====== */
 
 /* These are the real call to PadLock. */
+static inline void padlock_reset_key(void)
+{
+       asm volatile ("pushfl; popfl");
+}
+
 static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key,
                                  void *control_word)
 {
@@ -439,8 +442,6 @@ static void aes_crypt_copy(const u8 *in, u8 *out, u32 *key, struct cword *cword)
 static inline void aes_crypt(const u8 *in, u8 *out, u32 *key,
                             struct cword *cword)
 {
-       asm volatile ("pushfl; popfl");
-
        /* padlock_xcrypt requires at least two blocks of data. */
        if (unlikely(!(((unsigned long)in ^ (PAGE_SIZE - AES_BLOCK_SIZE)) &
                       (PAGE_SIZE - 1)))) {
@@ -459,7 +460,6 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
                return;
        }
 
-       asm volatile ("pushfl; popfl");         /* enforce key reload. */
        asm volatile ("test $1, %%cl;"
                      "je 1f;"
                      "lea -1(%%ecx), %%eax;"
@@ -476,8 +476,6 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
 static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
                                     u8 *iv, void *control_word, u32 count)
 {
-       /* Enforce key reload. */
-       asm volatile ("pushfl; popfl");
        /* rep xcryptcbc */
        asm volatile (".byte 0xf3,0x0f,0xa7,0xd0"
                      : "+S" (input), "+D" (output), "+a" (iv)
@@ -488,12 +486,14 @@ static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        struct aes_ctx *ctx = aes_ctx(tfm);
+       padlock_reset_key();
        aes_crypt(in, out, ctx->E, &ctx->cword.encrypt);
 }
 
 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
        struct aes_ctx *ctx = aes_ctx(tfm);
+       padlock_reset_key();
        aes_crypt(in, out, ctx->D, &ctx->cword.decrypt);
 }
 
@@ -526,6 +526,8 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err;
 
+       padlock_reset_key();
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
 
@@ -548,6 +550,8 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err;
 
+       padlock_reset_key();
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
 
@@ -592,6 +596,8 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err;
 
+       padlock_reset_key();
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
 
@@ -616,6 +622,8 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
        struct blkcipher_walk walk;
        int err;
 
+       padlock_reset_key();
+
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
 
diff --git a/include/crypto/aead.h b/include/crypto/aead.h
new file mode 100644 (file)
index 0000000..0edf949
--- /dev/null
@@ -0,0 +1,105 @@
+/*
+ * AEAD: Authenticated Encryption with Associated Data
+ * 
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option) 
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_AEAD_H
+#define _CRYPTO_AEAD_H
+
+#include <linux/crypto.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+
+/**
+ *     struct aead_givcrypt_request - AEAD request with IV generation
+ *     @seq: Sequence number for IV generation
+ *     @giv: Space for generated IV
+ *     @areq: The AEAD request itself
+ */
+struct aead_givcrypt_request {
+       u64 seq;
+       u8 *giv;
+
+       struct aead_request areq;
+};
+
+static inline struct crypto_aead *aead_givcrypt_reqtfm(
+       struct aead_givcrypt_request *req)
+{
+       return crypto_aead_reqtfm(&req->areq);
+}
+
+static inline int crypto_aead_givencrypt(struct aead_givcrypt_request *req)
+{
+       struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
+       return crt->givencrypt(req);
+};
+
+static inline int crypto_aead_givdecrypt(struct aead_givcrypt_request *req)
+{
+       struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
+       return crt->givdecrypt(req);
+};
+
+static inline void aead_givcrypt_set_tfm(struct aead_givcrypt_request *req,
+                                        struct crypto_aead *tfm)
+{
+       req->areq.base.tfm = crypto_aead_tfm(tfm);
+}
+
+static inline struct aead_givcrypt_request *aead_givcrypt_alloc(
+       struct crypto_aead *tfm, gfp_t gfp)
+{
+       struct aead_givcrypt_request *req;
+
+       req = kmalloc(sizeof(struct aead_givcrypt_request) +
+                     crypto_aead_reqsize(tfm), gfp);
+
+       if (likely(req))
+               aead_givcrypt_set_tfm(req, tfm);
+
+       return req;
+}
+
+static inline void aead_givcrypt_free(struct aead_givcrypt_request *req)
+{
+       kfree(req);
+}
+
+static inline void aead_givcrypt_set_callback(
+       struct aead_givcrypt_request *req, u32 flags,
+       crypto_completion_t complete, void *data)
+{
+       aead_request_set_callback(&req->areq, flags, complete, data);
+}
+
+static inline void aead_givcrypt_set_crypt(struct aead_givcrypt_request *req,
+                                          struct scatterlist *src,
+                                          struct scatterlist *dst,
+                                          unsigned int nbytes, void *iv)
+{
+       aead_request_set_crypt(&req->areq, src, dst, nbytes, iv);
+}
+
+static inline void aead_givcrypt_set_assoc(struct aead_givcrypt_request *req,
+                                          struct scatterlist *assoc,
+                                          unsigned int assoclen)
+{
+       aead_request_set_assoc(&req->areq, assoc, assoclen);
+}
+
+static inline void aead_givcrypt_set_giv(struct aead_givcrypt_request *req,
+                                        u8 *giv, u64 seq)
+{
+       req->giv = giv;
+       req->seq = seq;
+}
+
+#endif /* _CRYPTO_AEAD_H */
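
A minimal usage sketch for the API above (assumed caller code: the algorithm
name, scatterlists, completion callback and sequence number are illustrative,
and error handling is elided):

	struct crypto_aead *tfm;
	struct aead_givcrypt_request *req;
	int err;

	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
	req = aead_givcrypt_alloc(tfm, GFP_KERNEL);

	aead_givcrypt_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   my_complete, NULL);
	aead_givcrypt_set_assoc(req, assoc_sg, assoclen);
	aead_givcrypt_set_crypt(req, src_sg, dst_sg, nbytes, iv);
	aead_givcrypt_set_giv(req, giv_buf, seqno);	/* generated IV lands in giv_buf */

	err = crypto_aead_givencrypt(req);	/* may return -EINPROGRESS */

	/* only after the request has completed: */
	aead_givcrypt_free(req);
	crypto_free_aead(tfm);
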
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
new file mode 100644 (file)
index 0000000..d480b76
--- /dev/null
@@ -0,0 +1,31 @@
+/*
+ * Common values for AES algorithms
+ */
+
+#ifndef _CRYPTO_AES_H
+#define _CRYPTO_AES_H
+
+#include <linux/types.h>
+#include <linux/crypto.h>
+
+#define AES_MIN_KEY_SIZE       16
+#define AES_MAX_KEY_SIZE       32
+#define AES_KEYSIZE_128                16
+#define AES_KEYSIZE_192                24
+#define AES_KEYSIZE_256                32
+#define AES_BLOCK_SIZE         16
+
+struct crypto_aes_ctx {
+       u32 key_length;
+       u32 key_enc[60];
+       u32 key_dec[60];
+};
+
+extern u32 crypto_ft_tab[4][256];
+extern u32 crypto_fl_tab[4][256];
+extern u32 crypto_it_tab[4][256];
+extern u32 crypto_il_tab[4][256];
+
+int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+               unsigned int key_len);
+#endif
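
These shared definitions let AES implementations validate key lengths with
the AES_KEYSIZE_* constants and reuse the generic key schedule rather than
duplicating it. A sketch of a setkey handler, assuming the transform context
is a struct crypto_aes_ctx:

	static int example_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
				      unsigned int key_len)
	{
		/*
		 * Rejects anything other than AES_KEYSIZE_128/192/256 and
		 * expands the schedule into the crypto_aes_ctx.
		 */
		return crypto_aes_set_key(tfm, in_key, key_len);
	}
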
index b9b05d3..60d06e7 100644 (file)
@@ -111,8 +111,15 @@ void crypto_drop_spawn(struct crypto_spawn *spawn);
 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask);
 
+static inline void crypto_set_spawn(struct crypto_spawn *spawn,
+                                   struct crypto_instance *inst)
+{
+       spawn->inst = inst;
+}
+
 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
 int crypto_check_attr_type(struct rtattr **tb, u32 type);
+const char *crypto_attr_alg_name(struct rtattr *rta);
 struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
 int crypto_attr_u32(struct rtattr *rta, u32 *num);
 struct crypto_instance *crypto_alloc_instance(const char *name,
@@ -124,6 +131,10 @@ int crypto_enqueue_request(struct crypto_queue *queue,
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
 
+/* These functions require the input/output to be aligned as u32. */
+void crypto_inc(u8 *a, unsigned int size);
+void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
+
 int blkcipher_walk_done(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk, int err);
 int blkcipher_walk_virt(struct blkcipher_desc *desc,
@@ -187,20 +198,11 @@ static inline struct crypto_instance *crypto_aead_alg_instance(
        return crypto_tfm_alg_instance(&aead->base);
 }
 
-static inline struct crypto_ablkcipher *crypto_spawn_ablkcipher(
-       struct crypto_spawn *spawn)
-{
-       u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-       u32 mask = CRYPTO_ALG_TYPE_MASK;
-
-       return __crypto_ablkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
 static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
        struct crypto_spawn *spawn)
 {
        u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-       u32 mask = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+       u32 mask = CRYPTO_ALG_TYPE_MASK;
 
        return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
 }
@@ -303,5 +305,14 @@ static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
        return crypto_attr_alg(tb[1], type, mask);
 }
 
+/*
+ * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
+ * Otherwise returns zero.
+ */
+static inline int crypto_requires_sync(u32 type, u32 mask)
+{
+       return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
+}
+
 #endif /* _CRYPTO_ALGAPI_H */
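
A hypothetical template sketch of how crypto_requires_sync() is meant to be
used: its result is folded into the lookup mask so that a caller asking for a
synchronous instance can only be bound to synchronous children. The spawn,
attribute table and cipher name are illustrative; crypto_grab_skcipher() is
declared in the new include/crypto/internal/skcipher.h below.

	struct crypto_attr_type *algt = crypto_get_attr_type(tb);
	int err;

	if (IS_ERR(algt))
		return PTR_ERR(algt);

	err = crypto_grab_skcipher(spawn, cipher_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
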
 
diff --git a/include/crypto/authenc.h b/include/crypto/authenc.h
new file mode 100644 (file)
index 0000000..e47b044
--- /dev/null
@@ -0,0 +1,27 @@
+/*
+ * Authenc: Simple AEAD wrapper for IPsec
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+#ifndef _CRYPTO_AUTHENC_H
+#define _CRYPTO_AUTHENC_H
+
+#include <linux/types.h>
+
+enum {
+       CRYPTO_AUTHENC_KEYA_UNSPEC,
+       CRYPTO_AUTHENC_KEYA_PARAM,
+};
+
+struct crypto_authenc_key_param {
+       __be32 enckeylen;
+};
+
+#endif /* _CRYPTO_AUTHENC_H */
+
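
The parameter block above defines the wire format of an authenc() key: an
rtattr-wrapped crypto_authenc_key_param followed by the authentication key
and then the encryption key. A sketch of a caller building such a blob
(keybuf sizing and the authkey/enckey variables are assumed; the layout
mirrors the convention the authenc template's setkey parses):

	struct rtattr *rta = (struct rtattr *)keybuf;
	struct crypto_authenc_key_param *param;
	u8 *p;

	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
	rta->rta_len = RTA_LENGTH(sizeof(*param));
	param = RTA_DATA(rta);
	param->enckeylen = cpu_to_be32(enckeylen);

	p = (u8 *)rta + RTA_SPACE(sizeof(*param));
	memcpy(p, authkey, authkeylen);		/* authentication key first */
	memcpy(p + authkeylen, enckey, enckeylen);
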
diff --git a/include/crypto/ctr.h b/include/crypto/ctr.h
new file mode 100644 (file)
index 0000000..4180fc0
--- /dev/null
@@ -0,0 +1,20 @@
+/*
+ * CTR: Counter mode
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_CTR_H
+#define _CRYPTO_CTR_H
+
+#define CTR_RFC3686_NONCE_SIZE 4
+#define CTR_RFC3686_IV_SIZE 8
+#define CTR_RFC3686_BLOCK_SIZE 16
+
+#endif  /* _CRYPTO_CTR_H */
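
Per RFC 3686 the 16-byte counter block handed to CTR consists of the 4-byte
nonce, the 8-byte per-packet IV and a 32-bit big-endian block counter that
starts at 1. A sketch (the nonce and iv buffers are assumed):

	u8 ctrblk[CTR_RFC3686_BLOCK_SIZE];

	memcpy(ctrblk, nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(ctrblk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
	/* initial block counter, big endian */
	*(__be32 *)(ctrblk + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);
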
diff --git a/include/crypto/des.h b/include/crypto/des.h
new file mode 100644 (file)
index 0000000..2971c63
--- /dev/null
@@ -0,0 +1,19 @@
+/* 
+ * DES & Triple DES EDE Cipher Algorithms.
+ */
+
+#ifndef __CRYPTO_DES_H
+#define __CRYPTO_DES_H
+
+#define DES_KEY_SIZE           8
+#define DES_EXPKEY_WORDS       32
+#define DES_BLOCK_SIZE         8
+
+#define DES3_EDE_KEY_SIZE      (3 * DES_KEY_SIZE)
+#define DES3_EDE_EXPKEY_WORDS  (3 * DES_EXPKEY_WORDS)
+#define DES3_EDE_BLOCK_SIZE    DES_BLOCK_SIZE
+
+
+extern unsigned long des_ekey(u32 *pe, const u8 *k);
+
+#endif /* __CRYPTO_DES_H */
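
des_ekey() is exported so that other DES users (for example triple-DES setkey
paths) can share the key expansion and weak-key detection. A sketch, assuming
the usual convention that a zero return value flags a weak key:

	u32 expkey[DES_EXPKEY_WORDS];

	if (unlikely(!des_ekey(expkey, raw_key)))
		return -EINVAL;	/* one of the known DES weak keys */
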
diff --git a/include/crypto/internal/aead.h b/include/crypto/internal/aead.h
new file mode 100644 (file)
index 0000000..d838c94
--- /dev/null
@@ -0,0 +1,80 @@
+/*
+ * AEAD: Authenticated Encryption with Associated Data
+ * 
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option) 
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_INTERNAL_AEAD_H
+#define _CRYPTO_INTERNAL_AEAD_H
+
+#include <crypto/aead.h>
+#include <crypto/algapi.h>
+#include <linux/types.h>
+
+struct rtattr;
+
+struct crypto_aead_spawn {
+       struct crypto_spawn base;
+};
+
+extern const struct crypto_type crypto_nivaead_type;
+
+static inline void crypto_set_aead_spawn(
+       struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
+{
+       crypto_set_spawn(&spawn->base, inst);
+}
+
+int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
+                    u32 type, u32 mask);
+
+static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
+{
+       crypto_drop_spawn(&spawn->base);
+}
+
+static inline struct crypto_alg *crypto_aead_spawn_alg(
+       struct crypto_aead_spawn *spawn)
+{
+       return spawn->base.alg;
+}
+
+static inline struct crypto_aead *crypto_spawn_aead(
+       struct crypto_aead_spawn *spawn)
+{
+       return __crypto_aead_cast(
+               crypto_spawn_tfm(&spawn->base, CRYPTO_ALG_TYPE_AEAD,
+                                CRYPTO_ALG_TYPE_MASK));
+}
+
+struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+                                        struct rtattr **tb, u32 type,
+                                        u32 mask);
+void aead_geniv_free(struct crypto_instance *inst);
+int aead_geniv_init(struct crypto_tfm *tfm);
+void aead_geniv_exit(struct crypto_tfm *tfm);
+
+static inline struct crypto_aead *aead_geniv_base(struct crypto_aead *geniv)
+{
+       return crypto_aead_crt(geniv)->base;
+}
+
+static inline void *aead_givcrypt_reqctx(struct aead_givcrypt_request *req)
+{
+       return aead_request_ctx(&req->areq);
+}
+
+static inline void aead_givcrypt_complete(struct aead_givcrypt_request *req,
+                                         int err)
+{
+       aead_request_complete(&req->areq, err);
+}
+
+#endif /* _CRYPTO_INTERNAL_AEAD_H */
+
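
A hypothetical wrapper-template sketch using only the spawn helpers declared
above (inst, spawn, child_name and the error label are illustrative):

	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_aead(spawn, child_name, 0, 0);
	if (err)
		goto err_free_inst;

	alg = crypto_aead_spawn_alg(spawn);	/* the wrapped AEAD's properties */

	/* and on the instance's free path: */
	crypto_drop_aead(spawn);
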
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
new file mode 100644 (file)
index 0000000..2ba42cd
--- /dev/null
@@ -0,0 +1,110 @@
+/*
+ * Symmetric key ciphers.
+ * 
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option) 
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
+#define _CRYPTO_INTERNAL_SKCIPHER_H
+
+#include <crypto/algapi.h>
+#include <crypto/skcipher.h>
+#include <linux/types.h>
+
+struct rtattr;
+
+struct crypto_skcipher_spawn {
+       struct crypto_spawn base;
+};
+
+extern const struct crypto_type crypto_givcipher_type;
+
+static inline void crypto_set_skcipher_spawn(
+       struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
+{
+       crypto_set_spawn(&spawn->base, inst);
+}
+
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
+                        u32 type, u32 mask);
+
+static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
+{
+       crypto_drop_spawn(&spawn->base);
+}
+
+static inline struct crypto_alg *crypto_skcipher_spawn_alg(
+       struct crypto_skcipher_spawn *spawn)
+{
+       return spawn->base.alg;
+}
+
+static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
+       struct crypto_skcipher_spawn *spawn)
+{
+       return __crypto_ablkcipher_cast(
+               crypto_spawn_tfm(&spawn->base, crypto_skcipher_type(0),
+                                crypto_skcipher_mask(0)));
+}
+
+int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req);
+int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req);
+const char *crypto_default_geniv(const struct crypto_alg *alg);
+
+struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
+                                            struct rtattr **tb, u32 type,
+                                            u32 mask);
+void skcipher_geniv_free(struct crypto_instance *inst);
+int skcipher_geniv_init(struct crypto_tfm *tfm);
+void skcipher_geniv_exit(struct crypto_tfm *tfm);
+
+static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
+       struct crypto_ablkcipher *geniv)
+{
+       return crypto_ablkcipher_crt(geniv)->base;
+}
+
+static inline int skcipher_enqueue_givcrypt(
+       struct crypto_queue *queue, struct skcipher_givcrypt_request *request)
+{
+       return ablkcipher_enqueue_request(queue, &request->creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt(
+       struct crypto_queue *queue)
+{
+       return container_of(ablkcipher_dequeue_request(queue),
+                           struct skcipher_givcrypt_request, creq);
+}
+
+static inline void *skcipher_givcrypt_reqctx(
+       struct skcipher_givcrypt_request *req)
+{
+       return ablkcipher_request_ctx(&req->creq);
+}
+
+static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
+                                              int err)
+{
+       req->base.complete(&req->base, err);
+}
+
+static inline void skcipher_givcrypt_complete(
+       struct skcipher_givcrypt_request *req, int err)
+{
+       ablkcipher_request_complete(&req->creq, err);
+}
+
+static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
+{
+       return req->base.flags;
+}
+
+#endif /* _CRYPTO_INTERNAL_SKCIPHER_H */
+
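
skcipher_geniv_init() above does this job for the stock IV-generator
templates; a sketch of the underlying idea, turning the spawn recorded at
instance-creation time into a live child transform (the function and its
context handling are hypothetical):

	static int example_geniv_init(struct crypto_tfm *tfm)
	{
		struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
		struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);
		struct crypto_ablkcipher *cipher;

		cipher = crypto_spawn_skcipher(spawn);
		if (IS_ERR(cipher))
			return PTR_ERR(cipher);

		/* a real implementation would stash "cipher" in its context */
		crypto_free_ablkcipher(cipher);
		return 0;
	}
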
similarity index 69%
rename from crypto/scatterwalk.h
rename to include/crypto/scatterwalk.h
index 87ed681..224658b 100644 (file)
@@ -1,9 +1,10 @@
 /*
- * Cryptographic API.
+ * Cryptographic scatter and gather helpers.
  *
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  * Copyright (c) 2002 Adam J. Richter <adam@yggdrasil.com>
  * Copyright (c) 2004 Jean-Luc Cooke <jlcooke@certainkey.com>
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
 #ifndef _CRYPTO_SCATTERWALK_H
 #define _CRYPTO_SCATTERWALK_H
 
+#include <asm/kmap_types.h>
+#include <crypto/algapi.h>
+#include <linux/hardirq.h>
+#include <linux/highmem.h>
+#include <linux/kernel.h>
 #include <linux/mm.h>
 #include <linux/scatterlist.h>
+#include <linux/sched.h>
 
-#include "internal.h"
+static inline enum km_type crypto_kmap_type(int out)
+{
+       enum km_type type;
+
+       if (in_softirq())
+               type = out * (KM_SOFTIRQ1 - KM_SOFTIRQ0) + KM_SOFTIRQ0;
+       else
+               type = out * (KM_USER1 - KM_USER0) + KM_USER0;
+
+       return type;
+}
+
+static inline void *crypto_kmap(struct page *page, int out)
+{
+       return kmap_atomic(page, crypto_kmap_type(out));
+}
+
+static inline void crypto_kunmap(void *vaddr, int out)
+{
+       kunmap_atomic(vaddr, crypto_kmap_type(out));
+}
+
+static inline void crypto_yield(u32 flags)
+{
+       if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
+               cond_resched();
+}
+
+static inline void scatterwalk_sg_chain(struct scatterlist *sg1, int num,
+                                       struct scatterlist *sg2)
+{
+       sg_set_page(&sg1[num - 1], (void *)sg2, 0, 0);
+}
 
 static inline struct scatterlist *scatterwalk_sg_next(struct scatterlist *sg)
 {
-       return (++sg)->length ? sg : (void *) sg_page(sg);
+       return (++sg)->length ? sg : (void *)sg_page(sg);
 }
 
 static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in,
index 0686e1f..c0ccc2b 100644 (file)
@@ -8,6 +8,9 @@
 #define SHA1_DIGEST_SIZE        20
 #define SHA1_BLOCK_SIZE         64
 
+#define SHA224_DIGEST_SIZE     28
+#define SHA224_BLOCK_SIZE      64
+
 #define SHA256_DIGEST_SIZE      32
 #define SHA256_BLOCK_SIZE       64
 
 #define SHA1_H3                0x10325476UL
 #define SHA1_H4                0xc3d2e1f0UL
 
+#define SHA224_H0      0xc1059ed8UL
+#define SHA224_H1      0x367cd507UL
+#define SHA224_H2      0x3070dd17UL
+#define SHA224_H3      0xf70e5939UL
+#define SHA224_H4      0xffc00b31UL
+#define SHA224_H5      0x68581511UL
+#define SHA224_H6      0x64f98fa7UL
+#define SHA224_H7      0xbefa4fa4UL
+
 #define SHA256_H0      0x6a09e667UL
 #define SHA256_H1      0xbb67ae85UL
 #define SHA256_H2      0x3c6ef372UL
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
new file mode 100644 (file)
index 0000000..25fd612
--- /dev/null
@@ -0,0 +1,110 @@
+/*
+ * Symmetric key ciphers.
+ * 
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option) 
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_SKCIPHER_H
+#define _CRYPTO_SKCIPHER_H
+
+#include <linux/crypto.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+
+/**
+ *     struct skcipher_givcrypt_request - Crypto request with IV generation
+ *     @seq: Sequence number for IV generation
+ *     @giv: Space for generated IV
+ *     @creq: The crypto request itself
+ */
+struct skcipher_givcrypt_request {
+       u64 seq;
+       u8 *giv;
+
+       struct ablkcipher_request creq;
+};
+
+static inline struct crypto_ablkcipher *skcipher_givcrypt_reqtfm(
+       struct skcipher_givcrypt_request *req)
+{
+       return crypto_ablkcipher_reqtfm(&req->creq);
+}
+
+static inline int crypto_skcipher_givencrypt(
+       struct skcipher_givcrypt_request *req)
+{
+       struct ablkcipher_tfm *crt =
+               crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
+       return crt->givencrypt(req);
+};
+
+static inline int crypto_skcipher_givdecrypt(
+       struct skcipher_givcrypt_request *req)
+{
+       struct ablkcipher_tfm *crt =
+               crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
+       return crt->givdecrypt(req);
+};
+
+static inline void skcipher_givcrypt_set_tfm(
+       struct skcipher_givcrypt_request *req, struct crypto_ablkcipher *tfm)
+{
+       req->creq.base.tfm = crypto_ablkcipher_tfm(tfm);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_givcrypt_cast(
+       struct crypto_async_request *req)
+{
+       return container_of(ablkcipher_request_cast(req),
+                           struct skcipher_givcrypt_request, creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_givcrypt_alloc(
+       struct crypto_ablkcipher *tfm, gfp_t gfp)
+{
+       struct skcipher_givcrypt_request *req;
+
+       req = kmalloc(sizeof(struct skcipher_givcrypt_request) +
+                     crypto_ablkcipher_reqsize(tfm), gfp);
+
+       if (likely(req))
+               skcipher_givcrypt_set_tfm(req, tfm);
+
+       return req;
+}
+
+static inline void skcipher_givcrypt_free(struct skcipher_givcrypt_request *req)
+{
+       kfree(req);
+}
+
+static inline void skcipher_givcrypt_set_callback(
+       struct skcipher_givcrypt_request *req, u32 flags,
+       crypto_completion_t complete, void *data)
+{
+       ablkcipher_request_set_callback(&req->creq, flags, complete, data);
+}
+
+static inline void skcipher_givcrypt_set_crypt(
+       struct skcipher_givcrypt_request *req,
+       struct scatterlist *src, struct scatterlist *dst,
+       unsigned int nbytes, void *iv)
+{
+       ablkcipher_request_set_crypt(&req->creq, src, dst, nbytes, iv);
+}
+
+static inline void skcipher_givcrypt_set_giv(
+       struct skcipher_givcrypt_request *req, u8 *giv, u64 seq)
+{
+       req->giv = giv;
+       req->seq = seq;
+}
+
+#endif /* _CRYPTO_SKCIPHER_H */
+
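
A minimal usage sketch for the API above (assumed caller code: the algorithm
name, buffers and sequence number are illustrative, error handling elided):

	struct crypto_ablkcipher *tfm;
	struct skcipher_givcrypt_request *req;
	int err;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	req = skcipher_givcrypt_alloc(tfm, GFP_KERNEL);

	skcipher_givcrypt_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				       my_complete, NULL);
	skcipher_givcrypt_set_crypt(req, src_sg, dst_sg, nbytes, iv);
	skcipher_givcrypt_set_giv(req, giv_buf, seqno);

	err = crypto_skcipher_givencrypt(req);	/* may return -EINPROGRESS */

	/* only after the request has completed: */
	skcipher_givcrypt_free(req);
	crypto_free_ablkcipher(tfm);
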
index f3110eb..5e02d1b 100644 (file)
 #define CRYPTO_ALG_TYPE_DIGEST         0x00000002
 #define CRYPTO_ALG_TYPE_HASH           0x00000003
 #define CRYPTO_ALG_TYPE_BLKCIPHER      0x00000004
-#define CRYPTO_ALG_TYPE_COMPRESS       0x00000005
-#define CRYPTO_ALG_TYPE_AEAD           0x00000006
+#define CRYPTO_ALG_TYPE_ABLKCIPHER     0x00000005
+#define CRYPTO_ALG_TYPE_GIVCIPHER      0x00000006
+#define CRYPTO_ALG_TYPE_COMPRESS       0x00000008
+#define CRYPTO_ALG_TYPE_AEAD           0x00000009
 
 #define CRYPTO_ALG_TYPE_HASH_MASK      0x0000000e
+#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c
 
 #define CRYPTO_ALG_LARVAL              0x00000010
 #define CRYPTO_ALG_DEAD                        0x00000020
  */
 #define CRYPTO_ALG_NEED_FALLBACK       0x00000100
 
+/*
+ * This bit is set for symmetric key ciphers that have already been wrapped
+ * with a generic IV generator to prevent them from being wrapped again.
+ */
+#define CRYPTO_ALG_GENIV               0x00000200
+
 /*
  * Transform masks and values (for crt_flags).
  */
 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
 #elif defined(ARCH_SLAB_MINALIGN)
 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
+#else
+#define CRYPTO_MINALIGN __alignof__(unsigned long long)
 #endif
 
-#ifdef CRYPTO_MINALIGN
 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
-#else
-#define CRYPTO_MINALIGN_ATTR
-#endif
 
 struct scatterlist;
 struct crypto_ablkcipher;
@@ -97,6 +104,8 @@ struct crypto_blkcipher;
 struct crypto_hash;
 struct crypto_tfm;
 struct crypto_type;
+struct aead_givcrypt_request;
+struct skcipher_givcrypt_request;
 
 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
 
@@ -176,6 +185,10 @@ struct ablkcipher_alg {
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
+       int (*givencrypt)(struct skcipher_givcrypt_request *req);
+       int (*givdecrypt)(struct skcipher_givcrypt_request *req);
+
+       const char *geniv;
 
        unsigned int min_keysize;
        unsigned int max_keysize;
@@ -185,11 +198,16 @@ struct ablkcipher_alg {
 struct aead_alg {
        int (*setkey)(struct crypto_aead *tfm, const u8 *key,
                      unsigned int keylen);
+       int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
        int (*encrypt)(struct aead_request *req);
        int (*decrypt)(struct aead_request *req);
+       int (*givencrypt)(struct aead_givcrypt_request *req);
+       int (*givdecrypt)(struct aead_givcrypt_request *req);
+
+       const char *geniv;
 
        unsigned int ivsize;
-       unsigned int authsize;
+       unsigned int maxauthsize;
 };
 
 struct blkcipher_alg {
@@ -202,6 +220,8 @@ struct blkcipher_alg {
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);
 
+       const char *geniv;
+
        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
@@ -317,6 +337,11 @@ struct ablkcipher_tfm {
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
+       int (*givencrypt)(struct skcipher_givcrypt_request *req);
+       int (*givdecrypt)(struct skcipher_givcrypt_request *req);
+
+       struct crypto_ablkcipher *base;
+
        unsigned int ivsize;
        unsigned int reqsize;
 };
@@ -326,6 +351,11 @@ struct aead_tfm {
                      unsigned int keylen);
        int (*encrypt)(struct aead_request *req);
        int (*decrypt)(struct aead_request *req);
+       int (*givencrypt)(struct aead_givcrypt_request *req);
+       int (*givdecrypt)(struct aead_givcrypt_request *req);
+
+       struct crypto_aead *base;
+
        unsigned int ivsize;
        unsigned int authsize;
        unsigned int reqsize;
@@ -525,17 +555,23 @@ static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
        return (struct crypto_ablkcipher *)tfm;
 }
 
-static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
-       const char *alg_name, u32 type, u32 mask)
+static inline u32 crypto_skcipher_type(u32 type)
 {
-       type &= ~CRYPTO_ALG_TYPE_MASK;
+       type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-       mask |= CRYPTO_ALG_TYPE_MASK;
+       return type;
+}
 
-       return __crypto_ablkcipher_cast(
-               crypto_alloc_base(alg_name, type, mask));
+static inline u32 crypto_skcipher_mask(u32 mask)
+{
+       mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+       mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
+       return mask;
 }
 
+struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
+                                                 u32 type, u32 mask);
+
 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
        struct crypto_ablkcipher *tfm)
 {
@@ -550,11 +586,8 @@ static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
                                        u32 mask)
 {
-       type &= ~CRYPTO_ALG_TYPE_MASK;
-       type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-       mask |= CRYPTO_ALG_TYPE_MASK;
-
-       return crypto_has_alg(alg_name, type, mask);
+       return crypto_has_alg(alg_name, crypto_skcipher_type(type),
+                             crypto_skcipher_mask(mask));
 }
 
 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
@@ -601,7 +634,9 @@ static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
                                           const u8 *key, unsigned int keylen)
 {
-       return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
+       struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
+
+       return crt->setkey(crt->base, key, keylen);
 }
 
 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
@@ -633,7 +668,7 @@ static inline unsigned int crypto_ablkcipher_reqsize(
 static inline void ablkcipher_request_set_tfm(
        struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
 {
-       req->base.tfm = crypto_ablkcipher_tfm(tfm);
+       req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
 }
 
 static inline struct ablkcipher_request *ablkcipher_request_cast(
@@ -686,15 +721,7 @@ static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
        return (struct crypto_aead *)tfm;
 }
 
-static inline struct crypto_aead *crypto_alloc_aead(const char *alg_name,
-                                                   u32 type, u32 mask)
-{
-       type &= ~CRYPTO_ALG_TYPE_MASK;
-       type |= CRYPTO_ALG_TYPE_AEAD;
-       mask |= CRYPTO_ALG_TYPE_MASK;
-
-       return __crypto_aead_cast(crypto_alloc_base(alg_name, type, mask));
-}
+struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);
 
 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
 {
@@ -749,9 +776,13 @@ static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
                                     unsigned int keylen)
 {
-       return crypto_aead_crt(tfm)->setkey(tfm, key, keylen);
+       struct aead_tfm *crt = crypto_aead_crt(tfm);
+
+       return crt->setkey(crt->base, key, keylen);
 }
 
+int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
+
 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
 {
        return __crypto_aead_cast(req->base.tfm);
@@ -775,7 +806,7 @@ static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
 static inline void aead_request_set_tfm(struct aead_request *req,
                                        struct crypto_aead *tfm)
 {
-       req->base.tfm = crypto_aead_tfm(tfm);
+       req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
 }
 
 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
@@ -841,9 +872,9 @@ static inline struct crypto_blkcipher *crypto_blkcipher_cast(
 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
        const char *alg_name, u32 type, u32 mask)
 {
-       type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+       type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-       mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+       mask |= CRYPTO_ALG_TYPE_MASK;
 
        return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
 }
@@ -861,9 +892,9 @@ static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
 
 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
 {
-       type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+       type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
-       mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
+       mask |= CRYPTO_ALG_TYPE_MASK;
 
        return crypto_has_alg(alg_name, type, mask);
 }
@@ -1081,6 +1112,7 @@ static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
                                                    u32 type, u32 mask)
 {
        type &= ~CRYPTO_ALG_TYPE_MASK;
+       mask &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;
 
@@ -1100,6 +1132,7 @@ static inline void crypto_free_hash(struct crypto_hash *tfm)
 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
 {
        type &= ~CRYPTO_ALG_TYPE_MASK;
+       mask &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;
 
index 21ea761..85d1191 100644 (file)
@@ -33,7 +33,7 @@ struct hwrng {
        const char *name;
        int (*init)(struct hwrng *rng);
        void (*cleanup)(struct hwrng *rng);
-       int (*data_present)(struct hwrng *rng);
+       int (*data_present)(struct hwrng *rng, int wait);
        int (*data_read)(struct hwrng *rng, u32 *data);
        unsigned long priv;