Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (29 commits)
  crypto: sha512-s390 - Add missing block size
  hwrng: timeriomem - Breaks an allyesconfig build on s390:
  nlattr: Fix build error with NET off
  crypto: testmgr - add zlib test
  crypto: zlib - New zlib crypto module, using pcomp
  crypto: testmgr - Add support for the pcomp interface
  crypto: compress - Add pcomp interface
  netlink: Move netlink attribute parsing support to lib
  crypto: Fix dead links
  hwrng: timeriomem - New driver
  crypto: chainiv - Use kcrypto_wq instead of keventd_wq
  crypto: cryptd - Per-CPU thread implementation based on kcrypto_wq
  crypto: api - Use dedicated workqueue for crypto subsystem
  crypto: testmgr - Test skciphers with no IVs
  crypto: aead - Avoid infinite loop when nivaead fails selftest
  crypto: skcipher - Avoid infinite loop when cipher fails selftest
  crypto: api - Fix crypto_alloc_tfm/create_create_tfm return convention
  crypto: api - crypto_alg_mod_lookup either tested or untested
  crypto: amcc - Add crypt4xx driver
  crypto: ansi_cprng - Add maintainer
  ...
@@ -1269,6 +1269,12 @@ L:	linux-crypto@vger.kernel.org
 T:	git kernel.org:/pub/scm/linux/kernel/git/herbert/crypto-2.6.git
 S:	Maintained
 
+CRYPTOGRAPHIC RANDOM NUMBER GENERATOR
+P:	Neil Horman
+M:	nhorman@tuxdriver.com
+L:	linux-crypto@vger.kernel.org
+S:	Maintained
+
 CS5535 Audio ALSA driver
 P:	Jaya Kumar
 M:	jayakumar.alsa@gmail.com
@@ -127,6 +127,13 @@
 			dcr-reg = <0x010 0x002>;
 		};
 
+		CRYPTO: crypto@180000 {
+			compatible = "amcc,ppc460ex-crypto", "amcc,ppc4xx-crypto";
+			reg = <4 0x00180000 0x80400>;
+			interrupt-parent = <&UIC0>;
+			interrupts = <0x1d 0x4>;
+		};
+
 		MAL0: mcmal {
 			compatible = "ibm,mcmal-460ex", "ibm,mcmal2";
 			dcr-reg = <0x180 0x062>;
@@ -97,6 +97,13 @@
 				  0x6 0x4>; /* ECC SEC Error */
 		};
 
+		CRYPTO: crypto@ef700000 {
+			compatible = "amcc,ppc405ex-crypto", "amcc,ppc4xx-crypto";
+			reg = <0xef700000 0x80400>;
+			interrupt-parent = <&UIC0>;
+			interrupts = <0x17 0x2>;
+		};
+
 		MAL0: mcmal {
 			compatible = "ibm,mcmal-405ex", "ibm,mcmal2";
 			dcr-reg = <0x180 0x062>;
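Both device-tree additions above expose the same security engine through a board-specific compatible string plus the generic "amcc,ppc4xx-crypto" fallback, which is what lets the single crypt4xx driver added by this merge serve both boards. A sketch of the matching side (table name chosen here for illustration, not copied from the driver):

	static const struct of_device_id crypto4xx_match[] = {
		/* the generic string matches both the 405EX and 460EX nodes */
		{ .compatible = "amcc,ppc4xx-crypto" },
		{ },
	};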
@@ -29,7 +29,9 @@ struct s390_sha_ctx {
 	int func;		/* KIMD function to use */
 };
 
-void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len);
-void s390_sha_final(struct crypto_tfm *tfm, u8 *out);
+struct shash_desc;
+
+int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len);
+int s390_sha_final(struct shash_desc *desc, u8 *out);
 
 #endif
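The header change above is the heart of the s390 conversion: the hash entry points now take a struct shash_desc and return int instead of operating on a crypto_tfm and returning void. A minimal sketch of a kernel caller driving the new shash interface (function name and buffer handling are illustrative, not from this diff):

	#include <crypto/hash.h>
	#include <linux/err.h>
	#include <linux/slab.h>

	static int sha1_digest_example(const u8 *data, unsigned int len, u8 *out)
	{
		struct crypto_shash *tfm;
		struct shash_desc *desc;
		int ret;

		tfm = crypto_alloc_shash("sha1", 0, 0);	/* resolves to sha1-s390 when available */
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* per-call state lives after the desc; its size is the alg's descsize */
		desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
		if (!desc) {
			crypto_free_shash(tfm);
			return -ENOMEM;
		}
		desc->tfm = tfm;
		desc->flags = 0;

		ret = crypto_shash_digest(desc, data, len, out);	/* init + update + final */

		kfree(desc);
		crypto_free_shash(tfm);
		return ret;
	}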
@@ -23,17 +23,17 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
-#include <linux/crypto.h>
 #include <crypto/sha.h>
 
 #include "crypt_s390.h"
 #include "sha.h"
 
-static void sha1_init(struct crypto_tfm *tfm)
+static int sha1_init(struct shash_desc *desc)
 {
-	struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
 
 	sctx->state[0] = SHA1_H0;
 	sctx->state[1] = SHA1_H1;
@@ -42,34 +42,36 @@ static void sha1_init(struct crypto_tfm *tfm)
 	sctx->state[4] = SHA1_H4;
 	sctx->count = 0;
 	sctx->func = KIMD_SHA_1;
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	=	"sha1",
-	.cra_driver_name=	"sha1-s390",
-	.cra_priority	=	CRYPT_S390_PRIORITY,
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	SHA1_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct s390_sha_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list	=	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	SHA1_DIGEST_SIZE,
-	.dia_init	=	sha1_init,
-	.dia_update	=	s390_sha_update,
-	.dia_final	=	s390_sha_final } }
+static struct shash_alg alg = {
+	.digestsize	=	SHA1_DIGEST_SIZE,
+	.init		=	sha1_init,
+	.update		=	s390_sha_update,
+	.final		=	s390_sha_final,
+	.descsize	=	sizeof(struct s390_sha_ctx),
+	.base		=	{
+		.cra_name	=	"sha1",
+		.cra_driver_name=	"sha1-s390",
+		.cra_priority	=	CRYPT_S390_PRIORITY,
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA1_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int __init sha1_s390_init(void)
 {
 	if (!crypt_s390_func_available(KIMD_SHA_1))
 		return -EOPNOTSUPP;
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit sha1_s390_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(sha1_s390_init);
@@ -16,17 +16,17 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
-#include <linux/crypto.h>
 #include <crypto/sha.h>
 
 #include "crypt_s390.h"
 #include "sha.h"
 
-static void sha256_init(struct crypto_tfm *tfm)
+static int sha256_init(struct shash_desc *desc)
 {
-	struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
 
 	sctx->state[0] = SHA256_H0;
 	sctx->state[1] = SHA256_H1;
@@ -38,22 +38,24 @@ static void sha256_init(struct crypto_tfm *tfm)
 	sctx->state[7] = SHA256_H7;
 	sctx->count = 0;
 	sctx->func = KIMD_SHA_256;
+
+	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name	=	"sha256",
-	.cra_driver_name =	"sha256-s390",
-	.cra_priority	=	CRYPT_S390_PRIORITY,
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	SHA256_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct s390_sha_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list	=	LIST_HEAD_INIT(alg.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	SHA256_DIGEST_SIZE,
-	.dia_init	=	sha256_init,
-	.dia_update	=	s390_sha_update,
-	.dia_final	=	s390_sha_final } }
+static struct shash_alg alg = {
+	.digestsize	=	SHA256_DIGEST_SIZE,
+	.init		=	sha256_init,
+	.update		=	s390_sha_update,
+	.final		=	s390_sha_final,
+	.descsize	=	sizeof(struct s390_sha_ctx),
+	.base		=	{
+		.cra_name	=	"sha256",
+		.cra_driver_name=	"sha256-s390",
+		.cra_priority	=	CRYPT_S390_PRIORITY,
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA256_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 static int sha256_s390_init(void)
@@ -61,12 +63,12 @@ static int sha256_s390_init(void)
 	if (!crypt_s390_func_available(KIMD_SHA_256))
 		return -EOPNOTSUPP;
 
-	return crypto_register_alg(&alg);
+	return crypto_register_shash(&alg);
 }
 
 static void __exit sha256_s390_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(sha256_s390_init);
@@ -12,16 +12,16 @@
  * any later version.
  *
  */
+#include <crypto/internal/hash.h>
 #include <linux/init.h>
 #include <linux/module.h>
-#include <linux/crypto.h>
 
 #include "sha.h"
 #include "crypt_s390.h"
 
-static void sha512_init(struct crypto_tfm *tfm)
+static int sha512_init(struct shash_desc *desc)
 {
-	struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
 
 	*(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;
 	*(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;
@@ -33,29 +33,31 @@ static void sha512_init(struct crypto_tfm *tfm)
 	*(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;
 	ctx->count = 0;
 	ctx->func = KIMD_SHA_512;
+
+	return 0;
 }
 
-static struct crypto_alg sha512_alg = {
-	.cra_name	=	"sha512",
-	.cra_driver_name =	"sha512-s390",
-	.cra_priority	=	CRYPT_S390_PRIORITY,
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	SHA512_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct s390_sha_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list	=	LIST_HEAD_INIT(sha512_alg.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	SHA512_DIGEST_SIZE,
-	.dia_init	=	sha512_init,
-	.dia_update	=	s390_sha_update,
-	.dia_final	=	s390_sha_final } }
+static struct shash_alg sha512_alg = {
+	.digestsize	=	SHA512_DIGEST_SIZE,
+	.init		=	sha512_init,
+	.update		=	s390_sha_update,
+	.final		=	s390_sha_final,
+	.descsize	=	sizeof(struct s390_sha_ctx),
+	.base		=	{
+		.cra_name	=	"sha512",
+		.cra_driver_name=	"sha512-s390",
+		.cra_priority	=	CRYPT_S390_PRIORITY,
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA512_BLOCK_SIZE,
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 MODULE_ALIAS("sha512");
 
-static void sha384_init(struct crypto_tfm *tfm)
+static int sha384_init(struct shash_desc *desc)
 {
-	struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
 
 	*(__u64 *)&ctx->state[0] = 0xcbbb9d5dc1059ed8ULL;
 	*(__u64 *)&ctx->state[2] = 0x629a292a367cd507ULL;
@@ -67,22 +69,25 @@ static void sha384_init(struct crypto_tfm *tfm)
 	*(__u64 *)&ctx->state[14] = 0x47b5481dbefa4fa4ULL;
 	ctx->count = 0;
 	ctx->func = KIMD_SHA_512;
+
+	return 0;
 }
 
-static struct crypto_alg sha384_alg = {
-	.cra_name	=	"sha384",
-	.cra_driver_name =	"sha384-s390",
-	.cra_priority	=	CRYPT_S390_PRIORITY,
-	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
-	.cra_blocksize	=	SHA384_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct s390_sha_ctx),
-	.cra_module	=	THIS_MODULE,
-	.cra_list	=	LIST_HEAD_INIT(sha384_alg.cra_list),
-	.cra_u		=	{ .digest = {
-	.dia_digestsize	=	SHA384_DIGEST_SIZE,
-	.dia_init	=	sha384_init,
-	.dia_update	=	s390_sha_update,
-	.dia_final	=	s390_sha_final } }
+static struct shash_alg sha384_alg = {
+	.digestsize	=	SHA384_DIGEST_SIZE,
+	.init		=	sha384_init,
+	.update		=	s390_sha_update,
+	.final		=	s390_sha_final,
+	.descsize	=	sizeof(struct s390_sha_ctx),
+	.base		=	{
+		.cra_name	=	"sha384",
+		.cra_driver_name=	"sha384-s390",
+		.cra_priority	=	CRYPT_S390_PRIORITY,
+		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize	=	SHA384_BLOCK_SIZE,
+		.cra_ctxsize	=	sizeof(struct s390_sha_ctx),
+		.cra_module	=	THIS_MODULE,
+	}
 };
 
 MODULE_ALIAS("sha384");
@@ -93,18 +98,18 @@ static int __init init(void)
 
 	if (!crypt_s390_func_available(KIMD_SHA_512))
 		return -EOPNOTSUPP;
-	if ((ret = crypto_register_alg(&sha512_alg)) < 0)
+	if ((ret = crypto_register_shash(&sha512_alg)) < 0)
 		goto out;
-	if ((ret = crypto_register_alg(&sha384_alg)) < 0)
-		crypto_unregister_alg(&sha512_alg);
+	if ((ret = crypto_register_shash(&sha384_alg)) < 0)
+		crypto_unregister_shash(&sha512_alg);
 out:
 	return ret;
 }
 
 static void __exit fini(void)
 {
-	crypto_unregister_alg(&sha512_alg);
-	crypto_unregister_alg(&sha384_alg);
+	crypto_unregister_shash(&sha512_alg);
+	crypto_unregister_shash(&sha384_alg);
 }
 
 module_init(init);
@@ -13,14 +13,14 @@
  *
  */
 
-#include <linux/crypto.h>
+#include <crypto/internal/hash.h>
 #include "sha.h"
 #include "crypt_s390.h"
 
-void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
+int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len)
 {
-	struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
-	unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
+	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
+	unsigned int bsize = crypto_shash_blocksize(desc->tfm);
 	unsigned int index;
 	int ret;
 
@@ -51,13 +51,15 @@ void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
 store:
 	if (len)
 		memcpy(ctx->buf + index , data, len);
+
+	return 0;
 }
 EXPORT_SYMBOL_GPL(s390_sha_update);
 
-void s390_sha_final(struct crypto_tfm *tfm, u8 *out)
+int s390_sha_final(struct shash_desc *desc, u8 *out)
 {
-	struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
-	unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
+	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
+	unsigned int bsize = crypto_shash_blocksize(desc->tfm);
 	u64 bits;
 	unsigned int index, end, plen;
 	int ret;
@@ -87,9 +89,11 @@ void s390_sha_final(struct crypto_tfm *tfm, u8 *out)
 	BUG_ON(ret != end);
 
 	/* copy digest to out */
-	memcpy(out, ctx->state, crypto_hash_digestsize(crypto_hash_cast(tfm)));
+	memcpy(out, ctx->state, crypto_shash_digestsize(desc->tfm));
 	/* wipe context */
 	memset(ctx, 0, sizeof *ctx);
+
+	return 0;
 }
 EXPORT_SYMBOL_GPL(s390_sha_final);
 
@@ -9,6 +9,7 @@ obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o
 obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
 obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
 obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o
+obj-$(CONFIG_CRYPTO_AES_NI_INTEL) += aesni-intel.o
 
 obj-$(CONFIG_CRYPTO_CRC32C_INTEL) += crc32c-intel.o
 
@@ -19,3 +20,5 @@ salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o
 aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o
 twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
 salsa20-x86_64-y := salsa20-x86_64-asm_64.o salsa20_glue.o
+
+aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
@@ -41,14 +41,14 @@
 #define tlen 1024   // length of each of 4 'xor' arrays (256 32-bit words)
 
 /* offsets to parameters with one register pushed onto stack */
-#define tfm 8
+#define ctx 8
 #define out_blk 12
 #define in_blk 16
 
-/* offsets in crypto_tfm structure */
-#define klen (crypto_tfm_ctx_offset + 0)
-#define ekey (crypto_tfm_ctx_offset + 4)
-#define dkey (crypto_tfm_ctx_offset + 244)
+/* offsets in crypto_aes_ctx structure */
+#define klen (480)
+#define ekey (0)
+#define dkey (240)
 
 // register mapping for encrypt and decrypt subroutines
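The replacement defines are the key to decoupling the assembly from crypto_tfm: ekey, dkey and klen become absolute offsets into struct crypto_aes_ctx (two 240-byte round-key arrays followed by the key length), matching the values visible above. A compile-time check one could add to pin that layout assumption (illustrative, not part of this diff; field names from <crypto/aes.h>):

	#include <crypto/aes.h>
	#include <linux/kernel.h>

	static inline void aes_ctx_layout_check(void)
	{
		/* mirrors ekey/dkey/klen in the .S files */
		BUILD_BUG_ON(offsetof(struct crypto_aes_ctx, key_enc) != 0);
		BUILD_BUG_ON(offsetof(struct crypto_aes_ctx, key_dec) != 240);
		BUILD_BUG_ON(offsetof(struct crypto_aes_ctx, key_length) != 480);
	}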
@@ -217,7 +217,7 @@
 	do_col (table, r5,r0,r1,r4, r2,r3);  /* idx=r5 */
 
 // AES (Rijndael) Encryption Subroutine
-/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */
+/* void aes_enc_blk(struct crypto_aes_ctx *ctx, u8 *out_blk, const u8 *in_blk) */
 
 .global aes_enc_blk
 
@@ -228,7 +228,7 @@
 
 aes_enc_blk:
 	push %ebp
-	mov tfm(%esp),%ebp
+	mov ctx(%esp),%ebp
 
 // CAUTION: the order and the values used in these assigns
 //          rely on the register mappings
@@ -292,7 +292,7 @@ aes_enc_blk:
 	ret
 
 // AES (Rijndael) Decryption Subroutine
-/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */
+/* void aes_dec_blk(struct crypto_aes_ctx *ctx, u8 *out_blk, const u8 *in_blk) */
 
 .global aes_dec_blk
 
@@ -303,7 +303,7 @@ aes_enc_blk:
 
 aes_dec_blk:
 	push %ebp
-	mov tfm(%esp),%ebp
+	mov ctx(%esp),%ebp
 
 // CAUTION: the order and the values used in these assigns
 //          rely on the register mappings
@@ -17,8 +17,6 @@
 
 #include <asm/asm-offsets.h>
 
-#define BASE crypto_tfm_ctx_offset
-
 #define R1	%rax
 #define R1E	%eax
 #define R1X	%ax
@@ -56,13 +54,13 @@
 	.align	8;			\
 FUNC:	movq	r1,r2;			\
 	movq	r3,r4;			\
-	leaq	BASE+KEY+48+4(r8),r9;	\
+	leaq	KEY+48(r8),r9;		\
 	movq	r10,r11;		\
 	movl	(r7),r5 ## E;		\
 	movl	4(r7),r1 ## E;		\
 	movl	8(r7),r6 ## E;		\
 	movl	12(r7),r7 ## E;		\
-	movl	BASE+0(r8),r10 ## E;	\
+	movl	480(r8),r10 ## E;	\
 	xorl	-48(r9),r5 ## E;	\
 	xorl	-44(r9),r1 ## E;	\
 	xorl	-40(r9),r6 ## E;	\
@@ -5,17 +5,29 @@
 
 #include <crypto/aes.h>
 
-asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
-asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+asmlinkage void aes_enc_blk(struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
+asmlinkage void aes_dec_blk(struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
+
+void crypto_aes_encrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
+{
+	aes_enc_blk(ctx, dst, src);
+}
+EXPORT_SYMBOL_GPL(crypto_aes_encrypt_x86);
+
+void crypto_aes_decrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
+{
+	aes_dec_blk(ctx, dst, src);
+}
+EXPORT_SYMBOL_GPL(crypto_aes_decrypt_x86);
 
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	aes_enc_blk(tfm, dst, src);
+	aes_enc_blk(crypto_tfm_ctx(tfm), dst, src);
 }
 
 static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	aes_dec_blk(tfm, dst, src);
+	aes_dec_blk(crypto_tfm_ctx(tfm), dst, src);
 }
 
 static struct crypto_alg aes_alg = {
arch/x86/crypto/aesni-intel_asm.S (new file, 896 lines)
File diff suppressed because it is too large
arch/x86/crypto/aesni-intel_glue.c (new file, 461 lines)
@@ -0,0 +1,461 @@
+/*
+ * Support for Intel AES-NI instructions. This file contains glue
+ * code, the real AES implementation is in intel-aes_asm.S.
+ *
+ * Copyright (C) 2008, Intel Corp.
+ *    Author: Huang Ying <ying.huang@intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ */
+
+#include <linux/hardirq.h>
+#include <linux/types.h>
+#include <linux/crypto.h>
+#include <linux/err.h>
+#include <crypto/algapi.h>
+#include <crypto/aes.h>
+#include <crypto/cryptd.h>
+#include <asm/i387.h>
+#include <asm/aes.h>
+
+struct async_aes_ctx {
+	struct cryptd_ablkcipher *cryptd_tfm;
+};
+
+#define AESNI_ALIGN	16
+#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
+
+asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
+			     unsigned int key_len);
+asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
+			  const u8 *in);
+asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
+			  const u8 *in);
+asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
+			      const u8 *in, unsigned int len);
+asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
+			      const u8 *in, unsigned int len);
+asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
+			      const u8 *in, unsigned int len, u8 *iv);
+asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
+			      const u8 *in, unsigned int len, u8 *iv);
+
+static inline int kernel_fpu_using(void)
+{
+	if (in_interrupt() && !(read_cr0() & X86_CR0_TS))
+		return 1;
+	return 0;
+}
+
+static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
+{
+	unsigned long addr = (unsigned long)raw_ctx;
+	unsigned long align = AESNI_ALIGN;
+
+	if (align <= crypto_tfm_ctx_alignment())
+		align = 1;
+	return (struct crypto_aes_ctx *)ALIGN(addr, align);
+}
+
+static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
+			      const u8 *in_key, unsigned int key_len)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
+	u32 *flags = &tfm->crt_flags;
+	int err;
+
+	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
+	    key_len != AES_KEYSIZE_256) {
+		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+		return -EINVAL;
+	}
+
+	if (kernel_fpu_using())
+		err = crypto_aes_expand_key(ctx, in_key, key_len);
+	else {
+		kernel_fpu_begin();
+		err = aesni_set_key(ctx, in_key, key_len);
+		kernel_fpu_end();
+	}
+
+	return err;
+}
+
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len)
+{
+	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
+}
+
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
+
+	if (kernel_fpu_using())
+		crypto_aes_encrypt_x86(ctx, dst, src);
+	else {
+		kernel_fpu_begin();
+		aesni_enc(ctx, dst, src);
+		kernel_fpu_end();
+	}
+}
+
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
+
+	if (kernel_fpu_using())
+		crypto_aes_decrypt_x86(ctx, dst, src);
+	else {
+		kernel_fpu_begin();
+		aesni_dec(ctx, dst, src);
+		kernel_fpu_end();
+	}
+}
+
+static struct crypto_alg aesni_alg = {
+	.cra_name		= "aes",
+	.cra_driver_name	= "aes-aesni",
+	.cra_priority		= 300,
+	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
+	.cra_alignmask		= 0,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
+	.cra_u	= {
+		.cipher	= {
+			.cia_min_keysize	= AES_MIN_KEY_SIZE,
+			.cia_max_keysize	= AES_MAX_KEY_SIZE,
+			.cia_setkey		= aes_set_key,
+			.cia_encrypt		= aes_encrypt,
+			.cia_decrypt		= aes_decrypt
+		}
+	}
+};
+
+static int ecb_encrypt(struct blkcipher_desc *desc,
+		       struct scatterlist *dst, struct scatterlist *src,
+		       unsigned int nbytes)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
+	struct blkcipher_walk walk;
+	int err;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	err = blkcipher_walk_virt(desc, &walk);
+
+	kernel_fpu_begin();
+	while ((nbytes = walk.nbytes)) {
+		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
+			      nbytes & AES_BLOCK_MASK);
+		nbytes &= AES_BLOCK_SIZE - 1;
+		err = blkcipher_walk_done(desc, &walk, nbytes);
+	}
+	kernel_fpu_end();
+
+	return err;
+}
+
+static int ecb_decrypt(struct blkcipher_desc *desc,
+		       struct scatterlist *dst, struct scatterlist *src,
+		       unsigned int nbytes)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
+	struct blkcipher_walk walk;
+	int err;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	err = blkcipher_walk_virt(desc, &walk);
+
+	kernel_fpu_begin();
+	while ((nbytes = walk.nbytes)) {
+		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
+			      nbytes & AES_BLOCK_MASK);
+		nbytes &= AES_BLOCK_SIZE - 1;
+		err = blkcipher_walk_done(desc, &walk, nbytes);
+	}
+	kernel_fpu_end();
+
+	return err;
+}
+
+static struct crypto_alg blk_ecb_alg = {
+	.cra_name		= "__ecb-aes-aesni",
+	.cra_driver_name	= "__driver-ecb-aes-aesni",
+	.cra_priority		= 0,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.setkey		= aes_set_key,
+			.encrypt	= ecb_encrypt,
+			.decrypt	= ecb_decrypt,
+		},
+	},
+};
+
+static int cbc_encrypt(struct blkcipher_desc *desc,
+		       struct scatterlist *dst, struct scatterlist *src,
+		       unsigned int nbytes)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
+	struct blkcipher_walk walk;
+	int err;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	err = blkcipher_walk_virt(desc, &walk);
+
+	kernel_fpu_begin();
+	while ((nbytes = walk.nbytes)) {
+		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
+			      nbytes & AES_BLOCK_MASK, walk.iv);
+		nbytes &= AES_BLOCK_SIZE - 1;
+		err = blkcipher_walk_done(desc, &walk, nbytes);
+	}
+	kernel_fpu_end();
+
+	return err;
+}
+
+static int cbc_decrypt(struct blkcipher_desc *desc,
+		       struct scatterlist *dst, struct scatterlist *src,
+		       unsigned int nbytes)
+{
+	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
+	struct blkcipher_walk walk;
+	int err;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	err = blkcipher_walk_virt(desc, &walk);
+
+	kernel_fpu_begin();
+	while ((nbytes = walk.nbytes)) {
+		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
+			      nbytes & AES_BLOCK_MASK, walk.iv);
+		nbytes &= AES_BLOCK_SIZE - 1;
+		err = blkcipher_walk_done(desc, &walk, nbytes);
+	}
+	kernel_fpu_end();
+
+	return err;
+}
+
+static struct crypto_alg blk_cbc_alg = {
+	.cra_name		= "__cbc-aes-aesni",
+	.cra_driver_name	= "__driver-cbc-aes-aesni",
+	.cra_priority		= 0,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.setkey		= aes_set_key,
+			.encrypt	= cbc_encrypt,
+			.decrypt	= cbc_decrypt,
+		},
+	},
+};
+
+static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
+			unsigned int key_len)
+{
+	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+	return crypto_ablkcipher_setkey(&ctx->cryptd_tfm->base, key, key_len);
+}
+
+static int ablk_encrypt(struct ablkcipher_request *req)
+{
+	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+	if (kernel_fpu_using()) {
+		struct ablkcipher_request *cryptd_req =
+			ablkcipher_request_ctx(req);
+		memcpy(cryptd_req, req, sizeof(*req));
+		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+		return crypto_ablkcipher_encrypt(cryptd_req);
+	} else {
+		struct blkcipher_desc desc;
+		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
+		desc.info = req->info;
+		desc.flags = 0;
+		return crypto_blkcipher_crt(desc.tfm)->encrypt(
+			&desc, req->dst, req->src, req->nbytes);
+	}
+}
+
+static int ablk_decrypt(struct ablkcipher_request *req)
+{
+	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+	if (kernel_fpu_using()) {
+		struct ablkcipher_request *cryptd_req =
+			ablkcipher_request_ctx(req);
+		memcpy(cryptd_req, req, sizeof(*req));
+		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+		return crypto_ablkcipher_decrypt(cryptd_req);
+	} else {
+		struct blkcipher_desc desc;
+		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
+		desc.info = req->info;
+		desc.flags = 0;
+		return crypto_blkcipher_crt(desc.tfm)->decrypt(
+			&desc, req->dst, req->src, req->nbytes);
+	}
+}
+
+static void ablk_exit(struct crypto_tfm *tfm)
+{
+	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	cryptd_free_ablkcipher(ctx->cryptd_tfm);
+}
+
+static void ablk_init_common(struct crypto_tfm *tfm,
+			     struct cryptd_ablkcipher *cryptd_tfm)
+{
+	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	ctx->cryptd_tfm = cryptd_tfm;
+	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
+		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
+}
+
+static int ablk_ecb_init(struct crypto_tfm *tfm)
+{
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ablk_init_common(tfm, cryptd_tfm);
+	return 0;
+}
+
+static struct crypto_alg ablk_ecb_alg = {
+	.cra_name		= "ecb(aes)",
+	.cra_driver_name	= "ecb-aes-aesni",
+	.cra_priority		= 400,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct async_aes_ctx),
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_ablkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
+	.cra_init		= ablk_ecb_init,
+	.cra_exit		= ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.setkey		= ablk_set_key,
+			.encrypt	= ablk_encrypt,
+			.decrypt	= ablk_decrypt,
+		},
+	},
+};
+
+static int ablk_cbc_init(struct crypto_tfm *tfm)
+{
+	struct cryptd_ablkcipher *cryptd_tfm;
+
+	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ablk_init_common(tfm, cryptd_tfm);
+	return 0;
+}
+
+static struct crypto_alg ablk_cbc_alg = {
+	.cra_name		= "cbc(aes)",
+	.cra_driver_name	= "cbc-aes-aesni",
+	.cra_priority		= 400,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct async_aes_ctx),
+	.cra_alignmask		= 0,
+	.cra_type		= &crypto_ablkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
+	.cra_init		= ablk_cbc_init,
+	.cra_exit		= ablk_exit,
+	.cra_u = {
+		.ablkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= ablk_set_key,
+			.encrypt	= ablk_encrypt,
+			.decrypt	= ablk_decrypt,
+		},
+	},
+};
+
+static int __init aesni_init(void)
+{
+	int err;
+
+	if (!cpu_has_aes) {
+		printk(KERN_ERR "Intel AES-NI instructions are not detected.\n");
+		return -ENODEV;
+	}
+	if ((err = crypto_register_alg(&aesni_alg)))
+		goto aes_err;
+	if ((err = crypto_register_alg(&blk_ecb_alg)))
+		goto blk_ecb_err;
+	if ((err = crypto_register_alg(&blk_cbc_alg)))
+		goto blk_cbc_err;
+	if ((err = crypto_register_alg(&ablk_ecb_alg)))
+		goto ablk_ecb_err;
+	if ((err = crypto_register_alg(&ablk_cbc_alg)))
+		goto ablk_cbc_err;
+
+	return err;
+
+ablk_cbc_err:
+	crypto_unregister_alg(&ablk_ecb_alg);
+ablk_ecb_err:
+	crypto_unregister_alg(&blk_cbc_alg);
+blk_cbc_err:
+	crypto_unregister_alg(&blk_ecb_alg);
+blk_ecb_err:
+	crypto_unregister_alg(&aesni_alg);
+aes_err:
+	return err;
+}
+
+static void __exit aesni_exit(void)
+{
+	crypto_unregister_alg(&ablk_cbc_alg);
+	crypto_unregister_alg(&ablk_ecb_alg);
+	crypto_unregister_alg(&blk_cbc_alg);
+	crypto_unregister_alg(&blk_ecb_alg);
+	crypto_unregister_alg(&aesni_alg);
+}
+
+module_init(aesni_init);
+module_exit(aesni_exit);
+
+MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
+MODULE_LICENSE("GPL");
+MODULE_ALIAS("aes");
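The glue file registers a plain "aes" cipher plus internal __ecb/__cbc blkciphers and, on top of those, cryptd-backed async algorithms at priority 400, so generic users pick up the accelerated versions transparently. A hedged sketch of such a caller, assuming process context and a 16-byte-multiple buffer (error paths trimmed; a real async caller also registers a completion callback and waits on -EINPROGRESS):

	#include <linux/crypto.h>
	#include <linux/scatterlist.h>

	static int cbc_aes_example(u8 *buf, unsigned int len, const u8 *key, u8 *iv)
	{
		struct crypto_ablkcipher *tfm;
		struct ablkcipher_request *req;
		struct scatterlist sg;
		int ret;

		tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);	/* cbc-aes-aesni wins at prio 400 */
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		ret = crypto_ablkcipher_setkey(tfm, key, 16);		/* AES-128 */
		if (!ret) {
			req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
			if (req) {
				sg_init_one(&sg, buf, len);	/* len: multiple of 16 */
				ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);
				ret = crypto_ablkcipher_encrypt(req);	/* may be -EINPROGRESS */
				ablkcipher_request_free(req);
			} else
				ret = -ENOMEM;
		}
		crypto_free_ablkcipher(tfm);
		return ret;
	}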
arch/x86/include/asm/aes.h (new file, 11 lines)
@@ -0,0 +1,11 @@
+#ifndef ASM_X86_AES_H
+#define ASM_X86_AES_H
+
+#include <linux/crypto.h>
+#include <crypto/aes.h>
+
+void crypto_aes_encrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst,
+			    const u8 *src);
+void crypto_aes_decrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst,
+			    const u8 *src);
+#endif
@@ -213,6 +213,7 @@ extern const char * const x86_power_flags[32];
 #define cpu_has_xmm		boot_cpu_has(X86_FEATURE_XMM)
 #define cpu_has_xmm2		boot_cpu_has(X86_FEATURE_XMM2)
 #define cpu_has_xmm3		boot_cpu_has(X86_FEATURE_XMM3)
+#define cpu_has_aes		boot_cpu_has(X86_FEATURE_AES)
 #define cpu_has_ht		boot_cpu_has(X86_FEATURE_HT)
 #define cpu_has_mp		boot_cpu_has(X86_FEATURE_MP)
 #define cpu_has_nx		boot_cpu_has(X86_FEATURE_NX)
@@ -56,6 +56,7 @@ config CRYPTO_BLKCIPHER2
 	tristate
 	select CRYPTO_ALGAPI2
 	select CRYPTO_RNG2
+	select CRYPTO_WORKQUEUE
 
 config CRYPTO_HASH
 	tristate
@@ -75,6 +76,10 @@ config CRYPTO_RNG2
 	tristate
 	select CRYPTO_ALGAPI2
 
+config CRYPTO_PCOMP
+	tristate
+	select CRYPTO_ALGAPI2
+
 config CRYPTO_MANAGER
 	tristate "Cryptographic algorithm manager"
 	select CRYPTO_MANAGER2
@@ -87,6 +92,7 @@ config CRYPTO_MANAGER2
 	select CRYPTO_AEAD2
 	select CRYPTO_HASH2
 	select CRYPTO_BLKCIPHER2
+	select CRYPTO_PCOMP
 
 config CRYPTO_GF128MUL
 	tristate "GF(2^128) multiplication functions (EXPERIMENTAL)"
@@ -106,11 +112,15 @@ config CRYPTO_NULL
 	help
 	  These are 'Null' algorithms, used by IPsec, which do nothing.
 
+config CRYPTO_WORKQUEUE
+	tristate
+
 config CRYPTO_CRYPTD
 	tristate "Software async crypto daemon"
 	select CRYPTO_BLKCIPHER
 	select CRYPTO_HASH
 	select CRYPTO_MANAGER
+	select CRYPTO_WORKQUEUE
 	help
 	  This is a generic software asynchronous crypto daemon that
 	  converts an arbitrary synchronous software crypto algorithm
@@ -470,6 +480,31 @@ config CRYPTO_AES_X86_64
 
 	  See <http://csrc.nist.gov/encryption/aes/> for more information.
 
+config CRYPTO_AES_NI_INTEL
+	tristate "AES cipher algorithms (AES-NI)"
+	depends on (X86 || UML_X86) && 64BIT
+	select CRYPTO_AES_X86_64
+	select CRYPTO_CRYPTD
+	select CRYPTO_ALGAPI
+	help
+	  Use Intel AES-NI instructions for AES algorithm.
+
+	  AES cipher algorithms (FIPS-197). AES uses the Rijndael
+	  algorithm.
+
+	  Rijndael appears to be consistently a very good performer in
+	  both hardware and software across a wide range of computing
+	  environments regardless of its use in feedback or non-feedback
+	  modes. Its key setup time is excellent, and its key agility is
+	  good. Rijndael's very low memory requirements make it very well
+	  suited for restricted-space environments, in which it also
+	  demonstrates excellent performance. Rijndael's operations are
+	  among the easiest to defend against power and timing attacks.
+
+	  The AES specifies three key sizes: 128, 192 and 256 bits
+
+	  See <http://csrc.nist.gov/encryption/aes/> for more information.
+
 config CRYPTO_ANUBIS
 	tristate "Anubis cipher algorithm"
 	select CRYPTO_ALGAPI
@@ -714,6 +749,15 @@ config CRYPTO_DEFLATE
 
 	  You will most probably want this if using IPSec.
 
+config CRYPTO_ZLIB
+	tristate "Zlib compression algorithm"
+	select CRYPTO_PCOMP
+	select ZLIB_INFLATE
+	select ZLIB_DEFLATE
+	select NLATTR
+	help
+	  This is the zlib algorithm.
+
 config CRYPTO_LZO
 	tristate "LZO compression algorithm"
 	select CRYPTO_ALGAPI
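None of the new infrastructure symbols (CRYPTO_PCOMP, CRYPTO_WORKQUEUE, NLATTR) are user-visible prompts; they are pulled in via select. An illustrative .config fragment exercising the options added above:

	CONFIG_CRYPTO_ZLIB=m          # selects CRYPTO_PCOMP, ZLIB_INFLATE/DEFLATE, NLATTR
	CONFIG_CRYPTO_CRYPTD=m        # now also selects CRYPTO_WORKQUEUE
	CONFIG_CRYPTO_AES_NI_INTEL=m  # selects CRYPTO_AES_X86_64 and CRYPTO_CRYPTD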
@@ -5,6 +5,8 @@
 obj-$(CONFIG_CRYPTO) += crypto.o
 crypto-objs := api.o cipher.o digest.o compress.o
 
+obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
+
 obj-$(CONFIG_CRYPTO_FIPS) += fips.o
 
 crypto_algapi-$(CONFIG_PROC_FS) += proc.o
@@ -25,6 +27,8 @@ crypto_hash-objs += ahash.o
 crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
 
+obj-$(CONFIG_CRYPTO_PCOMP) += pcompress.o
+
 cryptomgr-objs := algboss.o testmgr.o
 
 obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
@@ -70,6 +74,7 @@ obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
 obj-$(CONFIG_CRYPTO_SEED) += seed.o
 obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
 obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
+obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o
 obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
 obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
 obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
@@ -282,6 +282,25 @@ static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
 				      alg->cra_ablkcipher.ivsize))
 		return alg;
 
+	crypto_mod_put(alg);
+	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
+				    mask & ~CRYPTO_ALG_TESTED);
+	if (IS_ERR(alg))
+		return alg;
+
+	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_GIVCIPHER) {
+		if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) {
+			crypto_mod_put(alg);
+			alg = ERR_PTR(-ENOENT);
+		}
+		return alg;
+	}
+
+	BUG_ON(!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+		 CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+					     alg->cra_ablkcipher.ivsize));
+
 	return ERR_PTR(crypto_givcipher_default(alg, type, mask));
 }
 
@@ -422,6 +422,22 @@ static struct crypto_alg *crypto_lookup_aead(const char *name, u32 type,
 	if (!alg->cra_aead.ivsize)
 		return alg;
 
+	crypto_mod_put(alg);
+	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
+				    mask & ~CRYPTO_ALG_TESTED);
+	if (IS_ERR(alg))
+		return alg;
+
+	if (alg->cra_type == &crypto_aead_type) {
+		if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) {
+			crypto_mod_put(alg);
+			alg = ERR_PTR(-ENOENT);
+		}
+		return alg;
+	}
+
+	BUG_ON(!alg->cra_aead.ivsize);
+
 	return ERR_PTR(crypto_nivaead_default(alg, type, mask));
 }
 
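Both lookup fixes retry with CRYPTO_ALG_TESTED forced into type and cleared from mask, then apply the same bit test: (alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED is zero exactly when the algorithm's tested bit equals the value the caller encoded via type and mask, which is what stops a self-test failure from looping forever. A standalone sketch of that test (plain C, values illustrative):

	#include <assert.h>

	#define CRYPTO_ALG_TESTED 0x00000400u

	/* zero iff flags' TESTED bit == ((type ^ ~mask) & TESTED) */
	static int tested_bit_mismatch(unsigned int flags, unsigned int type,
				       unsigned int mask)
	{
		return (flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED;
	}

	int main(void)
	{
		unsigned int T = CRYPTO_ALG_TESTED;

		/* normal lookup: TESTED in both type and mask -> wants a tested alg */
		assert(!tested_bit_mismatch(T, T, T));
		assert(tested_bit_mismatch(0, T, T));

		/* the retry above: TESTED in type, cleared from mask -> wants untested */
		assert(!tested_bit_mismatch(0, T, 0));
		assert(tested_bit_mismatch(T, T, 0));
		return 0;
	}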
Some files were not shown because too many files have changed in this diff