author		Linus Torvalds <torvalds@linux-foundation.org>	2015-04-15 10:42:15 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2015-04-15 10:42:15 -0700
commit		cb906953d2c3fd450655d9fa833f03690ad50c23
tree		06c5665afb24baee3ac49f62db61ca97918079b4 /arch/x86/crypto
parent		6c373ca89399c5a3f7ef210ad8f63dc3437da345
parent		3abafaf2192b1712079edfd4232b19877d6f41a5
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
"Here is the crypto update for 4.1:
New interfaces:
- user-space interface for AEAD
- user-space interface for RNG (i.e., pseudo RNG)
New hashes:
- ARMv8 SHA1/256
- ARMv8 AES
- ARMv8 GHASH
- ARM assembler and NEON SHA256
- MIPS OCTEON SHA1/256/512
- MIPS img-hash SHA1/256 and MD5
- Power 8 VMX AES/CBC/CTR/GHASH
- PPC assembler AES, SHA1/256 and MD5
- Broadcom IPROC RNG driver
Cleanups/fixes:
- prevent internal helper algos from being exposed to user-space
- merge common code from assembly/C SHA implementations
- misc fixes"
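The user-space AEAD interface called out above is reached through the existing AF_ALG socket family (CONFIG_CRYPTO_USER_API_AEAD). The sketch below is illustrative only, not code from this merge: it encrypts one toy buffer with gcm(aes) by binding a transform socket of type "aead", setting the key and tag length, then passing the operation, IV and AAD length as ancillary data with the request. Error handling is elided and the key/IV are placeholder values.

/* Minimal AF_ALG AEAD sketch (assumes a kernel with this series applied). */
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

#ifndef SOL_ALG
#define SOL_ALG 279
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type = "aead",
		.salg_name = "gcm(aes)",
	};
	unsigned char key[16] = { 0 };	/* placeholder key */
	unsigned char iv[12] = { 0 };	/* GCM nonce */
	unsigned char pt[16] = "toy plaintext";
	unsigned char ct[32];		/* ciphertext + 16-byte tag */
	char cbuf[CMSG_SPACE(4) + CMSG_SPACE(4 + sizeof(iv)) + CMSG_SPACE(4)] = { 0 };
	struct iovec iov = { .iov_base = pt, .iov_len = sizeof(pt) };
	struct msghdr msg = {
		.msg_control = cbuf, .msg_controllen = sizeof(cbuf),
		.msg_iov = &iov, .msg_iovlen = 1,
	};
	struct cmsghdr *cmsg;
	struct af_alg_iv *alg_iv;
	int tfmfd, opfd;

	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
	setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
	/* For ALG_SET_AEAD_AUTHSIZE the tag length travels in optlen. */
	setsockopt(tfmfd, SOL_ALG, ALG_SET_AEAD_AUTHSIZE, NULL, 16);
	opfd = accept(tfmfd, NULL, 0);

	cmsg = CMSG_FIRSTHDR(&msg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_OP;
	cmsg->cmsg_len = CMSG_LEN(4);
	*(__u32 *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;

	cmsg = CMSG_NXTHDR(&msg, cmsg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_IV;
	cmsg->cmsg_len = CMSG_LEN(4 + sizeof(iv));
	alg_iv = (struct af_alg_iv *)CMSG_DATA(cmsg);
	alg_iv->ivlen = sizeof(iv);
	memcpy(alg_iv->iv, iv, sizeof(iv));

	cmsg = CMSG_NXTHDR(&msg, cmsg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_AEAD_ASSOCLEN;
	cmsg->cmsg_len = CMSG_LEN(4);
	*(__u32 *)CMSG_DATA(cmsg) = 0;	/* no AAD in this sketch */

	sendmsg(opfd, &msg, 0);
	read(opfd, ct, sizeof(ct));	/* ciphertext followed by tag */

	close(opfd);
	close(tfmfd);
	return 0;
}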
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (169 commits)
crypto: arm - workaround for building with old binutils
crypto: arm/sha256 - avoid sha256 code on ARMv7-M
crypto: x86/sha512_ssse3 - move SHA-384/512 SSSE3 implementation to base layer
crypto: x86/sha256_ssse3 - move SHA-224/256 SSSE3 implementation to base layer
crypto: x86/sha1_ssse3 - move SHA-1 SSSE3 implementation to base layer
crypto: arm64/sha2-ce - move SHA-224/256 ARMv8 implementation to base layer
crypto: arm64/sha1-ce - move SHA-1 ARMv8 implementation to base layer
crypto: arm/sha2-ce - move SHA-224/256 ARMv8 implementation to base layer
crypto: arm/sha256 - move SHA-224/256 ASM/NEON implementation to base layer
crypto: arm/sha1-ce - move SHA-1 ARMv8 implementation to base layer
crypto: arm/sha1_neon - move SHA-1 NEON implementation to base layer
crypto: arm/sha1 - move SHA-1 ARM asm implementation to base layer
crypto: sha512-generic - move to generic glue implementation
crypto: sha256-generic - move to generic glue implementation
crypto: sha1-generic - move to generic glue implementation
crypto: sha512 - implement base layer for SHA-512
crypto: sha256 - implement base layer for SHA-256
crypto: sha1 - implement base layer for SHA-1
crypto: api - remove instance when test failed
crypto: api - Move alg ref count init to crypto_check_alg
...
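Many of the entries above are the "move ... to base layer" conversions. Each SHA glue driver used to carry its own copy of the block buffering, padding and byte-swapping logic; after the conversion it only supplies its raw block-transform function to the shared helpers in <crypto/sha1_base.h>, <crypto/sha256_base.h> and <crypto/sha512_base.h>. This is also why the x86 assembly hunks below swap the first two arguments (digest first, then data): the transform can then be cast directly to a sha*_block_fn. A condensed sketch of a converted update path, modeled on the sha256-ssse3 hunks in the diff below rather than copied verbatim:

/* Sketch of the base-layer pattern; helpers come from <crypto/sha256_base.h>. */
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/i387.h>

asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
				       u64 rounds);

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	/* Fall back to the generic C code when the FPU is unavailable or
	 * the input cannot complete a block anyway. */
	if (!irq_fpu_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_transform_ssse3);
	kernel_fpu_end();
	return 0;
}

finup() and final() shrink the same way, to sha256_base_do_finalize() plus sha256_base_finish(), and the export/import callbacks and .statesize disappear because struct sha256_state itself is now the canonical exported state.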
Diffstat (limited to 'arch/x86/crypto')
22 files changed, 335 insertions, 567 deletions
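Most of the hunks below are one mechanical change, implementing the "prevent internal helper algos from being exposed to user-space" item from the summary above: every helper algorithm whose cra_name begins with "__" gains CRYPTO_ALG_INTERNAL in .cra_flags, and the cryptd/mcryptd wrappers that consume those helpers now request the flag explicitly in their type/mask pair, because lookups that do not set it no longer see internal algorithms. In condensed form (a sketch abbreviated from the aesni-intel_glue.c hunks below, with most fields elided):

/* Condensed sketch of the CRYPTO_ALG_INTERNAL pattern used throughout the
 * diff below; abbreviated from aesni-intel_glue.c, most fields elided. */
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/cryptd.h>

static struct crypto_alg internal_alg = {
	.cra_name = "__ecb-aes-aesni",
	.cra_driver_name = "__driver-ecb-aes-aesni",
	.cra_priority = 0,
	/* CRYPTO_ALG_INTERNAL hides the helper from lookups (and thus from
	 * user-space instantiation) unless the caller asks for the flag. */
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize = 16,
	.cra_module = THIS_MODULE,
};

static struct cryptd_aead *alloc_wrapped_helper(void)
{
	/* Consumers must now pass CRYPTO_ALG_INTERNAL as both type and
	 * mask; a plain (0, 0) lookup no longer finds "__" helpers. */
	return cryptd_alloc_aead("__driver-gcm-aes-aesni",
				 CRYPTO_ALG_INTERNAL,
				 CRYPTO_ALG_INTERNAL);
}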
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 54f60ab41c63..112cefacf2af 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -797,7 +797,9 @@ static int rfc4106_init(struct crypto_tfm *tfm)
 		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
 	struct crypto_aead *cryptd_child;
 	struct aesni_rfc4106_gcm_ctx *child_ctx;
-	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
+	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
+				       CRYPTO_ALG_INTERNAL,
+				       CRYPTO_ALG_INTERNAL);
 	if (IS_ERR(cryptd_tfm))
 		return PTR_ERR(cryptd_tfm);
@@ -890,15 +892,12 @@ out_free_ablkcipher:
 	return ret;
 }
 
-static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
-			   unsigned int key_len)
+static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
+				  unsigned int key_len)
 {
 	int ret = 0;
-	struct crypto_tfm *tfm = crypto_aead_tfm(parent);
-	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
-	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
-	struct aesni_rfc4106_gcm_ctx *child_ctx =
-				aesni_rfc4106_gcm_ctx_get(cryptd_child);
+	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
 	u8 *new_key_align, *new_key_mem = NULL;
 
 	if (key_len < 4) {
@@ -943,20 +942,31 @@ static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
 		goto exit;
 	}
 	ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
-	memcpy(child_ctx, ctx, sizeof(*ctx));
 exit:
 	kfree(new_key_mem);
 	return ret;
 }
 
-/* This is the Integrity Check Value (aka the authentication tag length and can
- * be 8, 12 or 16 bytes long. */
-static int rfc4106_set_authsize(struct crypto_aead *parent,
-				unsigned int authsize)
+static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
+			   unsigned int key_len)
 {
 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
-	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
+	struct crypto_aead *child = cryptd_aead_child(ctx->cryptd_tfm);
+	struct aesni_rfc4106_gcm_ctx *c_ctx = aesni_rfc4106_gcm_ctx_get(child);
+	struct cryptd_aead *cryptd_tfm = ctx->cryptd_tfm;
+	int ret;
+
+	ret = crypto_aead_setkey(child, key, key_len);
+	if (!ret) {
+		memcpy(ctx, c_ctx, sizeof(*ctx));
+		ctx->cryptd_tfm = cryptd_tfm;
+	}
+	return ret;
+}
+
+static int common_rfc4106_set_authsize(struct crypto_aead *aead,
+				       unsigned int authsize)
+{
 	switch (authsize) {
 	case 8:
 	case 12:
@@ -965,51 +975,23 @@ static int rfc4106_set_authsize(struct crypto_aead *parent,
 	default:
 		return -EINVAL;
 	}
-	crypto_aead_crt(parent)->authsize = authsize;
-	crypto_aead_crt(cryptd_child)->authsize = authsize;
+	crypto_aead_crt(aead)->authsize = authsize;
 	return 0;
 }
 
-static int rfc4106_encrypt(struct aead_request *req)
-{
-	int ret;
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
-
-	if (!irq_fpu_usable()) {
-		struct aead_request *cryptd_req =
-			(struct aead_request *) aead_request_ctx(req);
-		memcpy(cryptd_req, req, sizeof(*req));
-		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-		return crypto_aead_encrypt(cryptd_req);
-	} else {
-		struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
-		kernel_fpu_begin();
-		ret = cryptd_child->base.crt_aead.encrypt(req);
-		kernel_fpu_end();
-		return ret;
-	}
-}
-
-static int rfc4106_decrypt(struct aead_request *req)
+/* This is the Integrity Check Value (aka the authentication tag length and can
+ * be 8, 12 or 16 bytes long. */
+static int rfc4106_set_authsize(struct crypto_aead *parent,
+				unsigned int authsize)
 {
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
+	struct crypto_aead *child = cryptd_aead_child(ctx->cryptd_tfm);
 	int ret;
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
-	if (!irq_fpu_usable()) {
-		struct aead_request *cryptd_req =
-			(struct aead_request *) aead_request_ctx(req);
-		memcpy(cryptd_req, req, sizeof(*req));
-		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-		return crypto_aead_decrypt(cryptd_req);
-	} else {
-		struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
-		kernel_fpu_begin();
-		ret = cryptd_child->base.crt_aead.decrypt(req);
-		kernel_fpu_end();
-		return ret;
-	}
+	ret = crypto_aead_setauthsize(child, authsize);
+	if (!ret)
+		crypto_aead_crt(parent)->authsize = authsize;
+	return ret;
 }
 
 static int __driver_rfc4106_encrypt(struct aead_request *req)
@@ -1185,6 +1167,78 @@ static int __driver_rfc4106_decrypt(struct aead_request *req)
 	}
 	return retval;
 }
+
+static int rfc4106_encrypt(struct aead_request *req)
+{
+	int ret;
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
+
+	if (!irq_fpu_usable()) {
+		struct aead_request *cryptd_req =
+			(struct aead_request *) aead_request_ctx(req);
+
+		memcpy(cryptd_req, req, sizeof(*req));
+		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+		ret = crypto_aead_encrypt(cryptd_req);
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_encrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
+
+static int rfc4106_decrypt(struct aead_request *req)
+{
+	int ret;
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
+
+	if (!irq_fpu_usable()) {
+		struct aead_request *cryptd_req =
+			(struct aead_request *) aead_request_ctx(req);
+
+		memcpy(cryptd_req, req, sizeof(*req));
+		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+		ret = crypto_aead_decrypt(cryptd_req);
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_decrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
+
+static int helper_rfc4106_encrypt(struct aead_request *req)
+{
+	int ret;
+
+	if (unlikely(!irq_fpu_usable())) {
+		WARN_ONCE(1, "__gcm-aes-aesni alg used in invalid context");
+		ret = -EINVAL;
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_encrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
+
+static int helper_rfc4106_decrypt(struct aead_request *req)
+{
+	int ret;
+
+	if (unlikely(!irq_fpu_usable())) {
+		WARN_ONCE(1, "__gcm-aes-aesni alg used in invalid context");
+		ret = -EINVAL;
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_decrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
 #endif
 
 static struct crypto_alg aesni_algs[] = { {
@@ -1210,7 +1264,7 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__aes-aesni",
 	.cra_driver_name = "__driver-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = AES_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
 			AESNI_ALIGN - 1,
@@ -1229,7 +1283,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__ecb-aes-aesni",
 	.cra_driver_name = "__driver-ecb-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = AES_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
 			AESNI_ALIGN - 1,
@@ -1249,7 +1304,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__cbc-aes-aesni",
 	.cra_driver_name = "__driver-cbc-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = AES_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
 			AESNI_ALIGN - 1,
@@ -1313,7 +1369,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__ctr-aes-aesni",
 	.cra_driver_name = "__driver-ctr-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct crypto_aes_ctx) +
 			AESNI_ALIGN - 1,
@@ -1357,7 +1414,7 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__gcm-aes-aesni",
 	.cra_driver_name = "__driver-gcm-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_AEAD,
+	.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
 			AESNI_ALIGN,
@@ -1366,8 +1423,12 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_module = THIS_MODULE,
 	.cra_u = {
 		.aead = {
-			.encrypt = __driver_rfc4106_encrypt,
-			.decrypt = __driver_rfc4106_decrypt,
+			.setkey = common_rfc4106_set_key,
+			.setauthsize = common_rfc4106_set_authsize,
+			.encrypt = helper_rfc4106_encrypt,
+			.decrypt = helper_rfc4106_decrypt,
+			.ivsize = 8,
+			.maxauthsize = 16,
 		},
 	},
 }, {
@@ -1423,7 +1484,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__lrw-aes-aesni",
 	.cra_driver_name = "__driver-lrw-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = AES_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct aesni_lrw_ctx),
 	.cra_alignmask = 0,
@@ -1444,7 +1506,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name = "__xts-aes-aesni",
 	.cra_driver_name = "__driver-xts-aes-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = AES_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct aesni_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/camellia_aesni_avx2_glue.c b/arch/x86/crypto/camellia_aesni_avx2_glue.c
index 9a07fafe3831..baf0ac21ace5 100644
--- a/arch/x86/crypto/camellia_aesni_avx2_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -343,7 +343,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__ecb-camellia-aesni-avx2",
 	.cra_driver_name = "__driver-ecb-camellia-aesni-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_ctx),
 	.cra_alignmask = 0,
@@ -362,7 +363,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__cbc-camellia-aesni-avx2",
 	.cra_driver_name = "__driver-cbc-camellia-aesni-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_ctx),
 	.cra_alignmask = 0,
@@ -381,7 +383,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__ctr-camellia-aesni-avx2",
 	.cra_driver_name = "__driver-ctr-camellia-aesni-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct camellia_ctx),
 	.cra_alignmask = 0,
@@ -401,7 +404,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__lrw-camellia-aesni-avx2",
 	.cra_driver_name = "__driver-lrw-camellia-aesni-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_lrw_ctx),
 	.cra_alignmask = 0,
@@ -424,7 +428,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__xts-camellia-aesni-avx2",
 	.cra_driver_name = "__driver-xts-camellia-aesni-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/camellia_aesni_avx_glue.c b/arch/x86/crypto/camellia_aesni_avx_glue.c
index ed38d959add6..78818a1e73e3 100644
--- a/arch/x86/crypto/camellia_aesni_avx_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx_glue.c
@@ -335,7 +335,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__ecb-camellia-aesni",
 	.cra_driver_name = "__driver-ecb-camellia-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_ctx),
 	.cra_alignmask = 0,
@@ -354,7 +355,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__cbc-camellia-aesni",
 	.cra_driver_name = "__driver-cbc-camellia-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_ctx),
 	.cra_alignmask = 0,
@@ -373,7 +375,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__ctr-camellia-aesni",
 	.cra_driver_name = "__driver-ctr-camellia-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct camellia_ctx),
 	.cra_alignmask = 0,
@@ -393,7 +396,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__lrw-camellia-aesni",
 	.cra_driver_name = "__driver-lrw-camellia-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_lrw_ctx),
 	.cra_alignmask = 0,
@@ -416,7 +420,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name = "__xts-camellia-aesni",
 	.cra_driver_name = "__driver-xts-camellia-aesni",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct camellia_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c
index 60ada677a928..236c80974457 100644
--- a/arch/x86/crypto/cast5_avx_glue.c
+++ b/arch/x86/crypto/cast5_avx_glue.c
@@ -341,7 +341,8 @@ static struct crypto_alg cast5_algs[6] = { {
 	.cra_name = "__ecb-cast5-avx",
 	.cra_driver_name = "__driver-ecb-cast5-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAST5_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct cast5_ctx),
 	.cra_alignmask = 0,
@@ -360,7 +361,8 @@ static struct crypto_alg cast5_algs[6] = { {
 	.cra_name = "__cbc-cast5-avx",
 	.cra_driver_name = "__driver-cbc-cast5-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAST5_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct cast5_ctx),
 	.cra_alignmask = 0,
@@ -379,7 +381,8 @@ static struct crypto_alg cast5_algs[6] = { {
 	.cra_name = "__ctr-cast5-avx",
 	.cra_driver_name = "__driver-ctr-cast5-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct cast5_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c
index 0160f68a57ff..f448810ca4ac 100644
--- a/arch/x86/crypto/cast6_avx_glue.c
+++ b/arch/x86/crypto/cast6_avx_glue.c
@@ -372,7 +372,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name = "__ecb-cast6-avx",
 	.cra_driver_name = "__driver-ecb-cast6-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAST6_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct cast6_ctx),
 	.cra_alignmask = 0,
@@ -391,7 +392,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name = "__cbc-cast6-avx",
 	.cra_driver_name = "__driver-cbc-cast6-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAST6_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct cast6_ctx),
 	.cra_alignmask = 0,
@@ -410,7 +412,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name = "__ctr-cast6-avx",
 	.cra_driver_name = "__driver-ctr-cast6-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct cast6_ctx),
 	.cra_alignmask = 0,
@@ -430,7 +433,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name = "__lrw-cast6-avx",
 	.cra_driver_name = "__driver-lrw-cast6-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAST6_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct cast6_lrw_ctx),
 	.cra_alignmask = 0,
@@ -453,7 +457,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name = "__xts-cast6-avx",
 	.cra_driver_name = "__driver-xts-cast6-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = CAST6_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct cast6_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
index 8253d85aa165..2079baf06bdd 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_glue.c
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -154,7 +154,8 @@ static struct shash_alg ghash_alg = {
 		.cra_name = "__ghash",
 		.cra_driver_name = "__ghash-pclmulqdqni",
 		.cra_priority = 0,
-		.cra_flags = CRYPTO_ALG_TYPE_SHASH,
+		.cra_flags = CRYPTO_ALG_TYPE_SHASH |
+			     CRYPTO_ALG_INTERNAL,
 		.cra_blocksize = GHASH_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct ghash_ctx),
 		.cra_module = THIS_MODULE,
@@ -261,7 +262,9 @@ static int ghash_async_init_tfm(struct crypto_tfm *tfm)
 	struct cryptd_ahash *cryptd_tfm;
 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
+	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
+					CRYPTO_ALG_INTERNAL,
+					CRYPTO_ALG_INTERNAL);
 	if (IS_ERR(cryptd_tfm))
 		return PTR_ERR(cryptd_tfm);
 	ctx->cryptd_tfm = cryptd_tfm;
diff --git a/arch/x86/crypto/glue_helper.c b/arch/x86/crypto/glue_helper.c
index 432f1d76ceb8..6a85598931b5 100644
--- a/arch/x86/crypto/glue_helper.c
+++ b/arch/x86/crypto/glue_helper.c
@@ -232,7 +232,6 @@ static void glue_ctr_crypt_final_128bit(const common_glue_ctr_func_t fn_ctr,
 
 	le128_to_be128((be128 *)walk->iv, &ctrblk);
 }
-EXPORT_SYMBOL_GPL(glue_ctr_crypt_final_128bit);
 
 static unsigned int __glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
 					    struct blkcipher_desc *desc,
diff --git a/arch/x86/crypto/serpent_avx2_glue.c b/arch/x86/crypto/serpent_avx2_glue.c
index 437e47a4d302..2f63dc89e7a9 100644
--- a/arch/x86/crypto/serpent_avx2_glue.c
+++ b/arch/x86/crypto/serpent_avx2_glue.c
@@ -309,7 +309,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name = "__ecb-serpent-avx2",
 	.cra_driver_name = "__driver-ecb-serpent-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -329,7 +330,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name = "__cbc-serpent-avx2",
 	.cra_driver_name = "__driver-cbc-serpent-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -349,7 +351,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name = "__ctr-serpent-avx2",
 	.cra_driver_name = "__driver-ctr-serpent-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -370,7 +373,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name = "__lrw-serpent-avx2",
 	.cra_driver_name = "__driver-lrw-serpent-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_lrw_ctx),
 	.cra_alignmask = 0,
@@ -394,7 +398,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name = "__xts-serpent-avx2",
 	.cra_driver_name = "__driver-xts-serpent-avx2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/serpent_avx_glue.c b/arch/x86/crypto/serpent_avx_glue.c
index 7e217398b4eb..c8d478af8456 100644
--- a/arch/x86/crypto/serpent_avx_glue.c
+++ b/arch/x86/crypto/serpent_avx_glue.c
@@ -378,7 +378,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__ecb-serpent-avx",
 	.cra_driver_name = "__driver-ecb-serpent-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -397,7 +398,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__cbc-serpent-avx",
 	.cra_driver_name = "__driver-cbc-serpent-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -416,7 +418,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__ctr-serpent-avx",
 	.cra_driver_name = "__driver-ctr-serpent-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -436,7 +439,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__lrw-serpent-avx",
 	.cra_driver_name = "__driver-lrw-serpent-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_lrw_ctx),
 	.cra_alignmask = 0,
@@ -459,7 +463,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__xts-serpent-avx",
 	.cra_driver_name = "__driver-xts-serpent-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/serpent_sse2_glue.c b/arch/x86/crypto/serpent_sse2_glue.c
index bf025adaea01..3643dd508f45 100644
--- a/arch/x86/crypto/serpent_sse2_glue.c
+++ b/arch/x86/crypto/serpent_sse2_glue.c
@@ -387,7 +387,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__ecb-serpent-sse2",
 	.cra_driver_name = "__driver-ecb-serpent-sse2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -406,7 +407,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__cbc-serpent-sse2",
 	.cra_driver_name = "__driver-cbc-serpent-sse2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -425,7 +427,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__ctr-serpent-sse2",
 	.cra_driver_name = "__driver-ctr-serpent-sse2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
 	.cra_alignmask = 0,
@@ -445,7 +448,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__lrw-serpent-sse2",
 	.cra_driver_name = "__driver-lrw-serpent-sse2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_lrw_ctx),
 	.cra_alignmask = 0,
@@ -468,7 +472,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name = "__xts-serpent-sse2",
 	.cra_driver_name = "__driver-xts-serpent-sse2",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_xts_ctx),
 	.cra_alignmask = 0,
diff --git a/arch/x86/crypto/sha-mb/sha1_mb.c b/arch/x86/crypto/sha-mb/sha1_mb.c
index fd9f6b035b16..e510b1c5d690 100644
--- a/arch/x86/crypto/sha-mb/sha1_mb.c
+++ b/arch/x86/crypto/sha-mb/sha1_mb.c
@@ -694,7 +694,8 @@ static struct shash_alg sha1_mb_shash_alg = {
 		 * use ASYNC flag as some buffers in multi-buffer
 		 * algo may not have completed before hashing thread sleep
 		 */
-		.cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_ASYNC |
+			     CRYPTO_ALG_INTERNAL,
 		.cra_blocksize = SHA1_BLOCK_SIZE,
 		.cra_module = THIS_MODULE,
 		.cra_list = LIST_HEAD_INIT(sha1_mb_shash_alg.base.cra_list),
@@ -770,7 +771,9 @@ static int sha1_mb_async_init_tfm(struct crypto_tfm *tfm)
 	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct mcryptd_hash_ctx *mctx;
 
-	mcryptd_tfm = mcryptd_alloc_ahash("__intel_sha1-mb", 0, 0);
+	mcryptd_tfm = mcryptd_alloc_ahash("__intel_sha1-mb",
+					  CRYPTO_ALG_INTERNAL,
+					  CRYPTO_ALG_INTERNAL);
 	if (IS_ERR(mcryptd_tfm))
 		return PTR_ERR(mcryptd_tfm);
 	mctx = crypto_ahash_ctx(&mcryptd_tfm->base);
@@ -828,7 +831,7 @@ static unsigned long sha1_mb_flusher(struct mcryptd_alg_cstate *cstate)
 	while (!list_empty(&cstate->work_list)) {
 		rctx = list_entry(cstate->work_list.next,
 				struct mcryptd_hash_request_ctx, waiter);
-		if time_before(cur_time, rctx->tag.expire)
+		if (time_before(cur_time, rctx->tag.expire))
 			break;
 		kernel_fpu_begin();
 		sha_ctx = (struct sha1_hash_ctx *) sha1_ctx_mgr_flush(cstate->mgr);
diff --git a/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c b/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c
index 4ca7e166a2aa..822acb5b464c 100644
--- a/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c
+++ b/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c
@@ -56,7 +56,7 @@ void sha1_mb_mgr_init_avx2(struct sha1_mb_mgr *state)
 {
 	unsigned int j;
-	state->unused_lanes = 0xF76543210;
+	state->unused_lanes = 0xF76543210ULL;
 	for (j = 0; j < 8; j++) {
 		state->lens[j] = 0xFFFFFFFF;
 		state->ldata[j].job_in_lane = NULL;
diff --git a/arch/x86/crypto/sha1_ssse3_glue.c b/arch/x86/crypto/sha1_ssse3_glue.c
index 6c20fe04a738..33d1b9dc14cc 100644
--- a/arch/x86/crypto/sha1_ssse3_glue.c
+++ b/arch/x86/crypto/sha1_ssse3_glue.c
@@ -28,7 +28,7 @@
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha1_base.h>
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
@@ -44,132 +44,51 @@ asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
 #define SHA1_AVX2_BLOCK_OPTSIZE 4 /* optimal 4*64 bytes of SHA1 blocks */
 
 asmlinkage void sha1_transform_avx2(u32 *digest, const char *data,
-				   unsigned int rounds);
+				    unsigned int rounds);
 #endif
 
-static asmlinkage void (*sha1_transform_asm)(u32 *, const char *, unsigned int);
-
-
-static int sha1_ssse3_init(struct shash_desc *desc)
-{
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-
-	*sctx = (struct sha1_state){
-		.state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
-	};
-
-	return 0;
-}
-
-static int __sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
-			       unsigned int len, unsigned int partial)
-{
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-	unsigned int done = 0;
-
-	sctx->count += len;
-
-	if (partial) {
-		done = SHA1_BLOCK_SIZE - partial;
-		memcpy(sctx->buffer + partial, data, done);
-		sha1_transform_asm(sctx->state, sctx->buffer, 1);
-	}
-
-	if (len - done >= SHA1_BLOCK_SIZE) {
-		const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE;
-
-		sha1_transform_asm(sctx->state, data + done, rounds);
-		done += rounds * SHA1_BLOCK_SIZE;
-	}
-
-	memcpy(sctx->buffer, data + done, len - done);
-
-	return 0;
-}
+static void (*sha1_transform_asm)(u32 *, const char *, unsigned int);
 
 static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
 			     unsigned int len)
 {
 	struct sha1_state *sctx = shash_desc_ctx(desc);
-	unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
-	int res;
 
-	/* Handle the fast case right here */
-	if (partial + len < SHA1_BLOCK_SIZE) {
-		sctx->count += len;
-		memcpy(sctx->buffer + partial, data, len);
+	if (!irq_fpu_usable() ||
+	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
+		return crypto_sha1_update(desc, data, len);
 
-		return 0;
-	}
+	/* make sure casting to sha1_block_fn() is safe */
+	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);
 
-	if (!irq_fpu_usable()) {
-		res = crypto_sha1_update(desc, data, len);
-	} else {
-		kernel_fpu_begin();
-		res = __sha1_ssse3_update(desc, data, len, partial);
-		kernel_fpu_end();
-	}
-
-	return res;
-}
-
-
-/* Add padding and return the message digest. */
-static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
-{
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-	unsigned int i, index, padlen;
-	__be32 *dst = (__be32 *)out;
-	__be64 bits;
-	static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, };
-
-	bits = cpu_to_be64(sctx->count << 3);
-
-	/* Pad out to 56 mod 64 and append length */
-	index = sctx->count % SHA1_BLOCK_SIZE;
-	padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index);
-	if (!irq_fpu_usable()) {
-		crypto_sha1_update(desc, padding, padlen);
-		crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits));
-	} else {
-		kernel_fpu_begin();
-		/* We need to fill a whole block for __sha1_ssse3_update() */
-		if (padlen <= 56) {
-			sctx->count += padlen;
-			memcpy(sctx->buffer + index, padding, padlen);
-		} else {
-			__sha1_ssse3_update(desc, padding, padlen, index);
-		}
-		__sha1_ssse3_update(desc, (const u8 *)&bits, sizeof(bits), 56);
-		kernel_fpu_end();
-	}
-
-	/* Store state in digest */
-	for (i = 0; i < 5; i++)
-		dst[i] = cpu_to_be32(sctx->state[i]);
-
-	/* Wipe context */
-	memset(sctx, 0, sizeof(*sctx));
+	kernel_fpu_begin();
+	sha1_base_do_update(desc, data, len,
+			    (sha1_block_fn *)sha1_transform_asm);
+	kernel_fpu_end();
 
 	return 0;
 }
 
-static int sha1_ssse3_export(struct shash_desc *desc, void *out)
+static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
+			    unsigned int len, u8 *out)
 {
-	struct sha1_state *sctx = shash_desc_ctx(desc);
+	if (!irq_fpu_usable())
+		return crypto_sha1_finup(desc, data, len, out);
 
-	memcpy(out, sctx, sizeof(*sctx));
+	kernel_fpu_begin();
+	if (len)
+		sha1_base_do_update(desc, data, len,
+				    (sha1_block_fn *)sha1_transform_asm);
+	sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_transform_asm);
+	kernel_fpu_end();
 
-	return 0;
+	return sha1_base_finish(desc, out);
 }
 
-static int sha1_ssse3_import(struct shash_desc *desc, const void *in)
+/* Add padding and return the message digest. */
+static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(sctx, in, sizeof(*sctx));
-
-	return 0;
+	return sha1_ssse3_finup(desc, NULL, 0, out);
 }
 
 #ifdef CONFIG_AS_AVX2
@@ -186,13 +105,11 @@ static void sha1_apply_transform_avx2(u32 *digest, const char *data,
 
 static struct shash_alg alg = {
 	.digestsize = SHA1_DIGEST_SIZE,
-	.init = sha1_ssse3_init,
+	.init = sha1_base_init,
 	.update = sha1_ssse3_update,
 	.final = sha1_ssse3_final,
-	.export = sha1_ssse3_export,
-	.import = sha1_ssse3_import,
+	.finup = sha1_ssse3_finup,
 	.descsize = sizeof(struct sha1_state),
-	.statesize = sizeof(struct sha1_state),
 	.base = {
 		.cra_name = "sha1",
 		.cra_driver_name= "sha1-ssse3",
diff --git a/arch/x86/crypto/sha256-avx-asm.S b/arch/x86/crypto/sha256-avx-asm.S
index 642f15687a0a..92b3b5d75ba9 100644
--- a/arch/x86/crypto/sha256-avx-asm.S
+++ b/arch/x86/crypto/sha256-avx-asm.S
@@ -96,10 +96,10 @@ SHUF_DC00 = %xmm12 # shuffle xDxC -> DC00
 BYTE_FLIP_MASK = %xmm13
 
 NUM_BLKS = %rdx # 3rd arg
-CTX = %rsi # 2nd arg
-INP = %rdi # 1st arg
+INP = %rsi # 2nd arg
+CTX = %rdi # 1st arg
 
-SRND = %rdi # clobbers INP
+SRND = %rsi # clobbers INP
 c = %ecx
 d = %r8d
 e = %edx
@@ -342,8 +342,8 @@ a = TMP_
 ########################################################################
 ## void sha256_transform_avx(void *input_data, UINT32 digest[8], UINT64 num_blks)
-## arg 1 : pointer to input data
-## arg 2 : pointer to digest
+## arg 1 : pointer to digest
+## arg 2 : pointer to input data
 ## arg 3 : Num blocks
 ########################################################################
 .text
diff --git a/arch/x86/crypto/sha256-avx2-asm.S b/arch/x86/crypto/sha256-avx2-asm.S
index 9e86944c539d..570ec5ec62d7 100644
--- a/arch/x86/crypto/sha256-avx2-asm.S
+++ b/arch/x86/crypto/sha256-avx2-asm.S
@@ -91,12 +91,12 @@ BYTE_FLIP_MASK = %ymm13
 X_BYTE_FLIP_MASK = %xmm13 # XMM version of BYTE_FLIP_MASK
 
 NUM_BLKS = %rdx # 3rd arg
-CTX = %rsi # 2nd arg
-INP = %rdi # 1st arg
+INP = %rsi # 2nd arg
+CTX = %rdi # 1st arg
 c = %ecx
 d = %r8d
 e = %edx # clobbers NUM_BLKS
-y3 = %edi # clobbers INP
+y3 = %esi # clobbers INP
 
 TBL = %rbp
@@ -523,8 +523,8 @@ STACK_SIZE = _RSP + _RSP_SIZE
 ########################################################################
 ## void sha256_transform_rorx(void *input_data, UINT32 digest[8], UINT64 num_blks)
-## arg 1 : pointer to input data
-## arg 2 : pointer to digest
+## arg 1 : pointer to digest
+## arg 2 : pointer to input data
 ## arg 3 : Num blocks
 ########################################################################
 .text
diff --git a/arch/x86/crypto/sha256-ssse3-asm.S b/arch/x86/crypto/sha256-ssse3-asm.S
index f833b74d902b..2cedc44e8121 100644
--- a/arch/x86/crypto/sha256-ssse3-asm.S
+++ b/arch/x86/crypto/sha256-ssse3-asm.S
@@ -88,10 +88,10 @@ SHUF_DC00 = %xmm11 # shuffle xDxC -> DC00
 BYTE_FLIP_MASK = %xmm12
 
 NUM_BLKS = %rdx # 3rd arg
-CTX = %rsi # 2nd arg
-INP = %rdi # 1st arg
+INP = %rsi # 2nd arg
+CTX = %rdi # 1st arg
 
-SRND = %rdi # clobbers INP
+SRND = %rsi # clobbers INP
 c = %ecx
 d = %r8d
 e = %edx
@@ -348,8 +348,8 @@ a = TMP_
 ########################################################################
 ## void sha256_transform_ssse3(void *input_data, UINT32 digest[8], UINT64 num_blks)
-## arg 1 : pointer to input data
-## arg 2 : pointer to digest
+## arg 1 : pointer to digest
+## arg 2 : pointer to input data
 ## arg 3 : Num blocks
 ########################################################################
 .text
diff --git a/arch/x86/crypto/sha256_ssse3_glue.c b/arch/x86/crypto/sha256_ssse3_glue.c
index 8fad72f4dfd2..ccc338881ee8 100644
--- a/arch/x86/crypto/sha256_ssse3_glue.c
+++ b/arch/x86/crypto/sha256_ssse3_glue.c
@@ -36,195 +36,74 @@
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha256_base.h>
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
 #include <linux/string.h>
 
-asmlinkage void sha256_transform_ssse3(const char *data, u32 *digest,
-				     u64 rounds);
+asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
+				       u64 rounds);
 #ifdef CONFIG_AS_AVX
-asmlinkage void sha256_transform_avx(const char *data, u32 *digest,
+asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
 				     u64 rounds);
 #endif
 #ifdef CONFIG_AS_AVX2
-asmlinkage void sha256_transform_rorx(const char *data, u32 *digest,
-				      u64 rounds);
+asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
+				      u64 rounds);
 #endif
 
-static asmlinkage void (*sha256_transform_asm)(const char *, u32 *, u64);
-
-
-static int sha256_ssse3_init(struct shash_desc *desc)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA256_H0;
-	sctx->state[1] = SHA256_H1;
-	sctx->state[2] = SHA256_H2;
-	sctx->state[3] = SHA256_H3;
-	sctx->state[4] = SHA256_H4;
-	sctx->state[5] = SHA256_H5;
-	sctx->state[6] = SHA256_H6;
-	sctx->state[7] = SHA256_H7;
-	sctx->count = 0;
-
-	return 0;
-}
-
-static int __sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
-			       unsigned int len, unsigned int partial)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int done = 0;
-
-	sctx->count += len;
-
-	if (partial) {
-		done = SHA256_BLOCK_SIZE - partial;
-		memcpy(sctx->buf + partial, data, done);
-		sha256_transform_asm(sctx->buf, sctx->state, 1);
-	}
-
-	if (len - done >= SHA256_BLOCK_SIZE) {
-		const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;
-
-		sha256_transform_asm(data + done, sctx->state, (u64) rounds);
-
-		done += rounds * SHA256_BLOCK_SIZE;
-	}
-
-	memcpy(sctx->buf, data + done, len - done);
-
-	return 0;
-}
+static void (*sha256_transform_asm)(u32 *, const char *, u64);
 
 static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
 			     unsigned int len)
 {
 	struct sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
-	int res;
 
-	/* Handle the fast case right here */
-	if (partial + len < SHA256_BLOCK_SIZE) {
-		sctx->count += len;
-		memcpy(sctx->buf + partial, data, len);
+	if (!irq_fpu_usable() ||
+	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
+		return crypto_sha256_update(desc, data, len);
 
-		return 0;
-	}
-
-	if (!irq_fpu_usable()) {
-		res = crypto_sha256_update(desc, data, len);
-	} else {
-		kernel_fpu_begin();
-		res = __sha256_ssse3_update(desc, data, len, partial);
-		kernel_fpu_end();
-	}
-
-	return res;
-}
+	/* make sure casting to sha256_block_fn() is safe */
+	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
 
-
-/* Add padding and return the message digest. */
-static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int i, index, padlen;
-	__be32 *dst = (__be32 *)out;
-	__be64 bits;
-	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };
-
-	bits = cpu_to_be64(sctx->count << 3);
-
-	/* Pad out to 56 mod 64 and append length */
-	index = sctx->count % SHA256_BLOCK_SIZE;
-	padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56)-index);
-
-	if (!irq_fpu_usable()) {
-		crypto_sha256_update(desc, padding, padlen);
-		crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits));
-	} else {
-		kernel_fpu_begin();
-		/* We need to fill a whole block for __sha256_ssse3_update() */
-		if (padlen <= 56) {
-			sctx->count += padlen;
-			memcpy(sctx->buf + index, padding, padlen);
-		} else {
-			__sha256_ssse3_update(desc, padding, padlen, index);
-		}
-		__sha256_ssse3_update(desc, (const u8 *)&bits,
-					sizeof(bits), 56);
-		kernel_fpu_end();
-	}
-
-	/* Store state in digest */
-	for (i = 0; i < 8; i++)
-		dst[i] = cpu_to_be32(sctx->state[i]);
-
-	/* Wipe context */
-	memset(sctx, 0, sizeof(*sctx));
+	kernel_fpu_begin();
+	sha256_base_do_update(desc, data, len,
+			      (sha256_block_fn *)sha256_transform_asm);
+	kernel_fpu_end();
 
 	return 0;
 }
 
-static int sha256_ssse3_export(struct shash_desc *desc, void *out)
+static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
+			      unsigned int len, u8 *out)
 {
-	struct sha256_state *sctx = shash_desc_ctx(desc);
+	if (!irq_fpu_usable())
+		return crypto_sha256_finup(desc, data, len, out);
 
-	memcpy(out, sctx, sizeof(*sctx));
+	kernel_fpu_begin();
+	if (len)
+		sha256_base_do_update(desc, data, len,
+				      (sha256_block_fn *)sha256_transform_asm);
+	sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_transform_asm);
+	kernel_fpu_end();
 
-	return 0;
+	return sha256_base_finish(desc, out);
 }
 
-static int sha256_ssse3_import(struct shash_desc *desc, const void *in)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(sctx, in, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha224_ssse3_init(struct shash_desc *desc)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA224_H0;
-	sctx->state[1] = SHA224_H1;
-	sctx->state[2] = SHA224_H2;
-	sctx->state[3] = SHA224_H3;
-	sctx->state[4] = SHA224_H4;
-	sctx->state[5] = SHA224_H5;
-	sctx->state[6] = SHA224_H6;
-	sctx->state[7] = SHA224_H7;
-	sctx->count = 0;
-
-	return 0;
-}
-
-static int sha224_ssse3_final(struct shash_desc *desc, u8 *hash)
+/* Add padding and return the message digest. */
+static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
 {
-	u8 D[SHA256_DIGEST_SIZE];
-
-	sha256_ssse3_final(desc, D);
-
-	memcpy(hash, D, SHA224_DIGEST_SIZE);
-	memzero_explicit(D, SHA256_DIGEST_SIZE);
-
-	return 0;
+	return sha256_ssse3_finup(desc, NULL, 0, out);
 }
 
 static struct shash_alg algs[] = { {
 	.digestsize = SHA256_DIGEST_SIZE,
-	.init = sha256_ssse3_init,
+	.init = sha256_base_init,
 	.update = sha256_ssse3_update,
 	.final = sha256_ssse3_final,
-	.export = sha256_ssse3_export,
-	.import = sha256_ssse3_import,
+	.finup = sha256_ssse3_finup,
 	.descsize = sizeof(struct sha256_state),
-	.statesize = sizeof(struct sha256_state),
 	.base = {
 		.cra_name = "sha256",
 		.cra_driver_name = "sha256-ssse3",
@@ -235,13 +114,11 @@ static struct shash_alg algs[] = { {
 } }, {
 	.digestsize = SHA224_DIGEST_SIZE,
-	.init = sha224_ssse3_init,
+	.init = sha224_base_init,
 	.update = sha256_ssse3_update,
-	.final = sha224_ssse3_final,
-	.export = sha256_ssse3_export,
-	.import = sha256_ssse3_import,
+	.final = sha256_ssse3_final,
+	.finup = sha256_ssse3_finup,
 	.descsize = sizeof(struct sha256_state),
-	.statesize = sizeof(struct sha256_state),
 	.base = {
 		.cra_name = "sha224",
 		.cra_driver_name = "sha224-ssse3",
diff --git a/arch/x86/crypto/sha512-avx-asm.S b/arch/x86/crypto/sha512-avx-asm.S
index 974dde9bc6cd..565274d6a641 100644
--- a/arch/x86/crypto/sha512-avx-asm.S
+++ b/arch/x86/crypto/sha512-avx-asm.S
@@ -54,9 +54,9 @@
 # Virtual Registers
 # ARG1
-msg = %rdi
+digest = %rdi
 # ARG2
-digest = %rsi
+msg = %rsi
 # ARG3
 msglen = %rdx
 T1 = %rcx
@@ -271,7 +271,7 @@ frame_size = frame_GPRSAVE + GPRSAVE_SIZE
 .endm
 
 ########################################################################
-# void sha512_transform_avx(const void* M, void* D, u64 L)
+# void sha512_transform_avx(void* D, const void* M, u64 L)
 # Purpose: Updates the SHA512 digest stored at D with the message stored in M.
 # The size of the message pointed to by M must be an integer multiple of SHA512
 # message blocks.
diff --git a/arch/x86/crypto/sha512-avx2-asm.S b/arch/x86/crypto/sha512-avx2-asm.S
index 568b96105f5c..a4771dcd1fcf 100644
--- a/arch/x86/crypto/sha512-avx2-asm.S
+++ b/arch/x86/crypto/sha512-avx2-asm.S
@@ -70,9 +70,9 @@ XFER = YTMP0
 BYTE_FLIP_MASK = %ymm9
 
 # 1st arg
-INP = %rdi
+CTX = %rdi
 # 2nd arg
-CTX = %rsi
+INP = %rsi
 # 3rd arg
 NUM_BLKS = %rdx
@@ -562,7 +562,7 @@ frame_size = frame_GPRSAVE + GPRSAVE_SIZE
 .endm
 
 ########################################################################
-# void sha512_transform_rorx(const void* M, void* D, uint64_t L)#
+# void sha512_transform_rorx(void* D, const void* M, uint64_t L)#
 # Purpose: Updates the SHA512 digest stored at D with the message stored in M.
 # The size of the message pointed to by M must be an integer multiple of SHA512
 # message blocks.
diff --git a/arch/x86/crypto/sha512-ssse3-asm.S b/arch/x86/crypto/sha512-ssse3-asm.S
index fb56855d51f5..e610e29cbc81 100644
--- a/arch/x86/crypto/sha512-ssse3-asm.S
+++ b/arch/x86/crypto/sha512-ssse3-asm.S
@@ -53,9 +53,9 @@
 # Virtual Registers
 # ARG1
-msg = %rdi
+digest = %rdi
 # ARG2
-digest = %rsi
+msg = %rsi
 # ARG3
 msglen = %rdx
 T1 = %rcx
@@ -269,7 +269,7 @@ frame_size = frame_GPRSAVE + GPRSAVE_SIZE
 .endm
 
 ########################################################################
-# void sha512_transform_ssse3(const void* M, void* D, u64 L)#
+# void sha512_transform_ssse3(void* D, const void* M, u64 L)#
 # Purpose: Updates the SHA512 digest stored at D with the message stored in M.
 # The size of the message pointed to by M must be an integer multiple of SHA512
 # message blocks.
diff --git a/arch/x86/crypto/sha512_ssse3_glue.c b/arch/x86/crypto/sha512_ssse3_glue.c
index 0b6af26832bf..d9fa4c1e063f 100644
--- a/arch/x86/crypto/sha512_ssse3_glue.c
+++ b/arch/x86/crypto/sha512_ssse3_glue.c
@@ -34,205 +34,75 @@
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha512_base.h>
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
 #include <linux/string.h>
 
-asmlinkage void sha512_transform_ssse3(const char *data, u64 *digest,
-				     u64 rounds);
+asmlinkage void sha512_transform_ssse3(u64 *digest, const char *data,
+				       u64 rounds);
 #ifdef CONFIG_AS_AVX
-asmlinkage void sha512_transform_avx(const char *data, u64 *digest,
+asmlinkage void sha512_transform_avx(u64 *digest, const char *data,
 				     u64 rounds);
 #endif
 #ifdef CONFIG_AS_AVX2
-asmlinkage void sha512_transform_rorx(const char *data, u64 *digest,
-				      u64 rounds);
+asmlinkage void sha512_transform_rorx(u64 *digest, const char *data,
+				      u64 rounds);
 #endif
 
-static asmlinkage void (*sha512_transform_asm)(const char *, u64 *, u64);
-
-
-static int sha512_ssse3_init(struct shash_desc *desc)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA512_H0;
-	sctx->state[1] = SHA512_H1;
-	sctx->state[2] = SHA512_H2;
-	sctx->state[3] = SHA512_H3;
-	sctx->state[4] = SHA512_H4;
-	sctx->state[5] = SHA512_H5;
-	sctx->state[6] = SHA512_H6;
-	sctx->state[7] = SHA512_H7;
-	sctx->count[0] = sctx->count[1] = 0;
-
-	return 0;
-}
+static void (*sha512_transform_asm)(u64 *, const char *, u64);
 
-static int __sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
-			       unsigned int len, unsigned int partial)
+static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
+			       unsigned int len)
 {
 	struct sha512_state *sctx = shash_desc_ctx(desc);
-	unsigned int done = 0;
-
-	sctx->count[0] += len;
-	if (sctx->count[0] < len)
-		sctx->count[1]++;
 
-	if (partial) {
-		done = SHA512_BLOCK_SIZE - partial;
-		memcpy(sctx->buf + partial, data, done);
-		sha512_transform_asm(sctx->buf, sctx->state, 1);
-	}
-
-	if (len - done >= SHA512_BLOCK_SIZE) {
-		const unsigned int rounds = (len - done) / SHA512_BLOCK_SIZE;
+	if (!irq_fpu_usable() ||
+	    (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
+		return crypto_sha512_update(desc, data, len);
 
-		sha512_transform_asm(data + done, sctx->state, (u64) rounds);
-
-		done += rounds * SHA512_BLOCK_SIZE;
-	}
+	/* make sure casting to sha512_block_fn() is safe */
+	BUILD_BUG_ON(offsetof(struct sha512_state, state) != 0);
 
-	memcpy(sctx->buf, data + done, len - done);
+	kernel_fpu_begin();
+	sha512_base_do_update(desc, data, len,
+			      (sha512_block_fn *)sha512_transform_asm);
+	kernel_fpu_end();
 
 	return 0;
 }
 
-static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
-			       unsigned int len)
+static int sha512_ssse3_finup(struct shash_desc *desc, const u8 *data,
+			      unsigned int len, u8 *out)
 {
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-	unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
-	int res;
-
-	/* Handle the fast case right here */
-	if (partial + len < SHA512_BLOCK_SIZE) {
-		sctx->count[0] += len;
-		if (sctx->count[0] < len)
-			sctx->count[1]++;
-		memcpy(sctx->buf + partial, data, len);
-
-		return 0;
-	}
+	if (!irq_fpu_usable())
+		return crypto_sha512_finup(desc, data, len, out);
 
-	if (!irq_fpu_usable()) {
-		res = crypto_sha512_update(desc, data, len);
-	} else {
-		kernel_fpu_begin();
-		res = __sha512_ssse3_update(desc, data, len, partial);
-		kernel_fpu_end();
-	}
+	kernel_fpu_begin();
+	if (len)
+		sha512_base_do_update(desc, data, len,
+				      (sha512_block_fn *)sha512_transform_asm);
+	sha512_base_do_finalize(desc, (sha512_block_fn *)sha512_transform_asm);
+	kernel_fpu_end();
 
-	return res;
+	return sha512_base_finish(desc, out);
 }
 
-
 /* Add padding and return the message digest. */
 static int sha512_ssse3_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-	unsigned int i, index, padlen;
-	__be64 *dst = (__be64 *)out;
-	__be64 bits[2];
-	static const u8 padding[SHA512_BLOCK_SIZE] = { 0x80, };
-
-	/* save number of bits */
-	bits[1] = cpu_to_be64(sctx->count[0] << 3);
-	bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);
-
-	/* Pad out to 112 mod 128 and append length */
-	index = sctx->count[0] & 0x7f;
-	padlen = (index < 112) ? (112 - index) : ((128+112) - index);
-
-	if (!irq_fpu_usable()) {
-		crypto_sha512_update(desc, padding, padlen);
-		crypto_sha512_update(desc, (const u8 *)&bits, sizeof(bits));
-	} else {
-		kernel_fpu_begin();
-		/* We need to fill a whole block for __sha512_ssse3_update() */
-		if (padlen <= 112) {
-			sctx->count[0] += padlen;
-			if (sctx->count[0] < padlen)
-				sctx->count[1]++;
-			memcpy(sctx->buf + index, padding, padlen);
-		} else {
-			__sha512_ssse3_update(desc, padding, padlen, index);
-		}
-		__sha512_ssse3_update(desc, (const u8 *)&bits,
-					sizeof(bits), 112);
-		kernel_fpu_end();
-	}
-
-	/* Store state in digest */
-	for (i = 0; i < 8; i++)
-		dst[i] = cpu_to_be64(sctx->state[i]);
-
-	/* Wipe context */
-	memset(sctx, 0, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha512_ssse3_export(struct shash_desc *desc, void *out)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(out, sctx, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha512_ssse3_import(struct shash_desc *desc, const void *in)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(sctx, in, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha384_ssse3_init(struct shash_desc *desc)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA384_H0;
-	sctx->state[1] = SHA384_H1;
-	sctx->state[2] = SHA384_H2;
-	sctx->state[3] = SHA384_H3;
-	sctx->state[4] = SHA384_H4;
-	sctx->state[5] = SHA384_H5;
-	sctx->state[6] = SHA384_H6;
-	sctx->state[7] = SHA384_H7;
-
-	sctx->count[0] = sctx->count[1] = 0;
-
-	return 0;
-}
-
-static int sha384_ssse3_final(struct shash_desc *desc, u8 *hash)
-{
-	u8 D[SHA512_DIGEST_SIZE];
-
-	sha512_ssse3_final(desc, D);
-
-	memcpy(hash, D, SHA384_DIGEST_SIZE);
-	memzero_explicit(D, SHA512_DIGEST_SIZE);
-
-	return 0;
+	return sha512_ssse3_finup(desc, NULL, 0, out);
 }
 
 static struct shash_alg algs[] = { {
 	.digestsize = SHA512_DIGEST_SIZE,
-	.init = sha512_ssse3_init,
+	.init = sha512_base_init,
 	.update = sha512_ssse3_update,
 	.final = sha512_ssse3_final,
-	.export = sha512_ssse3_export,
-	.import = sha512_ssse3_import,
+	.finup = sha512_ssse3_finup,
 	.descsize = sizeof(struct sha512_state),
-	.statesize = sizeof(struct sha512_state),
 	.base = {
 		.cra_name = "sha512",
 		.cra_driver_name = "sha512-ssse3",
@@ -243,13 +113,11 @@ static struct shash_alg algs[] = { {
 } }, {
 	.digestsize = SHA384_DIGEST_SIZE,
-	.init = sha384_ssse3_init,
+	.init = sha384_base_init,
 	.update = sha512_ssse3_update,
-	.final = sha384_ssse3_final,
-	.export = sha512_ssse3_export,
-	.import = sha512_ssse3_import,
+	.final = sha512_ssse3_final,
+	.finup = sha512_ssse3_finup,
 	.descsize = sizeof(struct sha512_state),
-	.statesize = sizeof(struct sha512_state),
 	.base = {
 		.cra_name = "sha384",
 		.cra_driver_name = "sha384-ssse3",
diff --git a/arch/x86/crypto/twofish_avx_glue.c b/arch/x86/crypto/twofish_avx_glue.c
index 1ac531ea9bcc..b5e2d5651851 100644
--- a/arch/x86/crypto/twofish_avx_glue.c
+++ b/arch/x86/crypto/twofish_avx_glue.c
@@ -340,7 +340,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name = "__ecb-twofish-avx",
 	.cra_driver_name = "__driver-ecb-twofish-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = TF_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct twofish_ctx),
 	.cra_alignmask = 0,
@@ -359,7 +360,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name = "__cbc-twofish-avx",
 	.cra_driver_name = "__driver-cbc-twofish-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = TF_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct twofish_ctx),
 	.cra_alignmask = 0,
@@ -378,7 +380,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name = "__ctr-twofish-avx",
 	.cra_driver_name = "__driver-ctr-twofish-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = 1,
 	.cra_ctxsize = sizeof(struct twofish_ctx),
 	.cra_alignmask = 0,
@@ -398,7 +401,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name = "__lrw-twofish-avx",
 	.cra_driver_name = "__driver-lrw-twofish-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = TF_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
 	.cra_alignmask = 0,
@@ -421,7 +425,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name = "__xts-twofish-avx",
 	.cra_driver_name = "__driver-xts-twofish-avx",
 	.cra_priority = 0,
-	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_INTERNAL,
 	.cra_blocksize = TF_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct twofish_xts_ctx),
 	.cra_alignmask = 0,