Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig  349
-rw-r--r--  crypto/Makefile  36
-rw-r--r--  crypto/ablkcipher.c  407
-rw-r--r--  crypto/acompress.c  4
-rw-r--r--  crypto/adiantum.c  111
-rw-r--r--  crypto/aead.c  183
-rw-r--r--  crypto/aegis.h  39
-rw-r--r--  crypto/aegis128-core.c (renamed from crypto/aegis128.c)  163
-rw-r--r--  crypto/aegis128-neon-inner.c  262
-rw-r--r--  crypto/aegis128-neon.c  70
-rw-r--r--  crypto/aegis128l.c  522
-rw-r--r--  crypto/aegis256.c  473
-rw-r--r--  crypto/aes_generic.c  183
-rw-r--r--  crypto/aes_ti.c  313
-rw-r--r--  crypto/af_alg.c  10
-rw-r--r--  crypto/ahash.c  54
-rw-r--r--  crypto/akcipher.c  9
-rw-r--r--  crypto/algapi.c  274
-rw-r--r--  crypto/algboss.c  12
-rw-r--r--  crypto/algif_skcipher.c  2
-rw-r--r--  crypto/anubis.c  2
-rw-r--r--  crypto/api.c  27
-rw-r--r--  crypto/asymmetric_keys/asym_tpm.c  102
-rw-r--r--  crypto/asymmetric_keys/pkcs7_verify.c  33
-rw-r--r--  crypto/asymmetric_keys/public_key.c  1
-rw-r--r--  crypto/asymmetric_keys/verify_pefile.c  4
-rw-r--r--  crypto/authenc.c  70
-rw-r--r--  crypto/authencesn.c  70
-rw-r--r--  crypto/blake2b_generic.c  318
-rw-r--r--  crypto/blake2s_generic.c  169
-rw-r--r--  crypto/blkcipher.c  548
-rw-r--r--  crypto/camellia_generic.c  5
-rw-r--r--  crypto/cast6_generic.c  28
-rw-r--r--  crypto/cbc.c  15
-rw-r--r--  crypto/ccm.c  136
-rw-r--r--  crypto/cfb.c  5
-rw-r--r--  crypto/chacha20poly1305.c  96
-rw-r--r--  crypto/chacha_generic.c  94
-rw-r--r--  crypto/cipher.c  93
-rw-r--r--  crypto/cmac.c  40
-rw-r--r--  crypto/compress.c  31
-rw-r--r--  crypto/crc32_generic.c  4
-rw-r--r--  crypto/crc32c_generic.c  4
-rw-r--r--  crypto/cryptd.c  177
-rw-r--r--  crypto/crypto_engine.c  31
-rw-r--r--  crypto/crypto_user_base.c  44
-rw-r--r--  crypto/crypto_user_stat.c  12
-rw-r--r--  crypto/ctr.c  26
-rw-r--r--  crypto/cts.c  15
-rw-r--r--  crypto/curve25519-generic.c  90
-rw-r--r--  crypto/des_generic.c  943
-rw-r--r--  crypto/ecb.c  5
-rw-r--r--  crypto/ecc.c  5
-rw-r--r--  crypto/echainiv.c  20
-rw-r--r--  crypto/essiv.c  642
-rw-r--r--  crypto/fips.c  11
-rw-r--r--  crypto/gcm.c  143
-rw-r--r--  crypto/geniv.c  177
-rw-r--r--  crypto/ghash-generic.c  35
-rw-r--r--  crypto/hmac.c  62
-rw-r--r--  crypto/internal.h  4
-rw-r--r--  crypto/jitterentropy-kcapi.c  8
-rw-r--r--  crypto/jitterentropy.c  13
-rw-r--r--  crypto/jitterentropy.h  17
-rw-r--r--  crypto/keywrap.c  15
-rw-r--r--  crypto/lrw.c  17
-rw-r--r--  crypto/michael_mic.c  4
-rw-r--r--  crypto/morus1280.c  542
-rw-r--r--  crypto/morus640.c  533
-rw-r--r--  crypto/nhpoly1305.c  5
-rw-r--r--  crypto/ofb.c  5
-rw-r--r--  crypto/pcbc.c  5
-rw-r--r--  crypto/pcrypt.c  201
-rw-r--r--  crypto/poly1305_generic.c  213
-rw-r--r--  crypto/rsa-pkcs1pad.c  8
-rw-r--r--  crypto/scompress.c  4
-rw-r--r--  crypto/seqiv.c  20
-rw-r--r--  crypto/serpent_generic.c  6
-rw-r--r--  crypto/sha256_generic.c  224
-rw-r--r--  crypto/shash.c  95
-rw-r--r--  crypto/simd.c  12
-rw-r--r--  crypto/skcipher.c  369
-rw-r--r--  crypto/sm4_generic.c  16
-rw-r--r--  crypto/streebog_generic.c  46
-rw-r--r--  crypto/tcrypt.c  20
-rw-r--r--  crypto/testmgr.c  754
-rw-r--r--  crypto/testmgr.h  5406
-rw-r--r--  crypto/tgr192.c  4
-rw-r--r--  crypto/twofish_common.c  8
-rw-r--r--  crypto/vmac.c  44
-rw-r--r--  crypto/xcbc.c  45
-rw-r--r--  crypto/xts.c  169
-rw-r--r--  crypto/xxhash_generic.c  4
93 files changed, 6620 insertions(+), 10025 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index e801450bcb1c..c24a47406f8f 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -52,12 +52,12 @@ config CRYPTO_AEAD2
select CRYPTO_NULL2
select CRYPTO_RNG2
-config CRYPTO_BLKCIPHER
+config CRYPTO_SKCIPHER
tristate
- select CRYPTO_BLKCIPHER2
+ select CRYPTO_SKCIPHER2
select CRYPTO_ALGAPI
-config CRYPTO_BLKCIPHER2
+config CRYPTO_SKCIPHER2
tristate
select CRYPTO_ALGAPI2
select CRYPTO_RNG2
@@ -123,7 +123,7 @@ config CRYPTO_MANAGER2
def_tristate CRYPTO_MANAGER || (CRYPTO_MANAGER!=n && CRYPTO_ALGAPI=y)
select CRYPTO_AEAD2
select CRYPTO_HASH2
- select CRYPTO_BLKCIPHER2
+ select CRYPTO_SKCIPHER2
select CRYPTO_AKCIPHER2
select CRYPTO_KPP2
select CRYPTO_ACOMP2
@@ -136,8 +136,6 @@ config CRYPTO_USER
Userspace configuration for cryptographic instantiations such as
cbc(aes).
-if CRYPTO_MANAGER2
-
config CRYPTO_MANAGER_DISABLE_TESTS
bool "Disable run-time self tests"
default y
@@ -155,8 +153,6 @@ config CRYPTO_MANAGER_EXTRA_TESTS
This is intended for developer use only, as these tests take much
longer to run than the normal self tests.
-endif # if CRYPTO_MANAGER2
-
config CRYPTO_GF128MUL
tristate
@@ -169,7 +165,7 @@ config CRYPTO_NULL
config CRYPTO_NULL2
tristate
select CRYPTO_ALGAPI2
- select CRYPTO_BLKCIPHER2
+ select CRYPTO_SKCIPHER2
select CRYPTO_HASH2
config CRYPTO_PCRYPT
@@ -184,7 +180,7 @@ config CRYPTO_PCRYPT
config CRYPTO_CRYPTD
tristate "Software async crypto daemon"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_HASH
select CRYPTO_MANAGER
help
@@ -195,7 +191,7 @@ config CRYPTO_CRYPTD
config CRYPTO_AUTHENC
tristate "Authenc support"
select CRYPTO_AEAD
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
select CRYPTO_HASH
select CRYPTO_NULL
@@ -217,7 +213,7 @@ config CRYPTO_SIMD
config CRYPTO_GLUE_HELPER_X86
tristate
depends on X86
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
config CRYPTO_ENGINE
tristate
@@ -264,6 +260,17 @@ config CRYPTO_ECRDSA
standard algorithms (called GOST algorithms). Only signature verification
is implemented.
+config CRYPTO_CURVE25519
+ tristate "Curve25519 algorithm"
+ select CRYPTO_KPP
+ select CRYPTO_LIB_CURVE25519_GENERIC
+
+config CRYPTO_CURVE25519_X86
+ tristate "x86_64 accelerated Curve25519 scalar multiplication library"
+ depends on X86 && 64BIT
+ select CRYPTO_LIB_CURVE25519_GENERIC
+ select CRYPTO_ARCH_HAVE_LIB_CURVE25519
+
comment "Authenticated Encryption with Associated Data"
config CRYPTO_CCM
@@ -306,19 +313,11 @@ config CRYPTO_AEGIS128
help
Support for the AEGIS-128 dedicated AEAD algorithm.
-config CRYPTO_AEGIS128L
- tristate "AEGIS-128L AEAD algorithm"
- select CRYPTO_AEAD
- select CRYPTO_AES # for AES S-box tables
- help
- Support for the AEGIS-128L dedicated AEAD algorithm.
-
-config CRYPTO_AEGIS256
- tristate "AEGIS-256 AEAD algorithm"
- select CRYPTO_AEAD
- select CRYPTO_AES # for AES S-box tables
- help
- Support for the AEGIS-256 dedicated AEAD algorithm.
+config CRYPTO_AEGIS128_SIMD
+ bool "Support SIMD acceleration for AEGIS-128"
+ depends on CRYPTO_AEGIS128 && ((ARM || ARM64) && KERNEL_MODE_NEON)
+ depends on !ARM || CC_IS_CLANG || GCC_VERSION >= 40800
+ default y
config CRYPTO_AEGIS128_AESNI_SSE2
tristate "AEGIS-128 AEAD algorithm (x86_64 AESNI+SSE2 implementation)"
@@ -328,82 +327,10 @@ config CRYPTO_AEGIS128_AESNI_SSE2
help
AESNI+SSE2 implementation of the AEGIS-128 dedicated AEAD algorithm.
-config CRYPTO_AEGIS128L_AESNI_SSE2
- tristate "AEGIS-128L AEAD algorithm (x86_64 AESNI+SSE2 implementation)"
- depends on X86 && 64BIT
- select CRYPTO_AEAD
- select CRYPTO_SIMD
- help
- AESNI+SSE2 implementation of the AEGIS-128L dedicated AEAD algorithm.
-
-config CRYPTO_AEGIS256_AESNI_SSE2
- tristate "AEGIS-256 AEAD algorithm (x86_64 AESNI+SSE2 implementation)"
- depends on X86 && 64BIT
- select CRYPTO_AEAD
- select CRYPTO_SIMD
- help
- AESNI+SSE2 implementation of the AEGIS-256 dedicated AEAD algorithm.
-
-config CRYPTO_MORUS640
- tristate "MORUS-640 AEAD algorithm"
- select CRYPTO_AEAD
- help
- Support for the MORUS-640 dedicated AEAD algorithm.
-
-config CRYPTO_MORUS640_GLUE
- tristate
- depends on X86
- select CRYPTO_AEAD
- select CRYPTO_SIMD
- help
- Common glue for SIMD optimizations of the MORUS-640 dedicated AEAD
- algorithm.
-
-config CRYPTO_MORUS640_SSE2
- tristate "MORUS-640 AEAD algorithm (x86_64 SSE2 implementation)"
- depends on X86 && 64BIT
- select CRYPTO_AEAD
- select CRYPTO_MORUS640_GLUE
- help
- SSE2 implementation of the MORUS-640 dedicated AEAD algorithm.
-
-config CRYPTO_MORUS1280
- tristate "MORUS-1280 AEAD algorithm"
- select CRYPTO_AEAD
- help
- Support for the MORUS-1280 dedicated AEAD algorithm.
-
-config CRYPTO_MORUS1280_GLUE
- tristate
- depends on X86
- select CRYPTO_AEAD
- select CRYPTO_SIMD
- help
- Common glue for SIMD optimizations of the MORUS-1280 dedicated AEAD
- algorithm.
-
-config CRYPTO_MORUS1280_SSE2
- tristate "MORUS-1280 AEAD algorithm (x86_64 SSE2 implementation)"
- depends on X86 && 64BIT
- select CRYPTO_AEAD
- select CRYPTO_MORUS1280_GLUE
- help
- SSE2 optimized implementation of the MORUS-1280 dedicated AEAD
- algorithm.
-
-config CRYPTO_MORUS1280_AVX2
- tristate "MORUS-1280 AEAD algorithm (x86_64 AVX2 implementation)"
- depends on X86 && 64BIT
- select CRYPTO_AEAD
- select CRYPTO_MORUS1280_GLUE
- help
- AVX2 optimized implementation of the MORUS-1280 dedicated AEAD
- algorithm.
-
config CRYPTO_SEQIV
tristate "Sequence Number IV Generator"
select CRYPTO_AEAD
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_NULL
select CRYPTO_RNG_DEFAULT
select CRYPTO_MANAGER
@@ -426,7 +353,7 @@ comment "Block modes"
config CRYPTO_CBC
tristate "CBC support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
CBC: Cipher Block Chaining mode
@@ -434,7 +361,7 @@ config CRYPTO_CBC
config CRYPTO_CFB
tristate "CFB support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
CFB: Cipher FeedBack mode
@@ -442,7 +369,7 @@ config CRYPTO_CFB
config CRYPTO_CTR
tristate "CTR support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_SEQIV
select CRYPTO_MANAGER
help
@@ -451,7 +378,7 @@ config CRYPTO_CTR
config CRYPTO_CTS
tristate "CTS support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
CTS: Cipher Text Stealing
@@ -466,7 +393,7 @@ config CRYPTO_CTS
config CRYPTO_ECB
tristate "ECB support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
ECB: Electronic CodeBook mode
@@ -475,7 +402,7 @@ config CRYPTO_ECB
config CRYPTO_LRW
tristate "LRW support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
select CRYPTO_GF128MUL
help
@@ -487,7 +414,7 @@ config CRYPTO_LRW
config CRYPTO_OFB
tristate "OFB support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
OFB: the Output Feedback mode makes a block cipher into a synchronous
@@ -499,7 +426,7 @@ config CRYPTO_OFB
config CRYPTO_PCBC
tristate "PCBC support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
PCBC: Propagating Cipher Block Chaining mode
@@ -507,7 +434,7 @@ config CRYPTO_PCBC
config CRYPTO_XTS
tristate "XTS support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
select CRYPTO_ECB
help
@@ -517,7 +444,7 @@ config CRYPTO_XTS
config CRYPTO_KEYWRAP
tristate "Key wrapping support"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_MANAGER
help
Support for key wrapping (NIST SP800-38F / RFC3394) without
@@ -526,7 +453,7 @@ config CRYPTO_KEYWRAP
config CRYPTO_NHPOLY1305
tristate
select CRYPTO_HASH
- select CRYPTO_POLY1305
+ select CRYPTO_LIB_POLY1305_GENERIC
config CRYPTO_NHPOLY1305_SSE2
tristate "NHPoly1305 hash function (x86_64 SSE2 implementation)"
@@ -547,7 +474,7 @@ config CRYPTO_NHPOLY1305_AVX2
config CRYPTO_ADIANTUM
tristate "Adiantum support"
select CRYPTO_CHACHA20
- select CRYPTO_POLY1305
+ select CRYPTO_LIB_POLY1305_GENERIC
select CRYPTO_NHPOLY1305
select CRYPTO_MANAGER
help
@@ -568,6 +495,34 @@ config CRYPTO_ADIANTUM
If unsure, say N.
+config CRYPTO_ESSIV
+ tristate "ESSIV support for block encryption"
+ select CRYPTO_AUTHENC
+ help
+ Encrypted salt-sector initialization vector (ESSIV) is an IV
+ generation method that is used in some cases by fscrypt and/or
+ dm-crypt. It uses the hash of the block encryption key as the
+ symmetric key for a block encryption pass applied to the input
+ IV, making low entropy IV sources more suitable for block
+ encryption.
+
+ This driver implements a crypto API template that can be
+ instantiated either as an skcipher or as an AEAD (depending on the
+ type of the first template argument), and which defers encryption
+ and decryption requests to the encapsulated cipher after applying
+ ESSIV to the input IV. Note that in the AEAD case, it is assumed
+ that the keys are presented in the same format used by the authenc
+ template, and that the IV appears at the end of the authenticated
+ associated data (AAD) region (which is how dm-crypt uses it.)
+
+ Note that the use of ESSIV is not recommended for new deployments,
+ and so this only needs to be enabled when interoperability with
+ existing encrypted volumes of filesystems is required, or when
+ building for a particular system that requires it (e.g., when
+ the SoC in question has accelerated CBC but not XTS, making CBC
+ combined with ESSIV the only feasible mode for h/w accelerated
+ block encryption)
+
comment "Hash modes"
config CRYPTO_CMAC
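[Editor's note: the ESSIV help text added above describes the construction compactly: the IV fed to the block cipher is itself encrypted, under a key derived by hashing the bulk encryption key. Below is a minimal userspace sketch of the classic dm-crypt flavor (64-bit sector number encrypted with AES under SHA-256 of the volume key). It uses OpenSSL's legacy AES/SHA-256 primitives purely for illustration (build with -lcrypto); it is not the kernel template, and the placeholder key and host-endian sector encoding are assumptions of the sketch.]

    #include <openssl/aes.h>
    #include <openssl/sha.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /*
     * ESSIV sketch: iv = AES-256_{SHA-256(K)}(sector). The "salt" key is
     * the hash of the volume key, and the per-sector IV is the salt key's
     * encryption of the sector number.
     */
    static void essiv_iv(const uint8_t *key, size_t keylen,
                         uint64_t sector, uint8_t iv[16])
    {
            uint8_t salt[SHA256_DIGEST_LENGTH];
            uint8_t block[16] = { 0 };
            AES_KEY aes;

            SHA256(key, keylen, salt);              /* K_s = H(K) */
            AES_set_encrypt_key(salt, 256, &aes);   /* hash output as AES key */
            memcpy(block, &sector, sizeof(sector)); /* host endian here;
                                                       dm-crypt uses LE64 */
            AES_encrypt(block, iv, &aes);           /* IV = E_{K_s}(sector) */
    }

    int main(void)
    {
            uint8_t key[32] = { 1, 2, 3 };          /* placeholder volume key */
            uint8_t iv[16];
            int i;

            essiv_iv(key, sizeof(key), 42, iv);
            for (i = 0; i < 16; i++)
                    printf("%02x", iv[i]);
            putchar('\n');
            return 0;
    }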
@@ -691,6 +646,47 @@ config CRYPTO_XXHASH
xxHash non-cryptographic hash algorithm. Extremely fast, working at
speeds close to RAM limits.
+config CRYPTO_BLAKE2B
+ tristate "BLAKE2b digest algorithm"
+ select CRYPTO_HASH
+ help
+ Implementation of the BLAKE2b cryptographic hash function (often just
+ called BLAKE2). It is optimized for 64-bit platforms and can produce
+ digests of any size from 1 to 64 bytes. The keyed hash is also
+ implemented.
+
+ This module provides the following algorithms:
+
+ - blake2b-160
+ - blake2b-256
+ - blake2b-384
+ - blake2b-512
+
+ See https://blake2.net for further information.
+
+config CRYPTO_BLAKE2S
+ tristate "BLAKE2s digest algorithm"
+ select CRYPTO_LIB_BLAKE2S_GENERIC
+ select CRYPTO_HASH
+ help
+ Implementation of the BLAKE2s cryptographic hash function. It is
+ optimized for 8- to 32-bit platforms and can produce digests of any
+ size from 1 to 32 bytes. The keyed hash is also implemented.
+
+ This module provides the following algorithms:
+
+ - blake2s-128
+ - blake2s-160
+ - blake2s-224
+ - blake2s-256
+
+ See https://blake2.net for further information.
+
+config CRYPTO_BLAKE2S_X86
+ tristate "BLAKE2s digest algorithm (x86 accelerated version)"
+ depends on X86 && 64BIT
+ select CRYPTO_LIB_BLAKE2S_GENERIC
+ select CRYPTO_ARCH_HAVE_LIB_BLAKE2S
+
config CRYPTO_CRCT10DIF
tristate "CRCT10DIF algorithm"
select CRYPTO_HASH
@@ -728,15 +724,17 @@ config CRYPTO_VPMSUM_TESTER
Unless you are testing these algorithms, you don't need this.
config CRYPTO_GHASH
- tristate "GHASH digest algorithm"
+ tristate "GHASH hash function"
select CRYPTO_GF128MUL
select CRYPTO_HASH
help
- GHASH is message digest algorithm for GCM (Galois/Counter Mode).
+ GHASH is the hash function used in GCM (Galois/Counter Mode).
+ It is not a general-purpose cryptographic hash function.
config CRYPTO_POLY1305
tristate "Poly1305 authenticator algorithm"
select CRYPTO_HASH
+ select CRYPTO_LIB_POLY1305_GENERIC
help
Poly1305 authenticator algorithm, RFC7539.
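[Editor's note: the reworded GHASH help is a good prompt to recall what GHASH actually is: polynomial evaluation over GF(2^128) in a key H = E_K(0^128), not a general-purpose digest. Below is a self-contained sketch of the field multiplication (Algorithm 1 of NIST SP 800-38D, bit-reflected convention). Real implementations use carry-less multiply instructions instead, which is what the CRYPTO_GHASH_CLMUL_NI_INTEL option further down accelerates. The example operands are arbitrary.]

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint64_t hi, lo; } be128;  /* hi holds bytes 0..7 */

    /*
     * GF(2^128) multiplication as GHASH uses it: bit-reflected convention,
     * reduction polynomial x^128 + x^7 + x^2 + x + 1.
     */
    static be128 gf128_mul(be128 x, be128 y)
    {
            be128 z = { 0, 0 }, v = y;
            int i;

            for (i = 0; i < 128; i++) {
                    /* bit i of x, counting from the MSB of byte 0 */
                    uint64_t bit = (i < 64) ? (x.hi >> (63 - i)) & 1
                                            : (x.lo >> (127 - i)) & 1;
                    uint64_t lsb;

                    if (bit) {
                            z.hi ^= v.hi;
                            z.lo ^= v.lo;
                    }
                    /* v >>= 1 in the reflected domain, then reduce */
                    lsb = v.lo & 1;
                    v.lo = (v.lo >> 1) | (v.hi << 63);
                    v.hi >>= 1;
                    if (lsb)
                            v.hi ^= 0xe100000000000000ULL;
            }
            return z;
    }

    int main(void)
    {
            /* arbitrary operands; in GCM, y would be H = E_K(0^128) */
            be128 x = { 0x0388dace60b6a392ULL, 0xf328c2b971b2fe78ULL };
            be128 y = { 0x66e94bd4ef8a2c3bULL, 0x884cfa59ca342b2eULL };
            be128 z = gf128_mul(x, y);

            printf("%016llx%016llx\n", (unsigned long long)z.hi,
                   (unsigned long long)z.lo);
            return 0;
    }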
@@ -747,7 +745,8 @@ config CRYPTO_POLY1305
config CRYPTO_POLY1305_X86_64
tristate "Poly1305 authenticator algorithm (x86_64/SSE2/AVX2)"
depends on X86 && 64BIT
- select CRYPTO_POLY1305
+ select CRYPTO_LIB_POLY1305_GENERIC
+ select CRYPTO_ARCH_HAVE_LIB_POLY1305
help
Poly1305 authenticator algorithm, RFC7539.
@@ -756,6 +755,11 @@ config CRYPTO_POLY1305_X86_64
in IETF protocols. This is the x86_64 assembler implementation using SIMD
instructions.
+config CRYPTO_POLY1305_MIPS
+ tristate "Poly1305 authenticator algorithm (MIPS optimized)"
+ depends on CPU_MIPS32 || (CPU_MIPS64 && 64BIT)
+ select CRYPTO_ARCH_HAVE_LIB_POLY1305
+
config CRYPTO_MD4
tristate "MD4 digest algorithm"
select CRYPTO_HASH
@@ -932,6 +936,7 @@ config CRYPTO_SHA1_PPC_SPE
config CRYPTO_SHA256
tristate "SHA224 and SHA256 digest algorithm"
select CRYPTO_HASH
+ select CRYPTO_LIB_SHA256
help
SHA256 secure hash standard (DFIPS 180-2).
@@ -1057,18 +1062,19 @@ config CRYPTO_WP512
<http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html>
config CRYPTO_GHASH_CLMUL_NI_INTEL
- tristate "GHASH digest algorithm (CLMUL-NI accelerated)"
+ tristate "GHASH hash function (CLMUL-NI accelerated)"
depends on X86 && 64BIT
select CRYPTO_CRYPTD
help
- GHASH is message digest algorithm for GCM (Galois/Counter Mode).
- The implementation is accelerated by CLMUL-NI of Intel.
+ This is the x86_64 CLMUL-NI accelerated implementation of
+ GHASH, the hash function used in GCM (Galois/Counter mode).
comment "Ciphers"
config CRYPTO_AES
tristate "AES cipher algorithms"
select CRYPTO_ALGAPI
+ select CRYPTO_LIB_AES
help
AES cipher algorithms (FIPS-197). AES uses the Rijndael
algorithm.
@@ -1089,6 +1095,7 @@ config CRYPTO_AES
config CRYPTO_AES_TI
tristate "Fixed time AES cipher"
select CRYPTO_ALGAPI
+ select CRYPTO_LIB_AES
help
This is a generic implementation of AES that attempts to eliminate
data dependent latencies as much as possible without affecting
@@ -1104,58 +1111,13 @@ config CRYPTO_AES_TI
block. Interrupts are also disabled to avoid races where cachelines
are evicted when the CPU is interrupted to do something else.
-config CRYPTO_AES_586
- tristate "AES cipher algorithms (i586)"
- depends on (X86 || UML_X86) && !64BIT
- select CRYPTO_ALGAPI
- select CRYPTO_AES
- help
- AES cipher algorithms (FIPS-197). AES uses the Rijndael
- algorithm.
-
- Rijndael appears to be consistently a very good performer in
- both hardware and software across a wide range of computing
- environments regardless of its use in feedback or non-feedback
- modes. Its key setup time is excellent, and its key agility is
- good. Rijndael's very low memory requirements make it very well
- suited for restricted-space environments, in which it also
- demonstrates excellent performance. Rijndael's operations are
- among the easiest to defend against power and timing attacks.
-
- The AES specifies three key sizes: 128, 192 and 256 bits
-
- See <http://csrc.nist.gov/encryption/aes/> for more information.
-
-config CRYPTO_AES_X86_64
- tristate "AES cipher algorithms (x86_64)"
- depends on (X86 || UML_X86) && 64BIT
- select CRYPTO_ALGAPI
- select CRYPTO_AES
- help
- AES cipher algorithms (FIPS-197). AES uses the Rijndael
- algorithm.
-
- Rijndael appears to be consistently a very good performer in
- both hardware and software across a wide range of computing
- environments regardless of its use in feedback or non-feedback
- modes. Its key setup time is excellent, and its key agility is
- good. Rijndael's very low memory requirements make it very well
- suited for restricted-space environments, in which it also
- demonstrates excellent performance. Rijndael's operations are
- among the easiest to defend against power and timing attacks.
-
- The AES specifies three key sizes: 128, 192 and 256 bits
-
- See <http://csrc.nist.gov/encryption/aes/> for more information.
-
config CRYPTO_AES_NI_INTEL
tristate "AES cipher algorithms (AES-NI)"
depends on X86
select CRYPTO_AEAD
- select CRYPTO_AES_X86_64 if 64BIT
- select CRYPTO_AES_586 if !64BIT
+ select CRYPTO_LIB_AES
select CRYPTO_ALGAPI
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_GLUE_HELPER_X86 if 64BIT
select CRYPTO_SIMD
help
@@ -1185,8 +1147,7 @@ config CRYPTO_AES_NI_INTEL
config CRYPTO_AES_SPARC64
tristate "AES cipher algorithms (SPARC64)"
depends on SPARC64
- select CRYPTO_CRYPTD
- select CRYPTO_ALGAPI
+ select CRYPTO_SKCIPHER
help
Use SPARC64 crypto opcodes for AES algorithm.
@@ -1213,6 +1174,7 @@ config CRYPTO_AES_SPARC64
config CRYPTO_AES_PPC_SPE
tristate "AES cipher algorithms (PPC SPE)"
depends on PPC && SPE
+ select CRYPTO_SKCIPHER
help
AES cipher algorithms (FIPS-197). Additionally the acceleration
for popular block cipher modes ECB, CBC, CTR and XTS is supported.
@@ -1237,12 +1199,9 @@ config CRYPTO_ANUBIS
<https://www.cosic.esat.kuleuven.be/nessie/reports/>
<http://www.larc.usp.br/~pbarreto/AnubisPage.html>
-config CRYPTO_LIB_ARC4
- tristate
-
config CRYPTO_ARC4
tristate "ARC4 cipher algorithm"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_LIB_ARC4
help
ARC4 cipher algorithm.
@@ -1278,7 +1237,7 @@ config CRYPTO_BLOWFISH_COMMON
config CRYPTO_BLOWFISH_X86_64
tristate "Blowfish cipher algorithm (x86_64)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_BLOWFISH_COMMON
help
Blowfish cipher algorithm (x86_64), by Bruce Schneier.
@@ -1309,7 +1268,7 @@ config CRYPTO_CAMELLIA_X86_64
tristate "Camellia cipher algorithm (x86_64)"
depends on X86 && 64BIT
depends on CRYPTO
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_GLUE_HELPER_X86
help
Camellia cipher algorithm module (x86_64).
@@ -1326,7 +1285,7 @@ config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX)"
depends on X86 && 64BIT
depends on CRYPTO
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_CAMELLIA_X86_64
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SIMD
@@ -1363,6 +1322,7 @@ config CRYPTO_CAMELLIA_SPARC64
depends on SPARC64
depends on CRYPTO
select CRYPTO_ALGAPI
+ select CRYPTO_SKCIPHER
help
Camellia cipher algorithm module (SPARC64).
@@ -1391,7 +1351,7 @@ config CRYPTO_CAST5
config CRYPTO_CAST5_AVX_X86_64
tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_CAST5
select CRYPTO_CAST_COMMON
select CRYPTO_SIMD
@@ -1413,7 +1373,7 @@ config CRYPTO_CAST6
config CRYPTO_CAST6_AVX_X86_64
tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_CAST6
select CRYPTO_CAST_COMMON
select CRYPTO_GLUE_HELPER_X86
@@ -1429,6 +1389,7 @@ config CRYPTO_CAST6_AVX_X86_64
config CRYPTO_DES
tristate "DES and Triple DES EDE cipher algorithms"
select CRYPTO_ALGAPI
+ select CRYPTO_LIB_DES
help
DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3).
@@ -1436,7 +1397,8 @@ config CRYPTO_DES_SPARC64
tristate "DES and Triple DES EDE cipher algorithms (SPARC64)"
depends on SPARC64
select CRYPTO_ALGAPI
- select CRYPTO_DES
+ select CRYPTO_LIB_DES
+ select CRYPTO_SKCIPHER
help
DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3),
optimized using SPARC64 crypto opcodes.
@@ -1444,8 +1406,8 @@ config CRYPTO_DES_SPARC64
config CRYPTO_DES3_EDE_X86_64
tristate "Triple DES EDE cipher algorithm (x86-64)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
- select CRYPTO_DES
+ select CRYPTO_SKCIPHER
+ select CRYPTO_LIB_DES
help
Triple DES EDE (FIPS 46-3) algorithm.
@@ -1457,7 +1419,7 @@ config CRYPTO_DES3_EDE_X86_64
config CRYPTO_FCRYPT
tristate "FCrypt cipher algorithm"
select CRYPTO_ALGAPI
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
help
FCrypt algorithm used by RxRPC.
@@ -1476,7 +1438,7 @@ config CRYPTO_KHAZAD
config CRYPTO_SALSA20
tristate "Salsa20 stream cipher algorithm"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
help
Salsa20 stream cipher algorithm.
@@ -1488,7 +1450,8 @@ config CRYPTO_SALSA20
config CRYPTO_CHACHA20
tristate "ChaCha stream cipher algorithms"
- select CRYPTO_BLKCIPHER
+ select CRYPTO_LIB_CHACHA_GENERIC
+ select CRYPTO_SKCIPHER
help
The ChaCha20, XChaCha20, and XChaCha12 stream cipher algorithms.
@@ -1510,12 +1473,19 @@ config CRYPTO_CHACHA20
config CRYPTO_CHACHA20_X86_64
tristate "ChaCha stream cipher algorithms (x86_64/SSSE3/AVX2/AVX-512VL)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
- select CRYPTO_CHACHA20
+ select CRYPTO_SKCIPHER
+ select CRYPTO_LIB_CHACHA_GENERIC
+ select CRYPTO_ARCH_HAVE_LIB_CHACHA
help
SSSE3, AVX2, and AVX-512VL optimized implementations of the ChaCha20,
XChaCha20, and XChaCha12 stream ciphers.
+config CRYPTO_CHACHA_MIPS
+ tristate "ChaCha stream cipher algorithms (MIPS 32r2 optimized)"
+ depends on CPU_MIPS32_R2
+ select CRYPTO_SKCIPHER
+ select CRYPTO_ARCH_HAVE_LIB_CHACHA
+
config CRYPTO_SEED
tristate "SEED cipher algorithm"
select CRYPTO_ALGAPI
@@ -1546,7 +1516,7 @@ config CRYPTO_SERPENT
config CRYPTO_SERPENT_SSE2_X86_64
tristate "Serpent cipher algorithm (x86_64/SSE2)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SERPENT
select CRYPTO_SIMD
@@ -1565,7 +1535,7 @@ config CRYPTO_SERPENT_SSE2_X86_64
config CRYPTO_SERPENT_SSE2_586
tristate "Serpent cipher algorithm (i586/SSE2)"
depends on X86 && !64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SERPENT
select CRYPTO_SIMD
@@ -1584,7 +1554,7 @@ config CRYPTO_SERPENT_SSE2_586
config CRYPTO_SERPENT_AVX_X86_64
tristate "Serpent cipher algorithm (x86_64/AVX)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SERPENT
select CRYPTO_SIMD
@@ -1715,7 +1685,7 @@ config CRYPTO_TWOFISH_X86_64
config CRYPTO_TWOFISH_X86_64_3WAY
tristate "Twofish cipher algorithm (x86_64, 3-way parallel)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_TWOFISH_COMMON
select CRYPTO_TWOFISH_X86_64
select CRYPTO_GLUE_HELPER_X86
@@ -1736,7 +1706,7 @@ config CRYPTO_TWOFISH_X86_64_3WAY
config CRYPTO_TWOFISH_AVX_X86_64
tristate "Twofish cipher algorithm (x86_64/AVX)"
depends on X86 && 64BIT
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SIMD
select CRYPTO_TWOFISH_COMMON
@@ -1887,7 +1857,7 @@ config CRYPTO_USER_API_HASH
config CRYPTO_USER_API_SKCIPHER
tristate "User-space interface for symmetric key cipher algorithms"
depends on NET
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_USER_API
help
This option enables the user-spaces interface for symmetric
@@ -1906,7 +1876,7 @@ config CRYPTO_USER_API_AEAD
tristate "User-space interface for AEAD cipher algorithms"
depends on NET
select CRYPTO_AEAD
- select CRYPTO_BLKCIPHER
+ select CRYPTO_SKCIPHER
select CRYPTO_NULL
select CRYPTO_USER_API
help
@@ -1928,6 +1898,7 @@ config CRYPTO_STATS
config CRYPTO_HASH_INFO
bool
+source "lib/crypto/Kconfig"
source "drivers/crypto/Kconfig"
source "crypto/asymmetric_keys/Kconfig"
source "certs/Kconfig"
diff --git a/crypto/Makefile b/crypto/Makefile
index 9479e1a45d8c..4ca12b6044f7 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -14,11 +14,9 @@ crypto_algapi-y := algapi.o scatterwalk.o $(crypto_algapi-y)
obj-$(CONFIG_CRYPTO_ALGAPI2) += crypto_algapi.o
obj-$(CONFIG_CRYPTO_AEAD2) += aead.o
+obj-$(CONFIG_CRYPTO_AEAD2) += geniv.o
-crypto_blkcipher-y := ablkcipher.o
-crypto_blkcipher-y += blkcipher.o
-crypto_blkcipher-y += skcipher.o
-obj-$(CONFIG_CRYPTO_BLKCIPHER2) += crypto_blkcipher.o
+obj-$(CONFIG_CRYPTO_SKCIPHER2) += skcipher.o
obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
obj-$(CONFIG_CRYPTO_ECHAINIV) += echainiv.o
@@ -74,6 +72,8 @@ obj-$(CONFIG_CRYPTO_STREEBOG) += streebog_generic.o
obj-$(CONFIG_CRYPTO_WP512) += wp512.o
CFLAGS_wp512.o := $(call cc-option,-fno-schedule-insns) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149
obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o
+obj-$(CONFIG_CRYPTO_BLAKE2B) += blake2b_generic.o
+obj-$(CONFIG_CRYPTO_BLAKE2S) += blake2s_generic.o
obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o
obj-$(CONFIG_CRYPTO_ECB) += ecb.o
obj-$(CONFIG_CRYPTO_CBC) += cbc.o
@@ -90,10 +90,26 @@ obj-$(CONFIG_CRYPTO_GCM) += gcm.o
obj-$(CONFIG_CRYPTO_CCM) += ccm.o
obj-$(CONFIG_CRYPTO_CHACHA20POLY1305) += chacha20poly1305.o
obj-$(CONFIG_CRYPTO_AEGIS128) += aegis128.o
-obj-$(CONFIG_CRYPTO_AEGIS128L) += aegis128l.o
-obj-$(CONFIG_CRYPTO_AEGIS256) += aegis256.o
-obj-$(CONFIG_CRYPTO_MORUS640) += morus640.o
-obj-$(CONFIG_CRYPTO_MORUS1280) += morus1280.o
+aegis128-y := aegis128-core.o
+
+ifeq ($(ARCH),arm)
+CFLAGS_aegis128-neon-inner.o += -ffreestanding -march=armv8-a -mfloat-abi=softfp
+CFLAGS_aegis128-neon-inner.o += -mfpu=crypto-neon-fp-armv8
+aegis128-$(CONFIG_CRYPTO_AEGIS128_SIMD) += aegis128-neon.o aegis128-neon-inner.o
+endif
+ifeq ($(ARCH),arm64)
+aegis128-cflags-y := -ffreestanding -mcpu=generic+crypto
+aegis128-cflags-$(CONFIG_CC_IS_GCC) += -ffixed-q16 -ffixed-q17 -ffixed-q18 \
+ -ffixed-q19 -ffixed-q20 -ffixed-q21 \
+ -ffixed-q22 -ffixed-q23 -ffixed-q24 \
+ -ffixed-q25 -ffixed-q26 -ffixed-q27 \
+ -ffixed-q28 -ffixed-q29 -ffixed-q30 \
+ -ffixed-q31
+CFLAGS_aegis128-neon-inner.o += $(aegis128-cflags-y)
+CFLAGS_REMOVE_aegis128-neon-inner.o += -mgeneral-regs-only
+aegis128-$(CONFIG_CRYPTO_AEGIS128_SIMD) += aegis128-neon.o aegis128-neon-inner.o
+endif
+
obj-$(CONFIG_CRYPTO_PCRYPT) += pcrypt.o
obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
obj-$(CONFIG_CRYPTO_DES) += des_generic.o
@@ -136,6 +152,8 @@ obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
obj-$(CONFIG_CRYPTO_DRBG) += drbg.o
obj-$(CONFIG_CRYPTO_JITTERENTROPY) += jitterentropy_rng.o
CFLAGS_jitterentropy.o = -O0
+KASAN_SANITIZE_jitterentropy.o = n
+UBSAN_SANITIZE_jitterentropy.o = n
jitterentropy_rng-y := jitterentropy.o jitterentropy-kcapi.o
obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
@@ -147,6 +165,8 @@ obj-$(CONFIG_CRYPTO_USER_API_AEAD) += algif_aead.o
obj-$(CONFIG_CRYPTO_ZSTD) += zstd.o
obj-$(CONFIG_CRYPTO_OFB) += ofb.o
obj-$(CONFIG_CRYPTO_ECC) += ecc.o
+obj-$(CONFIG_CRYPTO_ESSIV) += essiv.o
+obj-$(CONFIG_CRYPTO_CURVE25519) += curve25519-generic.o
ecdh_generic-y += ecdh.o
ecdh_generic-y += ecdh_helper.o
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
deleted file mode 100644
index 072b5646a0a3..000000000000
--- a/crypto/ablkcipher.c
+++ /dev/null
@@ -1,407 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * Asynchronous block chaining cipher operations.
- *
- * This is the asynchronous version of blkcipher.c indicating completion
- * via a callback.
- *
- * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
- */
-
-#include <crypto/internal/skcipher.h>
-#include <linux/err.h>
-#include <linux/kernel.h>
-#include <linux/slab.h>
-#include <linux/seq_file.h>
-#include <linux/cryptouser.h>
-#include <linux/compiler.h>
-#include <net/netlink.h>
-
-#include <crypto/scatterwalk.h>
-
-#include "internal.h"
-
-struct ablkcipher_buffer {
- struct list_head entry;
- struct scatter_walk dst;
- unsigned int len;
- void *data;
-};
-
-enum {
- ABLKCIPHER_WALK_SLOW = 1 << 0,
-};
-
-static inline void ablkcipher_buffer_write(struct ablkcipher_buffer *p)
-{
- scatterwalk_copychunks(p->data, &p->dst, p->len, 1);
-}
-
-void __ablkcipher_walk_complete(struct ablkcipher_walk *walk)
-{
- struct ablkcipher_buffer *p, *tmp;
-
- list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
- ablkcipher_buffer_write(p);
- list_del(&p->entry);
- kfree(p);
- }
-}
-EXPORT_SYMBOL_GPL(__ablkcipher_walk_complete);
-
-static inline void ablkcipher_queue_write(struct ablkcipher_walk *walk,
- struct ablkcipher_buffer *p)
-{
- p->dst = walk->out;
- list_add_tail(&p->entry, &walk->buffers);
-}
-
-/* Get a spot of the specified length that does not straddle a page.
- * The caller needs to ensure that there is enough space for this operation.
- */
-static inline u8 *ablkcipher_get_spot(u8 *start, unsigned int len)
-{
- u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);
-
- return max(start, end_page);
-}
-
-static inline void ablkcipher_done_slow(struct ablkcipher_walk *walk,
- unsigned int n)
-{
- for (;;) {
- unsigned int len_this_page = scatterwalk_pagelen(&walk->out);
-
- if (len_this_page > n)
- len_this_page = n;
- scatterwalk_advance(&walk->out, n);
- if (n == len_this_page)
- break;
- n -= len_this_page;
- scatterwalk_start(&walk->out, sg_next(walk->out.sg));
- }
-}
-
-static inline void ablkcipher_done_fast(struct ablkcipher_walk *walk,
- unsigned int n)
-{
- scatterwalk_advance(&walk->in, n);
- scatterwalk_advance(&walk->out, n);
-}
-
-static int ablkcipher_walk_next(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk);
-
-int ablkcipher_walk_done(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk, int err)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- unsigned int n; /* bytes processed */
- bool more;
-
- if (unlikely(err < 0))
- goto finish;
-
- n = walk->nbytes - err;
- walk->total -= n;
- more = (walk->total != 0);
-
- if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW))) {
- ablkcipher_done_fast(walk, n);
- } else {
- if (WARN_ON(err)) {
- /* unexpected case; didn't process all bytes */
- err = -EINVAL;
- goto finish;
- }
- ablkcipher_done_slow(walk, n);
- }
-
- scatterwalk_done(&walk->in, 0, more);
- scatterwalk_done(&walk->out, 1, more);
-
- if (more) {
- crypto_yield(req->base.flags);
- return ablkcipher_walk_next(req, walk);
- }
- err = 0;
-finish:
- walk->nbytes = 0;
- if (walk->iv != req->info)
- memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize);
- kfree(walk->iv_buffer);
- return err;
-}
-EXPORT_SYMBOL_GPL(ablkcipher_walk_done);
-
-static inline int ablkcipher_next_slow(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk,
- unsigned int bsize,
- unsigned int alignmask,
- void **src_p, void **dst_p)
-{
- unsigned aligned_bsize = ALIGN(bsize, alignmask + 1);
- struct ablkcipher_buffer *p;
- void *src, *dst, *base;
- unsigned int n;
-
- n = ALIGN(sizeof(struct ablkcipher_buffer), alignmask + 1);
- n += (aligned_bsize * 3 - (alignmask + 1) +
- (alignmask & ~(crypto_tfm_ctx_alignment() - 1)));
-
- p = kmalloc(n, GFP_ATOMIC);
- if (!p)
- return ablkcipher_walk_done(req, walk, -ENOMEM);
-
- base = p + 1;
-
- dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1);
- src = dst = ablkcipher_get_spot(dst, bsize);
-
- p->len = bsize;
- p->data = dst;
-
- scatterwalk_copychunks(src, &walk->in, bsize, 0);
-
- ablkcipher_queue_write(walk, p);
-
- walk->nbytes = bsize;
- walk->flags |= ABLKCIPHER_WALK_SLOW;
-
- *src_p = src;
- *dst_p = dst;
-
- return 0;
-}
-
-static inline int ablkcipher_copy_iv(struct ablkcipher_walk *walk,
- struct crypto_tfm *tfm,
- unsigned int alignmask)
-{
- unsigned bs = walk->blocksize;
- unsigned int ivsize = tfm->crt_ablkcipher.ivsize;
- unsigned aligned_bs = ALIGN(bs, alignmask + 1);
- unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) -
- (alignmask + 1);
- u8 *iv;
-
- size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
- walk->iv_buffer = kmalloc(size, GFP_ATOMIC);
- if (!walk->iv_buffer)
- return -ENOMEM;
-
- iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1);
- iv = ablkcipher_get_spot(iv, bs) + aligned_bs;
- iv = ablkcipher_get_spot(iv, bs) + aligned_bs;
- iv = ablkcipher_get_spot(iv, ivsize);
-
- walk->iv = memcpy(iv, walk->iv, ivsize);
- return 0;
-}
-
-static inline int ablkcipher_next_fast(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk)
-{
- walk->src.page = scatterwalk_page(&walk->in);
- walk->src.offset = offset_in_page(walk->in.offset);
- walk->dst.page = scatterwalk_page(&walk->out);
- walk->dst.offset = offset_in_page(walk->out.offset);
-
- return 0;
-}
-
-static int ablkcipher_walk_next(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- unsigned int alignmask, bsize, n;
- void *src, *dst;
- int err;
-
- alignmask = crypto_tfm_alg_alignmask(tfm);
- n = walk->total;
- if (unlikely(n < crypto_tfm_alg_blocksize(tfm))) {
- req->base.flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
- return ablkcipher_walk_done(req, walk, -EINVAL);
- }
-
- walk->flags &= ~ABLKCIPHER_WALK_SLOW;
- src = dst = NULL;
-
- bsize = min(walk->blocksize, n);
- n = scatterwalk_clamp(&walk->in, n);
- n = scatterwalk_clamp(&walk->out, n);
-
- if (n < bsize ||
- !scatterwalk_aligned(&walk->in, alignmask) ||
- !scatterwalk_aligned(&walk->out, alignmask)) {
- err = ablkcipher_next_slow(req, walk, bsize, alignmask,
- &src, &dst);
- goto set_phys_lowmem;
- }
-
- walk->nbytes = n;
-
- return ablkcipher_next_fast(req, walk);
-
-set_phys_lowmem:
- if (err >= 0) {
- walk->src.page = virt_to_page(src);
- walk->dst.page = virt_to_page(dst);
- walk->src.offset = ((unsigned long)src & (PAGE_SIZE - 1));
- walk->dst.offset = ((unsigned long)dst & (PAGE_SIZE - 1));
- }
-
- return err;
-}
-
-static int ablkcipher_walk_first(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- unsigned int alignmask;
-
- alignmask = crypto_tfm_alg_alignmask(tfm);
- if (WARN_ON_ONCE(in_irq()))
- return -EDEADLK;
-
- walk->iv = req->info;
- walk->nbytes = walk->total;
- if (unlikely(!walk->total))
- return 0;
-
- walk->iv_buffer = NULL;
- if (unlikely(((unsigned long)walk->iv & alignmask))) {
- int err = ablkcipher_copy_iv(walk, tfm, alignmask);
-
- if (err)
- return err;
- }
-
- scatterwalk_start(&walk->in, walk->in.sg);
- scatterwalk_start(&walk->out, walk->out.sg);
-
- return ablkcipher_walk_next(req, walk);
-}
-
-int ablkcipher_walk_phys(struct ablkcipher_request *req,
- struct ablkcipher_walk *walk)
-{
- walk->blocksize = crypto_tfm_alg_blocksize(req->base.tfm);
- return ablkcipher_walk_first(req, walk);
-}
-EXPORT_SYMBOL_GPL(ablkcipher_walk_phys);
-
-static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm);
- unsigned long alignmask = crypto_ablkcipher_alignmask(tfm);
- int ret;
- u8 *buffer, *alignbuffer;
- unsigned long absize;
-
- absize = keylen + alignmask;
- buffer = kmalloc(absize, GFP_ATOMIC);
- if (!buffer)
- return -ENOMEM;
-
- alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
- memcpy(alignbuffer, key, keylen);
- ret = cipher->setkey(tfm, alignbuffer, keylen);
- memset(alignbuffer, 0, keylen);
- kfree(buffer);
- return ret;
-}
-
-static int setkey(struct crypto_ablkcipher *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm);
- unsigned long alignmask = crypto_ablkcipher_alignmask(tfm);
-
- if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
- crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
- return -EINVAL;
- }
-
- if ((unsigned long)key & alignmask)
- return setkey_unaligned(tfm, key, keylen);
-
- return cipher->setkey(tfm, key, keylen);
-}
-
-static unsigned int crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type,
- u32 mask)
-{
- return alg->cra_ctxsize;
-}
-
-static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
- u32 mask)
-{
- struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
- struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;
-
- if (alg->ivsize > PAGE_SIZE / 8)
- return -EINVAL;
-
- crt->setkey = setkey;
- crt->encrypt = alg->encrypt;
- crt->decrypt = alg->decrypt;
- crt->base = __crypto_ablkcipher_cast(tfm);
- crt->ivsize = alg->ivsize;
-
- return 0;
-}
-
-#ifdef CONFIG_NET
-static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
-{
- struct crypto_report_blkcipher rblkcipher;
-
- memset(&rblkcipher, 0, sizeof(rblkcipher));
-
- strscpy(rblkcipher.type, "ablkcipher", sizeof(rblkcipher.type));
- strscpy(rblkcipher.geniv, "<default>", sizeof(rblkcipher.geniv));
-
- rblkcipher.blocksize = alg->cra_blocksize;
- rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
- rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
- rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
-
- return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
- sizeof(rblkcipher), &rblkcipher);
-}
-#else
-static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
-{
- return -ENOSYS;
-}
-#endif
-
-static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
- __maybe_unused;
-static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
-{
- struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;
-
- seq_printf(m, "type : ablkcipher\n");
- seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
- "yes" : "no");
- seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
- seq_printf(m, "min keysize : %u\n", ablkcipher->min_keysize);
- seq_printf(m, "max keysize : %u\n", ablkcipher->max_keysize);
- seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize);
- seq_printf(m, "geniv : <default>\n");
-}
-
-const struct crypto_type crypto_ablkcipher_type = {
- .ctxsize = crypto_ablkcipher_ctxsize,
- .init = crypto_init_ablkcipher_ops,
-#ifdef CONFIG_PROC_FS
- .show = crypto_ablkcipher_show,
-#endif
- .report = crypto_ablkcipher_report,
-};
-EXPORT_SYMBOL_GPL(crypto_ablkcipher_type);
diff --git a/crypto/acompress.c b/crypto/acompress.c
index abadcb035a41..84a76723e851 100644
--- a/crypto/acompress.c
+++ b/crypto/acompress.c
@@ -151,9 +151,9 @@ int crypto_register_acomp(struct acomp_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_register_acomp);
-int crypto_unregister_acomp(struct acomp_alg *alg)
+void crypto_unregister_acomp(struct acomp_alg *alg)
{
- return crypto_unregister_alg(&alg->base);
+ crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_acomp);
diff --git a/crypto/adiantum.c b/crypto/adiantum.c
index 395a3ddd3707..cf2b9f4103dd 100644
--- a/crypto/adiantum.c
+++ b/crypto/adiantum.c
@@ -33,13 +33,12 @@
#include <crypto/b128ops.h>
#include <crypto/chacha.h>
#include <crypto/internal/hash.h>
+#include <crypto/internal/poly1305.h>
#include <crypto/internal/skcipher.h>
#include <crypto/nhpoly1305.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
-#include "internal.h"
-
/*
* Size of right-hand part of input data, in bytes; also the size of the block
* cipher's block size and the hash function's output.
@@ -63,7 +62,7 @@
struct adiantum_instance_ctx {
struct crypto_skcipher_spawn streamcipher_spawn;
- struct crypto_spawn blockcipher_spawn;
+ struct crypto_cipher_spawn blockcipher_spawn;
struct crypto_shash_spawn hash_spawn;
};
@@ -71,7 +70,7 @@ struct adiantum_tfm_ctx {
struct crypto_skcipher *streamcipher;
struct crypto_cipher *blockcipher;
struct crypto_shash *hash;
- struct poly1305_key header_hash_key;
+ struct poly1305_core_key header_hash_key;
};
struct adiantum_request_ctx {
@@ -134,9 +133,6 @@ static int adiantum_setkey(struct crypto_skcipher *tfm, const u8 *key,
crypto_skcipher_get_flags(tfm) &
CRYPTO_TFM_REQ_MASK);
err = crypto_skcipher_setkey(tctx->streamcipher, key, keylen);
- crypto_skcipher_set_flags(tfm,
- crypto_skcipher_get_flags(tctx->streamcipher) &
- CRYPTO_TFM_RES_MASK);
if (err)
return err;
@@ -166,9 +162,6 @@ static int adiantum_setkey(struct crypto_skcipher *tfm, const u8 *key,
CRYPTO_TFM_REQ_MASK);
err = crypto_cipher_setkey(tctx->blockcipher, keyp,
BLOCKCIPHER_KEY_SIZE);
- crypto_skcipher_set_flags(tfm,
- crypto_cipher_get_flags(tctx->blockcipher) &
- CRYPTO_TFM_RES_MASK);
if (err)
goto out;
keyp += BLOCKCIPHER_KEY_SIZE;
@@ -181,8 +174,6 @@ static int adiantum_setkey(struct crypto_skcipher *tfm, const u8 *key,
crypto_shash_set_flags(tctx->hash, crypto_skcipher_get_flags(tfm) &
CRYPTO_TFM_REQ_MASK);
err = crypto_shash_setkey(tctx->hash, keyp, NHPOLY1305_KEY_SIZE);
- crypto_skcipher_set_flags(tfm, crypto_shash_get_flags(tctx->hash) &
- CRYPTO_TFM_RES_MASK);
keyp += NHPOLY1305_KEY_SIZE;
WARN_ON(keyp != &data->derived_keys[ARRAY_SIZE(data->derived_keys)]);
out:
@@ -242,13 +233,13 @@ static void adiantum_hash_header(struct skcipher_request *req)
BUILD_BUG_ON(sizeof(header) % POLY1305_BLOCK_SIZE != 0);
poly1305_core_blocks(&state, &tctx->header_hash_key,
- &header, sizeof(header) / POLY1305_BLOCK_SIZE);
+ &header, sizeof(header) / POLY1305_BLOCK_SIZE, 1);
BUILD_BUG_ON(TWEAK_SIZE % POLY1305_BLOCK_SIZE != 0);
poly1305_core_blocks(&state, &tctx->header_hash_key, req->iv,
- TWEAK_SIZE / POLY1305_BLOCK_SIZE);
+ TWEAK_SIZE / POLY1305_BLOCK_SIZE, 1);
- poly1305_core_emit(&state, &rctx->header_hash);
+ poly1305_core_emit(&state, NULL, &rctx->header_hash);
}
/* Hash the left-hand part (the "bulk") of the message using NHPoly1305 */
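[Editor's note: the extra trailing 1 passed to poly1305_core_blocks() is the pad bit, and the NULL passed to poly1305_core_emit() skips the final addition of the one-time key s. As a sketch of the Poly1305 definition (not the kernel code): each 16-byte block m_i is read as a little-endian integer, the pad bit contributes an extra 2^128 on top of it, and the accumulator is updated as

    h \gets (h + m_i + \mathrm{padbit} \cdot 2^{128}) \cdot r \bmod (2^{130} - 5)

Adiantum's header hash keeps padbit = 1 (full blocks only, as the BUILD_BUG_ONs above check) and never adds s, since it uses Poly1305 as an ε-∆U hash rather than as a MAC.]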
@@ -435,10 +426,10 @@ static int adiantum_init_tfm(struct crypto_skcipher *tfm)
BUILD_BUG_ON(offsetofend(struct adiantum_request_ctx, u) !=
sizeof(struct adiantum_request_ctx));
- subreq_size = max(FIELD_SIZEOF(struct adiantum_request_ctx,
+ subreq_size = max(sizeof_field(struct adiantum_request_ctx,
u.hash_desc) +
crypto_shash_descsize(hash),
- FIELD_SIZEOF(struct adiantum_request_ctx,
+ sizeof_field(struct adiantum_request_ctx,
u.streamcipher_req) +
crypto_skcipher_reqsize(streamcipher));
@@ -468,7 +459,7 @@ static void adiantum_free_instance(struct skcipher_instance *inst)
struct adiantum_instance_ctx *ictx = skcipher_instance_ctx(inst);
crypto_drop_skcipher(&ictx->streamcipher_spawn);
- crypto_drop_spawn(&ictx->blockcipher_spawn);
+ crypto_drop_cipher(&ictx->blockcipher_spawn);
crypto_drop_shash(&ictx->hash_spawn);
kfree(inst);
}
@@ -500,14 +491,12 @@ static bool adiantum_supported_algorithms(struct skcipher_alg *streamcipher_alg,
static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct crypto_attr_type *algt;
- const char *streamcipher_name;
- const char *blockcipher_name;
+ u32 mask;
const char *nhpoly1305_name;
struct skcipher_instance *inst;
struct adiantum_instance_ctx *ictx;
struct skcipher_alg *streamcipher_alg;
struct crypto_alg *blockcipher_alg;
- struct crypto_alg *_hash_alg;
struct shash_alg *hash_alg;
int err;
@@ -518,19 +507,7 @@ static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return -EINVAL;
- streamcipher_name = crypto_attr_alg_name(tb[1]);
- if (IS_ERR(streamcipher_name))
- return PTR_ERR(streamcipher_name);
-
- blockcipher_name = crypto_attr_alg_name(tb[2]);
- if (IS_ERR(blockcipher_name))
- return PTR_ERR(blockcipher_name);
-
- nhpoly1305_name = crypto_attr_alg_name(tb[3]);
- if (nhpoly1305_name == ERR_PTR(-ENOENT))
- nhpoly1305_name = "nhpoly1305";
- if (IS_ERR(nhpoly1305_name))
- return PTR_ERR(nhpoly1305_name);
+ mask = crypto_requires_sync(algt->type, algt->mask);
inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
if (!inst)
@@ -538,37 +515,31 @@ static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
ictx = skcipher_instance_ctx(inst);
/* Stream cipher, e.g. "xchacha12" */
- crypto_set_skcipher_spawn(&ictx->streamcipher_spawn,
- skcipher_crypto_instance(inst));
- err = crypto_grab_skcipher(&ictx->streamcipher_spawn, streamcipher_name,
- 0, crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(&ictx->streamcipher_spawn,
+ skcipher_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, mask);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
streamcipher_alg = crypto_spawn_skcipher_alg(&ictx->streamcipher_spawn);
/* Block cipher, e.g. "aes" */
- crypto_set_spawn(&ictx->blockcipher_spawn,
- skcipher_crypto_instance(inst));
- err = crypto_grab_spawn(&ictx->blockcipher_spawn, blockcipher_name,
- CRYPTO_ALG_TYPE_CIPHER, CRYPTO_ALG_TYPE_MASK);
+ err = crypto_grab_cipher(&ictx->blockcipher_spawn,
+ skcipher_crypto_instance(inst),
+ crypto_attr_alg_name(tb[2]), 0, mask);
if (err)
- goto out_drop_streamcipher;
- blockcipher_alg = ictx->blockcipher_spawn.alg;
+ goto err_free_inst;
+ blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);
/* NHPoly1305 ε-∆U hash function */
- _hash_alg = crypto_alg_mod_lookup(nhpoly1305_name,
- CRYPTO_ALG_TYPE_SHASH,
- CRYPTO_ALG_TYPE_MASK);
- if (IS_ERR(_hash_alg)) {
- err = PTR_ERR(_hash_alg);
- goto out_drop_blockcipher;
- }
- hash_alg = __crypto_shash_alg(_hash_alg);
- err = crypto_init_shash_spawn(&ictx->hash_spawn, hash_alg,
- skcipher_crypto_instance(inst));
+ nhpoly1305_name = crypto_attr_alg_name(tb[3]);
+ if (nhpoly1305_name == ERR_PTR(-ENOENT))
+ nhpoly1305_name = "nhpoly1305";
+ err = crypto_grab_shash(&ictx->hash_spawn,
+ skcipher_crypto_instance(inst),
+ nhpoly1305_name, 0, mask);
if (err)
- goto out_put_hash;
+ goto err_free_inst;
+ hash_alg = crypto_spawn_shash_alg(&ictx->hash_spawn);
/* Check the set of algorithms */
if (!adiantum_supported_algorithms(streamcipher_alg, blockcipher_alg,
@@ -577,7 +548,7 @@ static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
streamcipher_alg->base.cra_name,
blockcipher_alg->cra_name, hash_alg->base.cra_name);
err = -EINVAL;
- goto out_drop_hash;
+ goto err_free_inst;
}
/* Instance fields */
@@ -586,13 +557,13 @@ static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
"adiantum(%s,%s)", streamcipher_alg->base.cra_name,
blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
- goto out_drop_hash;
+ goto err_free_inst;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"adiantum(%s,%s,%s)",
streamcipher_alg->base.cra_driver_name,
blockcipher_alg->cra_driver_name,
hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
- goto out_drop_hash;
+ goto err_free_inst;
inst->alg.base.cra_flags = streamcipher_alg->base.cra_flags &
CRYPTO_ALG_ASYNC;
@@ -622,22 +593,10 @@ static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->free = adiantum_free_instance;
err = skcipher_register_instance(tmpl, inst);
- if (err)
- goto out_drop_hash;
-
- crypto_mod_put(_hash_alg);
- return 0;
-
-out_drop_hash:
- crypto_drop_shash(&ictx->hash_spawn);
-out_put_hash:
- crypto_mod_put(_hash_alg);
-out_drop_blockcipher:
- crypto_drop_spawn(&ictx->blockcipher_spawn);
-out_drop_streamcipher:
- crypto_drop_skcipher(&ictx->streamcipher_spawn);
-out_free_inst:
- kfree(inst);
+ if (err) {
+err_free_inst:
+ adiantum_free_instance(inst);
+ }
return err;
}
diff --git a/crypto/aead.c b/crypto/aead.c
index fbf0ec93bc8e..16991095270d 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -7,19 +7,14 @@
* Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
*/
-#include <crypto/internal/geniv.h>
-#include <crypto/internal/rng.h>
-#include <crypto/null.h>
-#include <crypto/scatterwalk.h>
-#include <linux/err.h>
+#include <crypto/internal/aead.h>
+#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
-#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
-#include <linux/compiler.h>
#include <net/netlink.h>
#include "internal.h"
@@ -70,7 +65,8 @@ int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
int err;
- if (authsize > crypto_aead_maxauthsize(tfm))
+ if ((!authsize && crypto_aead_maxauthsize(tfm)) ||
+ authsize > crypto_aead_maxauthsize(tfm))
return -EINVAL;
if (crypto_aead_alg(tfm)->setauthsize) {
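[Editor's note: the strengthened check above rejects a zero tag length for any transform that can actually produce a tag, while still accepting authsize == 0 when maxauthsize is itself 0, presumably so degenerate no-tag constructions keep working. A stand-alone restatement of the predicate (the helper name is hypothetical, used only to make the truth table explicit):]

    #include <stdio.h>

    /* hypothetical restatement of the new crypto_aead_setauthsize() check:
     * returns 1 to accept the requested tag size, 0 to reject it */
    static int authsize_ok(unsigned int authsize, unsigned int maxauthsize)
    {
            if ((!authsize && maxauthsize) || authsize > maxauthsize)
                    return 0;
            return 1;
    }

    int main(void)
    {
            printf("%d\n", authsize_ok(0, 16));  /* 0: zero tag now rejected */
            printf("%d\n", authsize_ok(16, 16)); /* 1: full-size tag accepted */
            printf("%d\n", authsize_ok(20, 16)); /* 0: oversized tag rejected */
            printf("%d\n", authsize_ok(0, 0));   /* 1: tag-less algos still work */
            return 0;
    }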
@@ -189,11 +185,6 @@ static void crypto_aead_free_instance(struct crypto_instance *inst)
{
struct aead_instance *aead = aead_instance(inst);
- if (!aead->free) {
- inst->tmpl->free(inst);
- return;
- }
-
aead->free(aead);
}
@@ -211,167 +202,12 @@ static const struct crypto_type crypto_aead_type = {
.tfmsize = offsetof(struct crypto_aead, base),
};
-static int aead_geniv_setkey(struct crypto_aead *tfm,
- const u8 *key, unsigned int keylen)
-{
- struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
-
- return crypto_aead_setkey(ctx->child, key, keylen);
-}
-
-static int aead_geniv_setauthsize(struct crypto_aead *tfm,
- unsigned int authsize)
-{
- struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
-
- return crypto_aead_setauthsize(ctx->child, authsize);
-}
-
-struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
- struct rtattr **tb, u32 type, u32 mask)
-{
- const char *name;
- struct crypto_aead_spawn *spawn;
- struct crypto_attr_type *algt;
- struct aead_instance *inst;
- struct aead_alg *alg;
- unsigned int ivsize;
- unsigned int maxauthsize;
- int err;
-
- algt = crypto_get_attr_type(tb);
- if (IS_ERR(algt))
- return ERR_CAST(algt);
-
- if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
- return ERR_PTR(-EINVAL);
-
- name = crypto_attr_alg_name(tb[1]);
- if (IS_ERR(name))
- return ERR_CAST(name);
-
- inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
- if (!inst)
- return ERR_PTR(-ENOMEM);
-
- spawn = aead_instance_ctx(inst);
-
- /* Ignore async algorithms if necessary. */
- mask |= crypto_requires_sync(algt->type, algt->mask);
-
- crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
- err = crypto_grab_aead(spawn, name, type, mask);
- if (err)
- goto err_free_inst;
-
- alg = crypto_spawn_aead_alg(spawn);
-
- ivsize = crypto_aead_alg_ivsize(alg);
- maxauthsize = crypto_aead_alg_maxauthsize(alg);
-
- err = -EINVAL;
- if (ivsize < sizeof(u64))
- goto err_drop_alg;
-
- err = -ENAMETOOLONG;
- if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
- "%s(%s)", tmpl->name, alg->base.cra_name) >=
- CRYPTO_MAX_ALG_NAME)
- goto err_drop_alg;
- if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
- "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
- CRYPTO_MAX_ALG_NAME)
- goto err_drop_alg;
-
- inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
- inst->alg.base.cra_priority = alg->base.cra_priority;
- inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
- inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
- inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
-
- inst->alg.setkey = aead_geniv_setkey;
- inst->alg.setauthsize = aead_geniv_setauthsize;
-
- inst->alg.ivsize = ivsize;
- inst->alg.maxauthsize = maxauthsize;
-
-out:
- return inst;
-
-err_drop_alg:
- crypto_drop_aead(spawn);
-err_free_inst:
- kfree(inst);
- inst = ERR_PTR(err);
- goto out;
-}
-EXPORT_SYMBOL_GPL(aead_geniv_alloc);
-
-void aead_geniv_free(struct aead_instance *inst)
-{
- crypto_drop_aead(aead_instance_ctx(inst));
- kfree(inst);
-}
-EXPORT_SYMBOL_GPL(aead_geniv_free);
-
-int aead_init_geniv(struct crypto_aead *aead)
-{
- struct aead_geniv_ctx *ctx = crypto_aead_ctx(aead);
- struct aead_instance *inst = aead_alg_instance(aead);
- struct crypto_aead *child;
- int err;
-
- spin_lock_init(&ctx->lock);
-
- err = crypto_get_default_rng();
- if (err)
- goto out;
-
- err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
- crypto_aead_ivsize(aead));
- crypto_put_default_rng();
- if (err)
- goto out;
-
- ctx->sknull = crypto_get_default_null_skcipher();
- err = PTR_ERR(ctx->sknull);
- if (IS_ERR(ctx->sknull))
- goto out;
-
- child = crypto_spawn_aead(aead_instance_ctx(inst));
- err = PTR_ERR(child);
- if (IS_ERR(child))
- goto drop_null;
-
- ctx->child = child;
- crypto_aead_set_reqsize(aead, crypto_aead_reqsize(child) +
- sizeof(struct aead_request));
-
- err = 0;
-
-out:
- return err;
-
-drop_null:
- crypto_put_default_null_skcipher();
- goto out;
-}
-EXPORT_SYMBOL_GPL(aead_init_geniv);
-
-void aead_exit_geniv(struct crypto_aead *tfm)
-{
- struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
-
- crypto_free_aead(ctx->child);
- crypto_put_default_null_skcipher();
-}
-EXPORT_SYMBOL_GPL(aead_exit_geniv);
-
-int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
- u32 type, u32 mask)
+int crypto_grab_aead(struct crypto_aead_spawn *spawn,
+ struct crypto_instance *inst,
+ const char *name, u32 type, u32 mask)
{
spawn->base.frontend = &crypto_aead_type;
- return crypto_grab_spawn(&spawn->base, name, type, mask);
+ return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);
@@ -452,6 +288,9 @@ int aead_register_instance(struct crypto_template *tmpl,
{
int err;
+ if (WARN_ON(!inst->free))
+ return -EINVAL;
+
err = aead_prepare_alg(&inst->alg);
if (err)
return err;
diff --git a/crypto/aegis.h b/crypto/aegis.h
index 41a3090cda8e..6920ebe77679 100644
--- a/crypto/aegis.h
+++ b/crypto/aegis.h
@@ -10,6 +10,7 @@
#define _CRYPTO_AEGIS_H
#include <crypto/aes.h>
+#include <linux/bitops.h>
#include <linux/types.h>
#define AEGIS_BLOCK_SIZE 16
@@ -23,46 +24,32 @@ union aegis_block {
#define AEGIS_BLOCK_ALIGN (__alignof__(union aegis_block))
#define AEGIS_ALIGNED(p) IS_ALIGNED((uintptr_t)p, AEGIS_BLOCK_ALIGN)
-static const union aegis_block crypto_aegis_const[2] = {
- { .words64 = {
- cpu_to_le64(U64_C(0x0d08050302010100)),
- cpu_to_le64(U64_C(0x6279e99059372215)),
- } },
- { .words64 = {
- cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
- cpu_to_le64(U64_C(0xdd28b57342311120)),
- } },
-};
-
-static void crypto_aegis_block_xor(union aegis_block *dst,
- const union aegis_block *src)
+static __always_inline void crypto_aegis_block_xor(union aegis_block *dst,
+ const union aegis_block *src)
{
dst->words64[0] ^= src->words64[0];
dst->words64[1] ^= src->words64[1];
}
-static void crypto_aegis_block_and(union aegis_block *dst,
- const union aegis_block *src)
+static __always_inline void crypto_aegis_block_and(union aegis_block *dst,
+ const union aegis_block *src)
{
dst->words64[0] &= src->words64[0];
dst->words64[1] &= src->words64[1];
}
-static void crypto_aegis_aesenc(union aegis_block *dst,
- const union aegis_block *src,
- const union aegis_block *key)
+static __always_inline void crypto_aegis_aesenc(union aegis_block *dst,
+ const union aegis_block *src,
+ const union aegis_block *key)
{
const u8 *s = src->bytes;
- const u32 *t0 = crypto_ft_tab[0];
- const u32 *t1 = crypto_ft_tab[1];
- const u32 *t2 = crypto_ft_tab[2];
- const u32 *t3 = crypto_ft_tab[3];
+ const u32 *t = crypto_ft_tab[0];
u32 d0, d1, d2, d3;
- d0 = t0[s[ 0]] ^ t1[s[ 5]] ^ t2[s[10]] ^ t3[s[15]];
- d1 = t0[s[ 4]] ^ t1[s[ 9]] ^ t2[s[14]] ^ t3[s[ 3]];
- d2 = t0[s[ 8]] ^ t1[s[13]] ^ t2[s[ 2]] ^ t3[s[ 7]];
- d3 = t0[s[12]] ^ t1[s[ 1]] ^ t2[s[ 6]] ^ t3[s[11]];
+ d0 = t[s[ 0]] ^ rol32(t[s[ 5]], 8) ^ rol32(t[s[10]], 16) ^ rol32(t[s[15]], 24);
+ d1 = t[s[ 4]] ^ rol32(t[s[ 9]], 8) ^ rol32(t[s[14]], 16) ^ rol32(t[s[ 3]], 24);
+ d2 = t[s[ 8]] ^ rol32(t[s[13]], 8) ^ rol32(t[s[ 2]], 16) ^ rol32(t[s[ 7]], 24);
+ d3 = t[s[12]] ^ rol32(t[s[ 1]], 8) ^ rol32(t[s[ 6]], 16) ^ rol32(t[s[11]], 24);
dst->words32[0] = cpu_to_le32(d0) ^ key->words32[0];
dst->words32[1] = cpu_to_le32(d1) ^ key->words32[1];
dst->words32[2] = cpu_to_le32(d2) ^ key->words32[2];
dst->words32[3] = cpu_to_le32(d3) ^ key->words32[3];
}
diff --git a/crypto/aegis128.c b/crypto/aegis128-core.c
index d78f77fc5dd1..44fb4956f0dd 100644
--- a/crypto/aegis128.c
+++ b/crypto/aegis128-core.c
@@ -8,14 +8,18 @@
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
+#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
+#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
+#include <asm/simd.h>
+
#include "aegis.h"
#define AEGIS128_NONCE_SIZE 16
@@ -32,14 +36,41 @@ struct aegis_ctx {
union aegis_block key;
};
-struct aegis128_ops {
- int (*skcipher_walk_init)(struct skcipher_walk *walk,
- struct aead_request *req, bool atomic);
-
- void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size);
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);
+
+static const union aegis_block crypto_aegis_const[2] = {
+ { .words64 = {
+ cpu_to_le64(U64_C(0x0d08050302010100)),
+ cpu_to_le64(U64_C(0x6279e99059372215)),
+ } },
+ { .words64 = {
+ cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
+ cpu_to_le64(U64_C(0xdd28b57342311120)),
+ } },
};
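+/*
+ * The static key is patched once at boot; while it is off, the scalar
+ * code runs with no indirection overhead at all.
+ */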
+static bool aegis128_do_simd(void)
+{
+#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
+ if (static_branch_likely(&have_simd))
+ return crypto_simd_usable();
+#endif
+ return false;
+}
+
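+/* implemented by the arch-specific SIMD glue, e.g. aegis128-neon.c */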
+bool crypto_aegis128_have_simd(void);
+void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
+void crypto_aegis128_init_simd(struct aegis_state *state,
+ const union aegis_block *key,
+ const u8 *iv);
+void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
+ const u8 *src, unsigned int size);
+void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
+ const u8 *src, unsigned int size);
+void crypto_aegis128_final_simd(struct aegis_state *state,
+ union aegis_block *tag_xor,
+ u64 assoclen, u64 cryptlen);
+
static void crypto_aegis128_update(struct aegis_state *state)
{
union aegis_block tmp;
@@ -55,12 +86,22 @@ static void crypto_aegis128_update(struct aegis_state *state)
static void crypto_aegis128_update_a(struct aegis_state *state,
const union aegis_block *msg)
{
+ if (aegis128_do_simd()) {
+ crypto_aegis128_update_simd(state, msg);
+ return;
+ }
+
crypto_aegis128_update(state);
crypto_aegis_block_xor(&state->blocks[0], msg);
}
static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg)
{
+ if (aegis128_do_simd()) {
+ crypto_aegis128_update_simd(state, msg);
+ return;
+ }
+
crypto_aegis128_update(state);
crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}
@@ -281,25 +322,27 @@ static void crypto_aegis128_process_ad(struct aegis_state *state,
}
}
-static void crypto_aegis128_process_crypt(struct aegis_state *state,
- struct aead_request *req,
- const struct aegis128_ops *ops)
+static __always_inline
+int crypto_aegis128_process_crypt(struct aegis_state *state,
+ struct aead_request *req,
+ struct skcipher_walk *walk,
+ void (*crypt)(struct aegis_state *state,
+ u8 *dst, const u8 *src,
+ unsigned int size))
{
- struct skcipher_walk walk;
+ int err = 0;
- ops->skcipher_walk_init(&walk, req, false);
+ while (walk->nbytes) {
+ unsigned int nbytes = walk->nbytes;
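+ /* every step but the last must consume a multiple of the stride */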
- while (walk.nbytes) {
- unsigned int nbytes = walk.nbytes;
+ if (nbytes < walk->total)
+ nbytes = round_down(nbytes, walk->stride);
- if (nbytes < walk.total)
- nbytes = round_down(nbytes, walk.stride);
+ crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);
- ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes);
-
- skcipher_walk_done(&walk, walk.nbytes - nbytes);
+ err = skcipher_walk_done(walk, walk->nbytes - nbytes);
}
+ return err;
}
static void crypto_aegis128_final(struct aegis_state *state,
@@ -329,10 +372,8 @@ static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
{
struct aegis_ctx *ctx = crypto_aead_ctx(aead);
- if (keylen != AEGIS128_KEY_SIZE) {
- crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != AEGIS128_KEY_SIZE)
return -EINVAL;
- }
memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
return 0;
@@ -348,34 +389,31 @@ static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
return 0;
}
-static void crypto_aegis128_crypt(struct aead_request *req,
- union aegis_block *tag_xor,
- unsigned int cryptlen,
- const struct aegis128_ops *ops)
-{
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
- struct aegis_state state;
-
- crypto_aegis128_init(&state, &ctx->key, req->iv);
- crypto_aegis128_process_ad(&state, req->src, req->assoclen);
- crypto_aegis128_process_crypt(&state, req, ops);
- crypto_aegis128_final(&state, tag_xor, req->assoclen, cryptlen);
-}
-
static int crypto_aegis128_encrypt(struct aead_request *req)
{
- static const struct aegis128_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_encrypt,
- .crypt_chunk = crypto_aegis128_encrypt_chunk,
- };
-
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
union aegis_block tag = {};
unsigned int authsize = crypto_aead_authsize(tfm);
+ struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
unsigned int cryptlen = req->cryptlen;
+ struct skcipher_walk walk;
+ struct aegis_state state;
- crypto_aegis128_crypt(req, &tag, cryptlen, &ops);
+ skcipher_walk_aead_encrypt(&walk, req, false);
+ if (aegis128_do_simd()) {
+ crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
+ crypto_aegis128_process_ad(&state, req->src, req->assoclen);
+ crypto_aegis128_process_crypt(&state, req, &walk,
+ crypto_aegis128_encrypt_chunk_simd);
+ crypto_aegis128_final_simd(&state, &tag, req->assoclen,
+ cryptlen);
+ } else {
+ crypto_aegis128_init(&state, &ctx->key, req->iv);
+ crypto_aegis128_process_ad(&state, req->src, req->assoclen);
+ crypto_aegis128_process_crypt(&state, req, &walk,
+ crypto_aegis128_encrypt_chunk);
+ crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
+ }
scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
authsize, 1);
@@ -384,41 +422,42 @@ static int crypto_aegis128_encrypt(struct aead_request *req)
static int crypto_aegis128_decrypt(struct aead_request *req)
{
- static const struct aegis128_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_decrypt,
- .crypt_chunk = crypto_aegis128_decrypt_chunk,
- };
static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
-
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
union aegis_block tag;
unsigned int authsize = crypto_aead_authsize(tfm);
unsigned int cryptlen = req->cryptlen - authsize;
+ struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
+ struct skcipher_walk walk;
+ struct aegis_state state;
scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
authsize, 0);
- crypto_aegis128_crypt(req, &tag, cryptlen, &ops);
+ skcipher_walk_aead_decrypt(&walk, req, false);
+ if (aegis128_do_simd()) {
+ crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
+ crypto_aegis128_process_ad(&state, req->src, req->assoclen);
+ crypto_aegis128_process_crypt(&state, req, &walk,
+ crypto_aegis128_decrypt_chunk_simd);
+ crypto_aegis128_final_simd(&state, &tag, req->assoclen,
+ cryptlen);
+ } else {
+ crypto_aegis128_init(&state, &ctx->key, req->iv);
+ crypto_aegis128_process_ad(&state, req->src, req->assoclen);
+ crypto_aegis128_process_crypt(&state, req, &walk,
+ crypto_aegis128_decrypt_chunk);
+ crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
+ }
return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}
-static int crypto_aegis128_init_tfm(struct crypto_aead *tfm)
-{
- return 0;
-}
-
-static void crypto_aegis128_exit_tfm(struct crypto_aead *tfm)
-{
-}
-
static struct aead_alg crypto_aegis128_alg = {
.setkey = crypto_aegis128_setkey,
.setauthsize = crypto_aegis128_setauthsize,
.encrypt = crypto_aegis128_encrypt,
.decrypt = crypto_aegis128_decrypt,
- .init = crypto_aegis128_init_tfm,
- .exit = crypto_aegis128_exit_tfm,
.ivsize = AEGIS128_NONCE_SIZE,
.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
@@ -440,6 +479,10 @@ static struct aead_alg crypto_aegis128_alg = {
static int __init crypto_aegis128_module_init(void)
{
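+ /* patch the static key once, before the algorithm is registered */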
+ if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
+ crypto_aegis128_have_simd())
+ static_branch_enable(&have_simd);
+
return crypto_register_aead(&crypto_aegis128_alg);
}
diff --git a/crypto/aegis128-neon-inner.c b/crypto/aegis128-neon-inner.c
new file mode 100644
index 000000000000..2a660ac1bc3a
--- /dev/null
+++ b/crypto/aegis128-neon-inner.c
@@ -0,0 +1,262 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Copyright (C) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
+ */
+
+#ifdef CONFIG_ARM64
+#include <asm/neon-intrinsics.h>
+
+#define AES_ROUND "aese %0.16b, %1.16b \n\t aesmc %0.16b, %0.16b"
+#else
+#include <arm_neon.h>
+
+#define AES_ROUND "aese.8 %q0, %q1 \n\t aesmc.8 %q0, %q0"
+#endif
+
+#define AEGIS_BLOCK_SIZE 16
+
+#include <stddef.h>
+
+extern int aegis128_have_aes_insn;
+
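+/*
+ * This file is built free-standing so that the NEON intrinsics stay
+ * out of generic kernel headers; declare the few string routines we
+ * use by hand.
+ */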
+void *memcpy(void *dest, const void *src, size_t n);
+void *memset(void *s, int c, size_t n);
+
+struct aegis128_state {
+ uint8x16_t v[5];
+};
+
+extern const uint8_t crypto_aes_sbox[];
+
+static struct aegis128_state aegis128_load_state_neon(const void *state)
+{
+ return (struct aegis128_state){ {
+ vld1q_u8(state),
+ vld1q_u8(state + 16),
+ vld1q_u8(state + 32),
+ vld1q_u8(state + 48),
+ vld1q_u8(state + 64)
+ } };
+}
+
+static void aegis128_save_state_neon(struct aegis128_state st, void *state)
+{
+ vst1q_u8(state, st.v[0]);
+ vst1q_u8(state + 16, st.v[1]);
+ vst1q_u8(state + 32, st.v[2]);
+ vst1q_u8(state + 48, st.v[3]);
+ vst1q_u8(state + 64, st.v[4]);
+}
+
+static inline __attribute__((always_inline))
+uint8x16_t aegis_aes_round(uint8x16_t w)
+{
+ uint8x16_t z = {};
+
+#ifdef CONFIG_ARM64
+ if (!__builtin_expect(aegis128_have_aes_insn, 1)) {
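+ /*
+ * CPU lacks the AES instructions: synthesize one round from
+ * tbl-based ShiftRows/SubBytes and an arithmetic MixColumns.
+ */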
+ static const uint8_t shift_rows[] = {
+ 0x0, 0x5, 0xa, 0xf, 0x4, 0x9, 0xe, 0x3,
+ 0x8, 0xd, 0x2, 0x7, 0xc, 0x1, 0x6, 0xb,
+ };
+ static const uint8_t ror32by8[] = {
+ 0x1, 0x2, 0x3, 0x0, 0x5, 0x6, 0x7, 0x4,
+ 0x9, 0xa, 0xb, 0x8, 0xd, 0xe, 0xf, 0xc,
+ };
+ uint8x16_t v;
+
+ // shift rows
+ w = vqtbl1q_u8(w, vld1q_u8(shift_rows));
+
+ // sub bytes
+#ifndef CONFIG_CC_IS_GCC
+ v = vqtbl4q_u8(vld1q_u8_x4(crypto_aes_sbox), w);
+ v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0x40), w - 0x40);
+ v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0x80), w - 0x80);
+ v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0xc0), w - 0xc0);
+#else
+ asm("tbl %0.16b, {v16.16b-v19.16b}, %1.16b" : "=w"(v) : "w"(w));
+ w -= 0x40;
+ asm("tbx %0.16b, {v20.16b-v23.16b}, %1.16b" : "+w"(v) : "w"(w));
+ w -= 0x40;
+ asm("tbx %0.16b, {v24.16b-v27.16b}, %1.16b" : "+w"(v) : "w"(w));
+ w -= 0x40;
+ asm("tbx %0.16b, {v28.16b-v31.16b}, %1.16b" : "+w"(v) : "w"(w));
+#endif
+
+ // mix columns
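+ // xtime: (v << 1), reduced by 0x1b where a byte's top bit was set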
+ w = (v << 1) ^ (uint8x16_t)(((int8x16_t)v >> 7) & 0x1b);
+ w ^= (uint8x16_t)vrev32q_u16((uint16x8_t)v);
+ w ^= vqtbl1q_u8(v ^ w, vld1q_u8(ror32by8));
+
+ return w;
+ }
+#endif
+
+ /*
+ * We use inline asm here instead of the vaeseq_u8/vaesmcq_u8 intrinsics
+ * to force the compiler to issue the aese/aesmc instructions in pairs.
+ * This is much faster on many cores, where the instruction pair can
+ * execute in a single cycle.
+ */
+ asm(AES_ROUND : "+w"(w) : "w"(z));
+ return w;
+}
+
+static inline __attribute__((always_inline))
+struct aegis128_state aegis128_update_neon(struct aegis128_state st,
+ uint8x16_t m)
+{
+ m ^= aegis_aes_round(st.v[4]);
+ st.v[4] ^= aegis_aes_round(st.v[3]);
+ st.v[3] ^= aegis_aes_round(st.v[2]);
+ st.v[2] ^= aegis_aes_round(st.v[1]);
+ st.v[1] ^= aegis_aes_round(st.v[0]);
+ st.v[0] ^= m;
+
+ return st;
+}
+
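+/*
+ * The GCC fallback in aegis_aes_round() addresses the Sbox through
+ * hard-coded registers v16-v31, so load it into the NEON unit up
+ * front.
+ */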
+static inline __attribute__((always_inline))
+void preload_sbox(void)
+{
+ if (!IS_ENABLED(CONFIG_ARM64) ||
+ !IS_ENABLED(CONFIG_CC_IS_GCC) ||
+ __builtin_expect(aegis128_have_aes_insn, 1))
+ return;
+
+ asm("ld1 {v16.16b-v19.16b}, [%0], #64 \n\t"
+ "ld1 {v20.16b-v23.16b}, [%0], #64 \n\t"
+ "ld1 {v24.16b-v27.16b}, [%0], #64 \n\t"
+ "ld1 {v28.16b-v31.16b}, [%0] \n\t"
+ :: "r"(crypto_aes_sbox));
+}
+
+void crypto_aegis128_init_neon(void *state, const void *key, const void *iv)
+{
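+ /* the AEGIS initialization constants: the Fibonacci sequence mod 256 */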
+ static const uint8_t const0[] = {
+ 0x00, 0x01, 0x01, 0x02, 0x03, 0x05, 0x08, 0x0d,
+ 0x15, 0x22, 0x37, 0x59, 0x90, 0xe9, 0x79, 0x62,
+ };
+ static const uint8_t const1[] = {
+ 0xdb, 0x3d, 0x18, 0x55, 0x6d, 0xc2, 0x2f, 0xf1,
+ 0x20, 0x11, 0x31, 0x42, 0x73, 0xb5, 0x28, 0xdd,
+ };
+ uint8x16_t k = vld1q_u8(key);
+ uint8x16_t kiv = k ^ vld1q_u8(iv);
+ struct aegis128_state st = {{
+ kiv,
+ vld1q_u8(const1),
+ vld1q_u8(const0),
+ k ^ vld1q_u8(const0),
+ k ^ vld1q_u8(const1),
+ }};
+ int i;
+
+ preload_sbox();
+
+ for (i = 0; i < 5; i++) {
+ st = aegis128_update_neon(st, k);
+ st = aegis128_update_neon(st, kiv);
+ }
+ aegis128_save_state_neon(st, state);
+}
+
+void crypto_aegis128_update_neon(void *state, const void *msg)
+{
+ struct aegis128_state st = aegis128_load_state_neon(state);
+
+ preload_sbox();
+
+ st = aegis128_update_neon(st, vld1q_u8(msg));
+
+ aegis128_save_state_neon(st, state);
+}
+
+void crypto_aegis128_encrypt_chunk_neon(void *state, void *dst, const void *src,
+ unsigned int size)
+{
+ struct aegis128_state st = aegis128_load_state_neon(state);
+ uint8x16_t msg;
+
+ preload_sbox();
+
+ while (size >= AEGIS_BLOCK_SIZE) {
+ uint8x16_t s = st.v[1] ^ (st.v[2] & st.v[3]) ^ st.v[4];
+
+ msg = vld1q_u8(src);
+ st = aegis128_update_neon(st, msg);
+ vst1q_u8(dst, msg ^ s);
+
+ size -= AEGIS_BLOCK_SIZE;
+ src += AEGIS_BLOCK_SIZE;
+ dst += AEGIS_BLOCK_SIZE;
+ }
+
+ if (size > 0) {
+ uint8x16_t s = st.v[1] ^ (st.v[2] & st.v[3]) ^ st.v[4];
+ uint8_t buf[AEGIS_BLOCK_SIZE] = {};
+
+ memcpy(buf, src, size);
+ msg = vld1q_u8(buf);
+ st = aegis128_update_neon(st, msg);
+ vst1q_u8(buf, msg ^ s);
+ memcpy(dst, buf, size);
+ }
+
+ aegis128_save_state_neon(st, state);
+}
+
+void crypto_aegis128_decrypt_chunk_neon(void *state, void *dst, const void *src,
+ unsigned int size)
+{
+ struct aegis128_state st = aegis128_load_state_neon(state);
+ uint8x16_t msg;
+
+ preload_sbox();
+
+ while (size >= AEGIS_BLOCK_SIZE) {
+ msg = vld1q_u8(src) ^ st.v[1] ^ (st.v[2] & st.v[3]) ^ st.v[4];
+ st = aegis128_update_neon(st, msg);
+ vst1q_u8(dst, msg);
+
+ size -= AEGIS_BLOCK_SIZE;
+ src += AEGIS_BLOCK_SIZE;
+ dst += AEGIS_BLOCK_SIZE;
+ }
+
+ if (size > 0) {
+ uint8x16_t s = st.v[1] ^ (st.v[2] & st.v[3]) ^ st.v[4];
+ uint8_t buf[AEGIS_BLOCK_SIZE];
+
+ vst1q_u8(buf, s);
+ memcpy(buf, src, size);
+ msg = vld1q_u8(buf) ^ s;
+ vst1q_u8(buf, msg);
+ memcpy(dst, buf, size);
+
+ st = aegis128_update_neon(st, msg);
+ }
+
+ aegis128_save_state_neon(st, state);
+}
+
+void crypto_aegis128_final_neon(void *state, void *tag_xor, uint64_t assoclen,
+ uint64_t cryptlen)
+{
+ struct aegis128_state st = aegis128_load_state_neon(state);
+ uint8x16_t v;
+ int i;
+
+ preload_sbox();
+
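+ /* fold the AD and message lengths, in bits, into state word 3 */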
+ v = st.v[3] ^ (uint8x16_t)vcombine_u64(vmov_n_u64(8 * assoclen),
+ vmov_n_u64(8 * cryptlen));
+
+ for (i = 0; i < 7; i++)
+ st = aegis128_update_neon(st, v);
+
+ v = vld1q_u8(tag_xor);
+ v ^= st.v[0] ^ st.v[1] ^ st.v[2] ^ st.v[3] ^ st.v[4];
+ vst1q_u8(tag_xor, v);
+}
diff --git a/crypto/aegis128-neon.c b/crypto/aegis128-neon.c
new file mode 100644
index 000000000000..8271b1fa0fbc
--- /dev/null
+++ b/crypto/aegis128-neon.c
@@ -0,0 +1,70 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Copyright (C) 2019 Linaro Ltd <ard.biesheuvel@linaro.org>
+ */
+
+#include <asm/cpufeature.h>
+#include <asm/neon.h>
+
+#include "aegis.h"
+
+void crypto_aegis128_init_neon(void *state, const void *key, const void *iv);
+void crypto_aegis128_update_neon(void *state, const void *msg);
+void crypto_aegis128_encrypt_chunk_neon(void *state, void *dst, const void *src,
+ unsigned int size);
+void crypto_aegis128_decrypt_chunk_neon(void *state, void *dst, const void *src,
+ unsigned int size);
+void crypto_aegis128_final_neon(void *state, void *tag_xor, uint64_t assoclen,
+ uint64_t cryptlen);
+
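+/* set once at init; while clear, the arm64 inner code uses the tbl-based AES round */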
+int aegis128_have_aes_insn __ro_after_init;
+
+bool crypto_aegis128_have_simd(void)
+{
+ if (cpu_have_feature(cpu_feature(AES))) {
+ aegis128_have_aes_insn = 1;
+ return true;
+ }
+ return IS_ENABLED(CONFIG_ARM64);
+}
+
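+/*
+ * Each wrapper brackets the NEON body in kernel_neon_begin()/end() so
+ * callers never have to manage the FP/SIMD register state themselves.
+ */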
+void crypto_aegis128_init_simd(union aegis_block *state,
+ const union aegis_block *key,
+ const u8 *iv)
+{
+ kernel_neon_begin();
+ crypto_aegis128_init_neon(state, key, iv);
+ kernel_neon_end();
+}
+
+void crypto_aegis128_update_simd(union aegis_block *state, const void *msg)
+{
+ kernel_neon_begin();
+ crypto_aegis128_update_neon(state, msg);
+ kernel_neon_end();
+}
+
+void crypto_aegis128_encrypt_chunk_simd(union aegis_block *state, u8 *dst,
+ const u8 *src, unsigned int size)
+{
+ kernel_neon_begin();
+ crypto_aegis128_encrypt_chunk_neon(state, dst, src, size);
+ kernel_neon_end();
+}
+
+void crypto_aegis128_decrypt_chunk_simd(union aegis_block *state, u8 *dst,
+ const u8 *src, unsigned int size)
+{
+ kernel_neon_begin();
+ crypto_aegis128_decrypt_chunk_neon(state, dst, src, size);
+ kernel_neon_end();
+}
+
+void crypto_aegis128_final_simd(union aegis_block *state,
+ union aegis_block *tag_xor,
+ u64 assoclen, u64 cryptlen)
+{
+ kernel_neon_begin();
+ crypto_aegis128_final_neon(state, tag_xor, assoclen, cryptlen);
+ kernel_neon_end();
+}
diff --git a/crypto/aegis128l.c b/crypto/aegis128l.c
deleted file mode 100644
index 9bca3d619a22..000000000000
--- a/crypto/aegis128l.c
+++ /dev/null
@@ -1,522 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * The AEGIS-128L Authenticated-Encryption Algorithm
- *
- * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
- * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
- */
-
-#include <crypto/algapi.h>
-#include <crypto/internal/aead.h>
-#include <crypto/internal/skcipher.h>
-#include <crypto/scatterwalk.h>
-#include <linux/err.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/scatterlist.h>
-
-#include "aegis.h"
-
-#define AEGIS128L_CHUNK_BLOCKS 2
-#define AEGIS128L_CHUNK_SIZE (AEGIS128L_CHUNK_BLOCKS * AEGIS_BLOCK_SIZE)
-#define AEGIS128L_NONCE_SIZE 16
-#define AEGIS128L_STATE_BLOCKS 8
-#define AEGIS128L_KEY_SIZE 16
-#define AEGIS128L_MIN_AUTH_SIZE 8
-#define AEGIS128L_MAX_AUTH_SIZE 16
-
-union aegis_chunk {
- union aegis_block blocks[AEGIS128L_CHUNK_BLOCKS];
- u8 bytes[AEGIS128L_CHUNK_SIZE];
-};
-
-struct aegis_state {
- union aegis_block blocks[AEGIS128L_STATE_BLOCKS];
-};
-
-struct aegis_ctx {
- union aegis_block key;
-};
-
-struct aegis128l_ops {
- int (*skcipher_walk_init)(struct skcipher_walk *walk,
- struct aead_request *req, bool atomic);
-
- void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size);
-};
-
-static void crypto_aegis128l_update(struct aegis_state *state)
-{
- union aegis_block tmp;
- unsigned int i;
-
- tmp = state->blocks[AEGIS128L_STATE_BLOCKS - 1];
- for (i = AEGIS128L_STATE_BLOCKS - 1; i > 0; i--)
- crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
- &state->blocks[i]);
- crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
-}
-
-static void crypto_aegis128l_update_a(struct aegis_state *state,
- const union aegis_chunk *msg)
-{
- crypto_aegis128l_update(state);
- crypto_aegis_block_xor(&state->blocks[0], &msg->blocks[0]);
- crypto_aegis_block_xor(&state->blocks[4], &msg->blocks[1]);
-}
-
-static void crypto_aegis128l_update_u(struct aegis_state *state,
- const void *msg)
-{
- crypto_aegis128l_update(state);
- crypto_xor(state->blocks[0].bytes, msg + 0 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
- crypto_xor(state->blocks[4].bytes, msg + 1 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
-}
-
-static void crypto_aegis128l_init(struct aegis_state *state,
- const union aegis_block *key,
- const u8 *iv)
-{
- union aegis_block key_iv;
- union aegis_chunk chunk;
- unsigned int i;
-
- memcpy(chunk.blocks[0].bytes, iv, AEGIS_BLOCK_SIZE);
- chunk.blocks[1] = *key;
-
- key_iv = *key;
- crypto_aegis_block_xor(&key_iv, &chunk.blocks[0]);
-
- state->blocks[0] = key_iv;
- state->blocks[1] = crypto_aegis_const[1];
- state->blocks[2] = crypto_aegis_const[0];
- state->blocks[3] = crypto_aegis_const[1];
- state->blocks[4] = key_iv;
- state->blocks[5] = *key;
- state->blocks[6] = *key;
- state->blocks[7] = *key;
-
- crypto_aegis_block_xor(&state->blocks[5], &crypto_aegis_const[0]);
- crypto_aegis_block_xor(&state->blocks[6], &crypto_aegis_const[1]);
- crypto_aegis_block_xor(&state->blocks[7], &crypto_aegis_const[0]);
-
- for (i = 0; i < 10; i++) {
- crypto_aegis128l_update_a(state, &chunk);
- }
-}
-
-static void crypto_aegis128l_ad(struct aegis_state *state,
- const u8 *src, unsigned int size)
-{
- if (AEGIS_ALIGNED(src)) {
- const union aegis_chunk *src_chunk =
- (const union aegis_chunk *)src;
-
- while (size >= AEGIS128L_CHUNK_SIZE) {
- crypto_aegis128l_update_a(state, src_chunk);
-
- size -= AEGIS128L_CHUNK_SIZE;
- src_chunk += 1;
- }
- } else {
- while (size >= AEGIS128L_CHUNK_SIZE) {
- crypto_aegis128l_update_u(state, src);
-
- size -= AEGIS128L_CHUNK_SIZE;
- src += AEGIS128L_CHUNK_SIZE;
- }
- }
-}
-
-static void crypto_aegis128l_encrypt_chunk(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size)
-{
- union aegis_chunk tmp;
- union aegis_block *tmp0 = &tmp.blocks[0];
- union aegis_block *tmp1 = &tmp.blocks[1];
-
- if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
- while (size >= AEGIS128L_CHUNK_SIZE) {
- union aegis_chunk *dst_blk =
- (union aegis_chunk *)dst;
- const union aegis_chunk *src_blk =
- (const union aegis_chunk *)src;
-
- *tmp0 = state->blocks[2];
- crypto_aegis_block_and(tmp0, &state->blocks[3]);
- crypto_aegis_block_xor(tmp0, &state->blocks[6]);
- crypto_aegis_block_xor(tmp0, &state->blocks[1]);
- crypto_aegis_block_xor(tmp0, &src_blk->blocks[0]);
-
- *tmp1 = state->blocks[6];
- crypto_aegis_block_and(tmp1, &state->blocks[7]);
- crypto_aegis_block_xor(tmp1, &state->blocks[5]);
- crypto_aegis_block_xor(tmp1, &state->blocks[2]);
- crypto_aegis_block_xor(tmp1, &src_blk->blocks[1]);
-
- crypto_aegis128l_update_a(state, src_blk);
-
- *dst_blk = tmp;
-
- size -= AEGIS128L_CHUNK_SIZE;
- src += AEGIS128L_CHUNK_SIZE;
- dst += AEGIS128L_CHUNK_SIZE;
- }
- } else {
- while (size >= AEGIS128L_CHUNK_SIZE) {
- *tmp0 = state->blocks[2];
- crypto_aegis_block_and(tmp0, &state->blocks[3]);
- crypto_aegis_block_xor(tmp0, &state->blocks[6]);
- crypto_aegis_block_xor(tmp0, &state->blocks[1]);
- crypto_xor(tmp0->bytes, src + 0 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
-
- *tmp1 = state->blocks[6];
- crypto_aegis_block_and(tmp1, &state->blocks[7]);
- crypto_aegis_block_xor(tmp1, &state->blocks[5]);
- crypto_aegis_block_xor(tmp1, &state->blocks[2]);
- crypto_xor(tmp1->bytes, src + 1 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
-
- crypto_aegis128l_update_u(state, src);
-
- memcpy(dst, tmp.bytes, AEGIS128L_CHUNK_SIZE);
-
- size -= AEGIS128L_CHUNK_SIZE;
- src += AEGIS128L_CHUNK_SIZE;
- dst += AEGIS128L_CHUNK_SIZE;
- }
- }
-
- if (size > 0) {
- union aegis_chunk msg = {};
- memcpy(msg.bytes, src, size);
-
- *tmp0 = state->blocks[2];
- crypto_aegis_block_and(tmp0, &state->blocks[3]);
- crypto_aegis_block_xor(tmp0, &state->blocks[6]);
- crypto_aegis_block_xor(tmp0, &state->blocks[1]);
-
- *tmp1 = state->blocks[6];
- crypto_aegis_block_and(tmp1, &state->blocks[7]);
- crypto_aegis_block_xor(tmp1, &state->blocks[5]);
- crypto_aegis_block_xor(tmp1, &state->blocks[2]);
-
- crypto_aegis128l_update_a(state, &msg);
-
- crypto_aegis_block_xor(&msg.blocks[0], tmp0);
- crypto_aegis_block_xor(&msg.blocks[1], tmp1);
-
- memcpy(dst, msg.bytes, size);
- }
-}
-
-static void crypto_aegis128l_decrypt_chunk(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size)
-{
- union aegis_chunk tmp;
- union aegis_block *tmp0 = &tmp.blocks[0];
- union aegis_block *tmp1 = &tmp.blocks[1];
-
- if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
- while (size >= AEGIS128L_CHUNK_SIZE) {
- union aegis_chunk *dst_blk =
- (union aegis_chunk *)dst;
- const union aegis_chunk *src_blk =
- (const union aegis_chunk *)src;
-
- *tmp0 = state->blocks[2];
- crypto_aegis_block_and(tmp0, &state->blocks[3]);
- crypto_aegis_block_xor(tmp0, &state->blocks[6]);
- crypto_aegis_block_xor(tmp0, &state->blocks[1]);
- crypto_aegis_block_xor(tmp0, &src_blk->blocks[0]);
-
- *tmp1 = state->blocks[6];
- crypto_aegis_block_and(tmp1, &state->blocks[7]);
- crypto_aegis_block_xor(tmp1, &state->blocks[5]);
- crypto_aegis_block_xor(tmp1, &state->blocks[2]);
- crypto_aegis_block_xor(tmp1, &src_blk->blocks[1]);
-
- crypto_aegis128l_update_a(state, &tmp);
-
- *dst_blk = tmp;
-
- size -= AEGIS128L_CHUNK_SIZE;
- src += AEGIS128L_CHUNK_SIZE;
- dst += AEGIS128L_CHUNK_SIZE;
- }
- } else {
- while (size >= AEGIS128L_CHUNK_SIZE) {
- *tmp0 = state->blocks[2];
- crypto_aegis_block_and(tmp0, &state->blocks[3]);
- crypto_aegis_block_xor(tmp0, &state->blocks[6]);
- crypto_aegis_block_xor(tmp0, &state->blocks[1]);
- crypto_xor(tmp0->bytes, src + 0 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
-
- *tmp1 = state->blocks[6];
- crypto_aegis_block_and(tmp1, &state->blocks[7]);
- crypto_aegis_block_xor(tmp1, &state->blocks[5]);
- crypto_aegis_block_xor(tmp1, &state->blocks[2]);
- crypto_xor(tmp1->bytes, src + 1 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
-
- crypto_aegis128l_update_a(state, &tmp);
-
- memcpy(dst, tmp.bytes, AEGIS128L_CHUNK_SIZE);
-
- size -= AEGIS128L_CHUNK_SIZE;
- src += AEGIS128L_CHUNK_SIZE;
- dst += AEGIS128L_CHUNK_SIZE;
- }
- }
-
- if (size > 0) {
- union aegis_chunk msg = {};
- memcpy(msg.bytes, src, size);
-
- *tmp0 = state->blocks[2];
- crypto_aegis_block_and(tmp0, &state->blocks[3]);
- crypto_aegis_block_xor(tmp0, &state->blocks[6]);
- crypto_aegis_block_xor(tmp0, &state->blocks[1]);
- crypto_aegis_block_xor(&msg.blocks[0], tmp0);
-
- *tmp1 = state->blocks[6];
- crypto_aegis_block_and(tmp1, &state->blocks[7]);
- crypto_aegis_block_xor(tmp1, &state->blocks[5]);
- crypto_aegis_block_xor(tmp1, &state->blocks[2]);
- crypto_aegis_block_xor(&msg.blocks[1], tmp1);
-
- memset(msg.bytes + size, 0, AEGIS128L_CHUNK_SIZE - size);
-
- crypto_aegis128l_update_a(state, &msg);
-
- memcpy(dst, msg.bytes, size);
- }
-}
-
-static void crypto_aegis128l_process_ad(struct aegis_state *state,
- struct scatterlist *sg_src,
- unsigned int assoclen)
-{
- struct scatter_walk walk;
- union aegis_chunk buf;
- unsigned int pos = 0;
-
- scatterwalk_start(&walk, sg_src);
- while (assoclen != 0) {
- unsigned int size = scatterwalk_clamp(&walk, assoclen);
- unsigned int left = size;
- void *mapped = scatterwalk_map(&walk);
- const u8 *src = (const u8 *)mapped;
-
- if (pos + size >= AEGIS128L_CHUNK_SIZE) {
- if (pos > 0) {
- unsigned int fill = AEGIS128L_CHUNK_SIZE - pos;
- memcpy(buf.bytes + pos, src, fill);
- crypto_aegis128l_update_a(state, &buf);
- pos = 0;
- left -= fill;
- src += fill;
- }
-
- crypto_aegis128l_ad(state, src, left);
- src += left & ~(AEGIS128L_CHUNK_SIZE - 1);
- left &= AEGIS128L_CHUNK_SIZE - 1;
- }
-
- memcpy(buf.bytes + pos, src, left);
-
- pos += left;
- assoclen -= size;
- scatterwalk_unmap(mapped);
- scatterwalk_advance(&walk, size);
- scatterwalk_done(&walk, 0, assoclen);
- }
-
- if (pos > 0) {
- memset(buf.bytes + pos, 0, AEGIS128L_CHUNK_SIZE - pos);
- crypto_aegis128l_update_a(state, &buf);
- }
-}
-
-static void crypto_aegis128l_process_crypt(struct aegis_state *state,
- struct aead_request *req,
- const struct aegis128l_ops *ops)
-{
- struct skcipher_walk walk;
-
- ops->skcipher_walk_init(&walk, req, false);
-
- while (walk.nbytes) {
- unsigned int nbytes = walk.nbytes;
-
- if (nbytes < walk.total)
- nbytes = round_down(nbytes, walk.stride);
-
- ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes);
-
- skcipher_walk_done(&walk, walk.nbytes - nbytes);
- }
-}
-
-static void crypto_aegis128l_final(struct aegis_state *state,
- union aegis_block *tag_xor,
- u64 assoclen, u64 cryptlen)
-{
- u64 assocbits = assoclen * 8;
- u64 cryptbits = cryptlen * 8;
-
- union aegis_chunk tmp;
- unsigned int i;
-
- tmp.blocks[0].words64[0] = cpu_to_le64(assocbits);
- tmp.blocks[0].words64[1] = cpu_to_le64(cryptbits);
-
- crypto_aegis_block_xor(&tmp.blocks[0], &state->blocks[2]);
-
- tmp.blocks[1] = tmp.blocks[0];
- for (i = 0; i < 7; i++)
- crypto_aegis128l_update_a(state, &tmp);
-
- for (i = 0; i < 7; i++)
- crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
-}
-
-static int crypto_aegis128l_setkey(struct crypto_aead *aead, const u8 *key,
- unsigned int keylen)
-{
- struct aegis_ctx *ctx = crypto_aead_ctx(aead);
-
- if (keylen != AEGIS128L_KEY_SIZE) {
- crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
- return -EINVAL;
- }
-
- memcpy(ctx->key.bytes, key, AEGIS128L_KEY_SIZE);
- return 0;
-}
-
-static int crypto_aegis128l_setauthsize(struct crypto_aead *tfm,
- unsigned int authsize)
-{
- if (authsize > AEGIS128L_MAX_AUTH_SIZE)
- return -EINVAL;
- if (authsize < AEGIS128L_MIN_AUTH_SIZE)
- return -EINVAL;
- return 0;
-}
-
-static void crypto_aegis128l_crypt(struct aead_request *req,
- union aegis_block *tag_xor,
- unsigned int cryptlen,
- const struct aegis128l_ops *ops)
-{
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
- struct aegis_state state;
-
- crypto_aegis128l_init(&state, &ctx->key, req->iv);
- crypto_aegis128l_process_ad(&state, req->src, req->assoclen);
- crypto_aegis128l_process_crypt(&state, req, ops);
- crypto_aegis128l_final(&state, tag_xor, req->assoclen, cryptlen);
-}
-
-static int crypto_aegis128l_encrypt(struct aead_request *req)
-{
- static const struct aegis128l_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_encrypt,
- .crypt_chunk = crypto_aegis128l_encrypt_chunk,
- };
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- union aegis_block tag = {};
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen;
-
- crypto_aegis128l_crypt(req, &tag, cryptlen, &ops);
-
- scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
- authsize, 1);
- return 0;
-}
-
-static int crypto_aegis128l_decrypt(struct aead_request *req)
-{
- static const struct aegis128l_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_decrypt,
- .crypt_chunk = crypto_aegis128l_decrypt_chunk,
- };
- static const u8 zeros[AEGIS128L_MAX_AUTH_SIZE] = {};
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- union aegis_block tag;
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen - authsize;
-
- scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
- authsize, 0);
-
- crypto_aegis128l_crypt(req, &tag, cryptlen, &ops);
-
- return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
-}
-
-static int crypto_aegis128l_init_tfm(struct crypto_aead *tfm)
-{
- return 0;
-}
-
-static void crypto_aegis128l_exit_tfm(struct crypto_aead *tfm)
-{
-}
-
-static struct aead_alg crypto_aegis128l_alg = {
- .setkey = crypto_aegis128l_setkey,
- .setauthsize = crypto_aegis128l_setauthsize,
- .encrypt = crypto_aegis128l_encrypt,
- .decrypt = crypto_aegis128l_decrypt,
- .init = crypto_aegis128l_init_tfm,
- .exit = crypto_aegis128l_exit_tfm,
-
- .ivsize = AEGIS128L_NONCE_SIZE,
- .maxauthsize = AEGIS128L_MAX_AUTH_SIZE,
- .chunksize = AEGIS128L_CHUNK_SIZE,
-
- .base = {
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct aegis_ctx),
- .cra_alignmask = 0,
-
- .cra_priority = 100,
-
- .cra_name = "aegis128l",
- .cra_driver_name = "aegis128l-generic",
-
- .cra_module = THIS_MODULE,
- }
-};
-
-static int __init crypto_aegis128l_module_init(void)
-{
- return crypto_register_aead(&crypto_aegis128l_alg);
-}
-
-static void __exit crypto_aegis128l_module_exit(void)
-{
- crypto_unregister_aead(&crypto_aegis128l_alg);
-}
-
-subsys_initcall(crypto_aegis128l_module_init);
-module_exit(crypto_aegis128l_module_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
-MODULE_DESCRIPTION("AEGIS-128L AEAD algorithm");
-MODULE_ALIAS_CRYPTO("aegis128l");
-MODULE_ALIAS_CRYPTO("aegis128l-generic");
diff --git a/crypto/aegis256.c b/crypto/aegis256.c
deleted file mode 100644
index b47fd39595ad..000000000000
--- a/crypto/aegis256.c
+++ /dev/null
@@ -1,473 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * The AEGIS-256 Authenticated-Encryption Algorithm
- *
- * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
- * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
- */
-
-#include <crypto/algapi.h>
-#include <crypto/internal/aead.h>
-#include <crypto/internal/skcipher.h>
-#include <crypto/scatterwalk.h>
-#include <linux/err.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/scatterlist.h>
-
-#include "aegis.h"
-
-#define AEGIS256_NONCE_SIZE 32
-#define AEGIS256_STATE_BLOCKS 6
-#define AEGIS256_KEY_SIZE 32
-#define AEGIS256_MIN_AUTH_SIZE 8
-#define AEGIS256_MAX_AUTH_SIZE 16
-
-struct aegis_state {
- union aegis_block blocks[AEGIS256_STATE_BLOCKS];
-};
-
-struct aegis_ctx {
- union aegis_block key[AEGIS256_KEY_SIZE / AEGIS_BLOCK_SIZE];
-};
-
-struct aegis256_ops {
- int (*skcipher_walk_init)(struct skcipher_walk *walk,
- struct aead_request *req, bool atomic);
-
- void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size);
-};
-
-static void crypto_aegis256_update(struct aegis_state *state)
-{
- union aegis_block tmp;
- unsigned int i;
-
- tmp = state->blocks[AEGIS256_STATE_BLOCKS - 1];
- for (i = AEGIS256_STATE_BLOCKS - 1; i > 0; i--)
- crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
- &state->blocks[i]);
- crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
-}
-
-static void crypto_aegis256_update_a(struct aegis_state *state,
- const union aegis_block *msg)
-{
- crypto_aegis256_update(state);
- crypto_aegis_block_xor(&state->blocks[0], msg);
-}
-
-static void crypto_aegis256_update_u(struct aegis_state *state, const void *msg)
-{
- crypto_aegis256_update(state);
- crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
-}
-
-static void crypto_aegis256_init(struct aegis_state *state,
- const union aegis_block *key,
- const u8 *iv)
-{
- union aegis_block key_iv[2];
- unsigned int i;
-
- key_iv[0] = key[0];
- key_iv[1] = key[1];
- crypto_xor(key_iv[0].bytes, iv + 0 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
- crypto_xor(key_iv[1].bytes, iv + 1 * AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
-
- state->blocks[0] = key_iv[0];
- state->blocks[1] = key_iv[1];
- state->blocks[2] = crypto_aegis_const[1];
- state->blocks[3] = crypto_aegis_const[0];
- state->blocks[4] = key[0];
- state->blocks[5] = key[1];
-
- crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[0]);
- crypto_aegis_block_xor(&state->blocks[5], &crypto_aegis_const[1]);
-
- for (i = 0; i < 4; i++) {
- crypto_aegis256_update_a(state, &key[0]);
- crypto_aegis256_update_a(state, &key[1]);
- crypto_aegis256_update_a(state, &key_iv[0]);
- crypto_aegis256_update_a(state, &key_iv[1]);
- }
-}
-
-static void crypto_aegis256_ad(struct aegis_state *state,
- const u8 *src, unsigned int size)
-{
- if (AEGIS_ALIGNED(src)) {
- const union aegis_block *src_blk =
- (const union aegis_block *)src;
-
- while (size >= AEGIS_BLOCK_SIZE) {
- crypto_aegis256_update_a(state, src_blk);
-
- size -= AEGIS_BLOCK_SIZE;
- src_blk++;
- }
- } else {
- while (size >= AEGIS_BLOCK_SIZE) {
- crypto_aegis256_update_u(state, src);
-
- size -= AEGIS_BLOCK_SIZE;
- src += AEGIS_BLOCK_SIZE;
- }
- }
-}
-
-static void crypto_aegis256_encrypt_chunk(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size)
-{
- union aegis_block tmp;
-
- if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
- while (size >= AEGIS_BLOCK_SIZE) {
- union aegis_block *dst_blk =
- (union aegis_block *)dst;
- const union aegis_block *src_blk =
- (const union aegis_block *)src;
-
- tmp = state->blocks[2];
- crypto_aegis_block_and(&tmp, &state->blocks[3]);
- crypto_aegis_block_xor(&tmp, &state->blocks[5]);
- crypto_aegis_block_xor(&tmp, &state->blocks[4]);
- crypto_aegis_block_xor(&tmp, &state->blocks[1]);
- crypto_aegis_block_xor(&tmp, src_blk);
-
- crypto_aegis256_update_a(state, src_blk);
-
- *dst_blk = tmp;
-
- size -= AEGIS_BLOCK_SIZE;
- src += AEGIS_BLOCK_SIZE;
- dst += AEGIS_BLOCK_SIZE;
- }
- } else {
- while (size >= AEGIS_BLOCK_SIZE) {
- tmp = state->blocks[2];
- crypto_aegis_block_and(&tmp, &state->blocks[3]);
- crypto_aegis_block_xor(&tmp, &state->blocks[5]);
- crypto_aegis_block_xor(&tmp, &state->blocks[4]);
- crypto_aegis_block_xor(&tmp, &state->blocks[1]);
- crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);
-
- crypto_aegis256_update_u(state, src);
-
- memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);
-
- size -= AEGIS_BLOCK_SIZE;
- src += AEGIS_BLOCK_SIZE;
- dst += AEGIS_BLOCK_SIZE;
- }
- }
-
- if (size > 0) {
- union aegis_block msg = {};
- memcpy(msg.bytes, src, size);
-
- tmp = state->blocks[2];
- crypto_aegis_block_and(&tmp, &state->blocks[3]);
- crypto_aegis_block_xor(&tmp, &state->blocks[5]);
- crypto_aegis_block_xor(&tmp, &state->blocks[4]);
- crypto_aegis_block_xor(&tmp, &state->blocks[1]);
-
- crypto_aegis256_update_a(state, &msg);
-
- crypto_aegis_block_xor(&msg, &tmp);
-
- memcpy(dst, msg.bytes, size);
- }
-}
-
-static void crypto_aegis256_decrypt_chunk(struct aegis_state *state, u8 *dst,
- const u8 *src, unsigned int size)
-{
- union aegis_block tmp;
-
- if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
- while (size >= AEGIS_BLOCK_SIZE) {
- union aegis_block *dst_blk =
- (union aegis_block *)dst;
- const union aegis_block *src_blk =
- (const union aegis_block *)src;
-
- tmp = state->blocks[2];
- crypto_aegis_block_and(&tmp, &state->blocks[3]);
- crypto_aegis_block_xor(&tmp, &state->blocks[5]);
- crypto_aegis_block_xor(&tmp, &state->blocks[4]);
- crypto_aegis_block_xor(&tmp, &state->blocks[1]);
- crypto_aegis_block_xor(&tmp, src_blk);
-
- crypto_aegis256_update_a(state, &tmp);
-
- *dst_blk = tmp;
-
- size -= AEGIS_BLOCK_SIZE;
- src += AEGIS_BLOCK_SIZE;
- dst += AEGIS_BLOCK_SIZE;
- }
- } else {
- while (size >= AEGIS_BLOCK_SIZE) {
- tmp = state->blocks[2];
- crypto_aegis_block_and(&tmp, &state->blocks[3]);
- crypto_aegis_block_xor(&tmp, &state->blocks[5]);
- crypto_aegis_block_xor(&tmp, &state->blocks[4]);
- crypto_aegis_block_xor(&tmp, &state->blocks[1]);
- crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);
-
- crypto_aegis256_update_a(state, &tmp);
-
- memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);
-
- size -= AEGIS_BLOCK_SIZE;
- src += AEGIS_BLOCK_SIZE;
- dst += AEGIS_BLOCK_SIZE;
- }
- }
-
- if (size > 0) {
- union aegis_block msg = {};
- memcpy(msg.bytes, src, size);
-
- tmp = state->blocks[2];
- crypto_aegis_block_and(&tmp, &state->blocks[3]);
- crypto_aegis_block_xor(&tmp, &state->blocks[5]);
- crypto_aegis_block_xor(&tmp, &state->blocks[4]);
- crypto_aegis_block_xor(&tmp, &state->blocks[1]);
- crypto_aegis_block_xor(&msg, &tmp);
-
- memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);
-
- crypto_aegis256_update_a(state, &msg);
-
- memcpy(dst, msg.bytes, size);
- }
-}
-
-static void crypto_aegis256_process_ad(struct aegis_state *state,
- struct scatterlist *sg_src,
- unsigned int assoclen)
-{
- struct scatter_walk walk;
- union aegis_block buf;
- unsigned int pos = 0;
-
- scatterwalk_start(&walk, sg_src);
- while (assoclen != 0) {
- unsigned int size = scatterwalk_clamp(&walk, assoclen);
- unsigned int left = size;
- void *mapped = scatterwalk_map(&walk);
- const u8 *src = (const u8 *)mapped;
-
- if (pos + size >= AEGIS_BLOCK_SIZE) {
- if (pos > 0) {
- unsigned int fill = AEGIS_BLOCK_SIZE - pos;
- memcpy(buf.bytes + pos, src, fill);
- crypto_aegis256_update_a(state, &buf);
- pos = 0;
- left -= fill;
- src += fill;
- }
-
- crypto_aegis256_ad(state, src, left);
- src += left & ~(AEGIS_BLOCK_SIZE - 1);
- left &= AEGIS_BLOCK_SIZE - 1;
- }
-
- memcpy(buf.bytes + pos, src, left);
-
- pos += left;
- assoclen -= size;
- scatterwalk_unmap(mapped);
- scatterwalk_advance(&walk, size);
- scatterwalk_done(&walk, 0, assoclen);
- }
-
- if (pos > 0) {
- memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
- crypto_aegis256_update_a(state, &buf);
- }
-}
-
-static void crypto_aegis256_process_crypt(struct aegis_state *state,
- struct aead_request *req,
- const struct aegis256_ops *ops)
-{
- struct skcipher_walk walk;
-
- ops->skcipher_walk_init(&walk, req, false);
-
- while (walk.nbytes) {
- unsigned int nbytes = walk.nbytes;
-
- if (nbytes < walk.total)
- nbytes = round_down(nbytes, walk.stride);
-
- ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes);
-
- skcipher_walk_done(&walk, walk.nbytes - nbytes);
- }
-}
-
-static void crypto_aegis256_final(struct aegis_state *state,
- union aegis_block *tag_xor,
- u64 assoclen, u64 cryptlen)
-{
- u64 assocbits = assoclen * 8;
- u64 cryptbits = cryptlen * 8;
-
- union aegis_block tmp;
- unsigned int i;
-
- tmp.words64[0] = cpu_to_le64(assocbits);
- tmp.words64[1] = cpu_to_le64(cryptbits);
-
- crypto_aegis_block_xor(&tmp, &state->blocks[3]);
-
- for (i = 0; i < 7; i++)
- crypto_aegis256_update_a(state, &tmp);
-
- for (i = 0; i < AEGIS256_STATE_BLOCKS; i++)
- crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
-}
-
-static int crypto_aegis256_setkey(struct crypto_aead *aead, const u8 *key,
- unsigned int keylen)
-{
- struct aegis_ctx *ctx = crypto_aead_ctx(aead);
-
- if (keylen != AEGIS256_KEY_SIZE) {
- crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
- return -EINVAL;
- }
-
- memcpy(ctx->key[0].bytes, key, AEGIS_BLOCK_SIZE);
- memcpy(ctx->key[1].bytes, key + AEGIS_BLOCK_SIZE,
- AEGIS_BLOCK_SIZE);
- return 0;
-}
-
-static int crypto_aegis256_setauthsize(struct crypto_aead *tfm,
- unsigned int authsize)
-{
- if (authsize > AEGIS256_MAX_AUTH_SIZE)
- return -EINVAL;
- if (authsize < AEGIS256_MIN_AUTH_SIZE)
- return -EINVAL;
- return 0;
-}
-
-static void crypto_aegis256_crypt(struct aead_request *req,
- union aegis_block *tag_xor,
- unsigned int cryptlen,
- const struct aegis256_ops *ops)
-{
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
- struct aegis_state state;
-
- crypto_aegis256_init(&state, ctx->key, req->iv);
- crypto_aegis256_process_ad(&state, req->src, req->assoclen);
- crypto_aegis256_process_crypt(&state, req, ops);
- crypto_aegis256_final(&state, tag_xor, req->assoclen, cryptlen);
-}
-
-static int crypto_aegis256_encrypt(struct aead_request *req)
-{
- static const struct aegis256_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_encrypt,
- .crypt_chunk = crypto_aegis256_encrypt_chunk,
- };
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- union aegis_block tag = {};
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen;
-
- crypto_aegis256_crypt(req, &tag, cryptlen, &ops);
-
- scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
- authsize, 1);
- return 0;
-}
-
-static int crypto_aegis256_decrypt(struct aead_request *req)
-{
- static const struct aegis256_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_decrypt,
- .crypt_chunk = crypto_aegis256_decrypt_chunk,
- };
- static const u8 zeros[AEGIS256_MAX_AUTH_SIZE] = {};
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- union aegis_block tag;
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen - authsize;
-
- scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
- authsize, 0);
-
- crypto_aegis256_crypt(req, &tag, cryptlen, &ops);
-
- return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
-}
-
-static int crypto_aegis256_init_tfm(struct crypto_aead *tfm)
-{
- return 0;
-}
-
-static void crypto_aegis256_exit_tfm(struct crypto_aead *tfm)
-{
-}
-
-static struct aead_alg crypto_aegis256_alg = {
- .setkey = crypto_aegis256_setkey,
- .setauthsize = crypto_aegis256_setauthsize,
- .encrypt = crypto_aegis256_encrypt,
- .decrypt = crypto_aegis256_decrypt,
- .init = crypto_aegis256_init_tfm,
- .exit = crypto_aegis256_exit_tfm,
-
- .ivsize = AEGIS256_NONCE_SIZE,
- .maxauthsize = AEGIS256_MAX_AUTH_SIZE,
- .chunksize = AEGIS_BLOCK_SIZE,
-
- .base = {
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct aegis_ctx),
- .cra_alignmask = 0,
-
- .cra_priority = 100,
-
- .cra_name = "aegis256",
- .cra_driver_name = "aegis256-generic",
-
- .cra_module = THIS_MODULE,
- }
-};
-
-static int __init crypto_aegis256_module_init(void)
-{
- return crypto_register_aead(&crypto_aegis256_alg);
-}
-
-static void __exit crypto_aegis256_module_exit(void)
-{
- crypto_unregister_aead(&crypto_aegis256_alg);
-}
-
-subsys_initcall(crypto_aegis256_module_init);
-module_exit(crypto_aegis256_module_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
-MODULE_DESCRIPTION("AEGIS-256 AEAD algorithm");
-MODULE_ALIAS_CRYPTO("aegis256");
-MODULE_ALIAS_CRYPTO("aegis256-generic");
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index f217568917e4..27ab27931813 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -61,8 +61,6 @@ static inline u8 byte(const u32 x, const unsigned n)
return x >> (n << 3);
}
-static const u32 rco_tab[10] = { 1, 2, 4, 8, 16, 32, 64, 128, 27, 54 };
-
/* cacheline-aligned to facilitate prefetching into cache */
__visible const u32 crypto_ft_tab[4][256] ____cacheline_aligned = {
{
@@ -328,7 +326,7 @@ __visible const u32 crypto_ft_tab[4][256] ____cacheline_aligned = {
}
};
-__visible const u32 crypto_fl_tab[4][256] ____cacheline_aligned = {
+static const u32 crypto_fl_tab[4][256] ____cacheline_aligned = {
{
0x00000063, 0x0000007c, 0x00000077, 0x0000007b,
0x000000f2, 0x0000006b, 0x0000006f, 0x000000c5,
@@ -856,7 +854,7 @@ __visible const u32 crypto_it_tab[4][256] ____cacheline_aligned = {
}
};
-__visible const u32 crypto_il_tab[4][256] ____cacheline_aligned = {
+static const u32 crypto_il_tab[4][256] ____cacheline_aligned = {
{
0x00000052, 0x00000009, 0x0000006a, 0x000000d5,
0x00000030, 0x00000036, 0x000000a5, 0x00000038,
@@ -1121,158 +1119,7 @@ __visible const u32 crypto_il_tab[4][256] ____cacheline_aligned = {
};
EXPORT_SYMBOL_GPL(crypto_ft_tab);
-EXPORT_SYMBOL_GPL(crypto_fl_tab);
EXPORT_SYMBOL_GPL(crypto_it_tab);
-EXPORT_SYMBOL_GPL(crypto_il_tab);
-
-/* initialise the key schedule from the user supplied key */
-
-#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
-
-#define imix_col(y, x) do { \
- u = star_x(x); \
- v = star_x(u); \
- w = star_x(v); \
- t = w ^ (x); \
- (y) = u ^ v ^ w; \
- (y) ^= ror32(u ^ t, 8) ^ \
- ror32(v ^ t, 16) ^ \
- ror32(t, 24); \
-} while (0)
-
-#define ls_box(x) \
- crypto_fl_tab[0][byte(x, 0)] ^ \
- crypto_fl_tab[1][byte(x, 1)] ^ \
- crypto_fl_tab[2][byte(x, 2)] ^ \
- crypto_fl_tab[3][byte(x, 3)]
-
-#define loop4(i) do { \
- t = ror32(t, 8); \
- t = ls_box(t) ^ rco_tab[i]; \
- t ^= ctx->key_enc[4 * i]; \
- ctx->key_enc[4 * i + 4] = t; \
- t ^= ctx->key_enc[4 * i + 1]; \
- ctx->key_enc[4 * i + 5] = t; \
- t ^= ctx->key_enc[4 * i + 2]; \
- ctx->key_enc[4 * i + 6] = t; \
- t ^= ctx->key_enc[4 * i + 3]; \
- ctx->key_enc[4 * i + 7] = t; \
-} while (0)
-
-#define loop6(i) do { \
- t = ror32(t, 8); \
- t = ls_box(t) ^ rco_tab[i]; \
- t ^= ctx->key_enc[6 * i]; \
- ctx->key_enc[6 * i + 6] = t; \
- t ^= ctx->key_enc[6 * i + 1]; \
- ctx->key_enc[6 * i + 7] = t; \
- t ^= ctx->key_enc[6 * i + 2]; \
- ctx->key_enc[6 * i + 8] = t; \
- t ^= ctx->key_enc[6 * i + 3]; \
- ctx->key_enc[6 * i + 9] = t; \
- t ^= ctx->key_enc[6 * i + 4]; \
- ctx->key_enc[6 * i + 10] = t; \
- t ^= ctx->key_enc[6 * i + 5]; \
- ctx->key_enc[6 * i + 11] = t; \
-} while (0)
-
-#define loop8tophalf(i) do { \
- t = ror32(t, 8); \
- t = ls_box(t) ^ rco_tab[i]; \
- t ^= ctx->key_enc[8 * i]; \
- ctx->key_enc[8 * i + 8] = t; \
- t ^= ctx->key_enc[8 * i + 1]; \
- ctx->key_enc[8 * i + 9] = t; \
- t ^= ctx->key_enc[8 * i + 2]; \
- ctx->key_enc[8 * i + 10] = t; \
- t ^= ctx->key_enc[8 * i + 3]; \
- ctx->key_enc[8 * i + 11] = t; \
-} while (0)
-
-#define loop8(i) do { \
- loop8tophalf(i); \
- t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
- ctx->key_enc[8 * i + 12] = t; \
- t ^= ctx->key_enc[8 * i + 5]; \
- ctx->key_enc[8 * i + 13] = t; \
- t ^= ctx->key_enc[8 * i + 6]; \
- ctx->key_enc[8 * i + 14] = t; \
- t ^= ctx->key_enc[8 * i + 7]; \
- ctx->key_enc[8 * i + 15] = t; \
-} while (0)
-
-/**
- * crypto_aes_expand_key - Expands the AES key as described in FIPS-197
- * @ctx: The location where the computed key will be stored.
- * @in_key: The supplied key.
- * @key_len: The length of the supplied key.
- *
- * Returns 0 on success. The function fails only if an invalid key size (or
- * pointer) is supplied.
- * The expanded key size is 240 bytes (max of 14 rounds with a unique 16 bytes
- * key schedule plus a 16 bytes key which is used before the first round).
- * The decryption key is prepared for the "Equivalent Inverse Cipher" as
- * described in FIPS-197. The first slot (16 bytes) of each key (enc or dec) is
- * for the initial combination, the second slot for the first round and so on.
- */
-int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
- unsigned int key_len)
-{
- u32 i, t, u, v, w, j;
-
- if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
- key_len != AES_KEYSIZE_256)
- return -EINVAL;
-
- ctx->key_length = key_len;
-
- ctx->key_enc[0] = get_unaligned_le32(in_key);
- ctx->key_enc[1] = get_unaligned_le32(in_key + 4);
- ctx->key_enc[2] = get_unaligned_le32(in_key + 8);
- ctx->key_enc[3] = get_unaligned_le32(in_key + 12);
-
- ctx->key_dec[key_len + 24] = ctx->key_enc[0];
- ctx->key_dec[key_len + 25] = ctx->key_enc[1];
- ctx->key_dec[key_len + 26] = ctx->key_enc[2];
- ctx->key_dec[key_len + 27] = ctx->key_enc[3];
-
- switch (key_len) {
- case AES_KEYSIZE_128:
- t = ctx->key_enc[3];
- for (i = 0; i < 10; ++i)
- loop4(i);
- break;
-
- case AES_KEYSIZE_192:
- ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
- t = ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
- for (i = 0; i < 8; ++i)
- loop6(i);
- break;
-
- case AES_KEYSIZE_256:
- ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
- ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
- ctx->key_enc[6] = get_unaligned_le32(in_key + 24);
- t = ctx->key_enc[7] = get_unaligned_le32(in_key + 28);
- for (i = 0; i < 6; ++i)
- loop8(i);
- loop8tophalf(i);
- break;
- }
-
- ctx->key_dec[0] = ctx->key_enc[key_len + 24];
- ctx->key_dec[1] = ctx->key_enc[key_len + 25];
- ctx->key_dec[2] = ctx->key_enc[key_len + 26];
- ctx->key_dec[3] = ctx->key_enc[key_len + 27];
-
- for (i = 4; i < key_len + 24; ++i) {
- j = key_len + 24 - (i & ~3) + (i & 3);
- imix_col(ctx->key_dec[j], ctx->key_enc[i]);
- }
- return 0;
-}
-EXPORT_SYMBOL_GPL(crypto_aes_expand_key);
/**
* crypto_aes_set_key - Set the AES key.
@@ -1280,24 +1127,18 @@ EXPORT_SYMBOL_GPL(crypto_aes_expand_key);
* @in_key: The input key.
* @key_len: The size of the key.
*
- * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm
- * is set. The function uses crypto_aes_expand_key() to expand the key.
- * &crypto_aes_ctx _must_ be the private data embedded in @tfm which is
- * retrieved with crypto_tfm_ctx().
+ * This function uses aes_expand_key() to expand the key. &crypto_aes_ctx
+ * _must_ be the private data embedded in @tfm which is retrieved with
+ * crypto_tfm_ctx().
+ *
+ * Return: 0 on success; -EINVAL on failure (only happens for bad key lengths)
*/
int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
- int ret;
-
- ret = crypto_aes_expand_key(ctx, in_key, key_len);
- if (!ret)
- return 0;
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
- return -EINVAL;
+ return aes_expandkey(ctx, in_key, key_len);
}
EXPORT_SYMBOL_GPL(crypto_aes_set_key);
@@ -1332,7 +1173,7 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key);
f_rl(bo, bi, 3, k); \
} while (0)
-static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
u32 b0[4], b1[4];
@@ -1402,7 +1243,7 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
i_rl(bo, bi, 3, k); \
} while (0)
-static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
u32 b0[4], b1[4];
@@ -1454,8 +1295,8 @@ static struct crypto_alg aes_alg = {
.cia_min_keysize = AES_MIN_KEY_SIZE,
.cia_max_keysize = AES_MAX_KEY_SIZE,
.cia_setkey = crypto_aes_set_key,
- .cia_encrypt = aes_encrypt,
- .cia_decrypt = aes_decrypt
+ .cia_encrypt = crypto_aes_encrypt,
+ .cia_decrypt = crypto_aes_decrypt
}
}
};
diff --git a/crypto/aes_ti.c b/crypto/aes_ti.c
index 798fc9a2c8d6..205c2c257d49 100644
--- a/crypto/aes_ti.c
+++ b/crypto/aes_ti.c
@@ -8,271 +8,19 @@
#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/module.h>
-#include <asm/unaligned.h>
-
-/*
- * Emit the sbox as volatile const to prevent the compiler from doing
- * constant folding on sbox references involving fixed indexes.
- */
-static volatile const u8 __cacheline_aligned __aesti_sbox[] = {
- 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5,
- 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
- 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0,
- 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
- 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc,
- 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
- 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a,
- 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
- 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0,
- 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
- 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b,
- 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
- 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85,
- 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
- 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5,
- 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
- 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17,
- 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
- 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88,
- 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
- 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c,
- 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
- 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9,
- 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
- 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6,
- 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
- 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e,
- 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
- 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94,
- 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
- 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68,
- 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16,
-};
-
-static volatile const u8 __cacheline_aligned __aesti_inv_sbox[] = {
- 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38,
- 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
- 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87,
- 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
- 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d,
- 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
- 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2,
- 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
- 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16,
- 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
- 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda,
- 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
- 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a,
- 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
- 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02,
- 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
- 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea,
- 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
- 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85,
- 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
- 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89,
- 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
- 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20,
- 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
- 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31,
- 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
- 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d,
- 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
- 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0,
- 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
- 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26,
- 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d,
-};
-
-static u32 mul_by_x(u32 w)
-{
- u32 x = w & 0x7f7f7f7f;
- u32 y = w & 0x80808080;
-
- /* multiply by polynomial 'x' (0b10) in GF(2^8) */
- return (x << 1) ^ (y >> 7) * 0x1b;
-}
-
-static u32 mul_by_x2(u32 w)
-{
- u32 x = w & 0x3f3f3f3f;
- u32 y = w & 0x80808080;
- u32 z = w & 0x40404040;
-
- /* multiply by polynomial 'x^2' (0b100) in GF(2^8) */
- return (x << 2) ^ (y >> 7) * 0x36 ^ (z >> 6) * 0x1b;
-}
-
-static u32 mix_columns(u32 x)
-{
- /*
- * Perform the following matrix multiplication in GF(2^8)
- *
- * | 0x2 0x3 0x1 0x1 | | x[0] |
- * | 0x1 0x2 0x3 0x1 | | x[1] |
- * | 0x1 0x1 0x2 0x3 | x | x[2] |
- * | 0x3 0x1 0x1 0x2 | | x[3] |
- */
- u32 y = mul_by_x(x) ^ ror32(x, 16);
-
- return y ^ ror32(x ^ y, 8);
-}
-
-static u32 inv_mix_columns(u32 x)
-{
- /*
- * Perform the following matrix multiplication in GF(2^8)
- *
- * | 0xe 0xb 0xd 0x9 | | x[0] |
- * | 0x9 0xe 0xb 0xd | | x[1] |
- * | 0xd 0x9 0xe 0xb | x | x[2] |
- * | 0xb 0xd 0x9 0xe | | x[3] |
- *
- * which can conveniently be reduced to
- *
- * | 0x2 0x3 0x1 0x1 | | 0x5 0x0 0x4 0x0 | | x[0] |
- * | 0x1 0x2 0x3 0x1 | | 0x0 0x5 0x0 0x4 | | x[1] |
- * | 0x1 0x1 0x2 0x3 | x | 0x4 0x0 0x5 0x0 | x | x[2] |
- * | 0x3 0x1 0x1 0x2 | | 0x0 0x4 0x0 0x5 | | x[3] |
- */
- u32 y = mul_by_x2(x);
-
- return mix_columns(x ^ y ^ ror32(y, 16));
-}
-
-static __always_inline u32 subshift(u32 in[], int pos)
-{
- return (__aesti_sbox[in[pos] & 0xff]) ^
- (__aesti_sbox[(in[(pos + 1) % 4] >> 8) & 0xff] << 8) ^
- (__aesti_sbox[(in[(pos + 2) % 4] >> 16) & 0xff] << 16) ^
- (__aesti_sbox[(in[(pos + 3) % 4] >> 24) & 0xff] << 24);
-}
-
-static __always_inline u32 inv_subshift(u32 in[], int pos)
-{
- return (__aesti_inv_sbox[in[pos] & 0xff]) ^
- (__aesti_inv_sbox[(in[(pos + 3) % 4] >> 8) & 0xff] << 8) ^
- (__aesti_inv_sbox[(in[(pos + 2) % 4] >> 16) & 0xff] << 16) ^
- (__aesti_inv_sbox[(in[(pos + 1) % 4] >> 24) & 0xff] << 24);
-}
-
-static u32 subw(u32 in)
-{
- return (__aesti_sbox[in & 0xff]) ^
- (__aesti_sbox[(in >> 8) & 0xff] << 8) ^
- (__aesti_sbox[(in >> 16) & 0xff] << 16) ^
- (__aesti_sbox[(in >> 24) & 0xff] << 24);
-}
-
-static int aesti_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
- unsigned int key_len)
-{
- u32 kwords = key_len / sizeof(u32);
- u32 rc, i, j;
-
- if (key_len != AES_KEYSIZE_128 &&
- key_len != AES_KEYSIZE_192 &&
- key_len != AES_KEYSIZE_256)
- return -EINVAL;
-
- ctx->key_length = key_len;
-
- for (i = 0; i < kwords; i++)
- ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));
-
- for (i = 0, rc = 1; i < 10; i++, rc = mul_by_x(rc)) {
- u32 *rki = ctx->key_enc + (i * kwords);
- u32 *rko = rki + kwords;
-
- rko[0] = ror32(subw(rki[kwords - 1]), 8) ^ rc ^ rki[0];
- rko[1] = rko[0] ^ rki[1];
- rko[2] = rko[1] ^ rki[2];
- rko[3] = rko[2] ^ rki[3];
-
- if (key_len == 24) {
- if (i >= 7)
- break;
- rko[4] = rko[3] ^ rki[4];
- rko[5] = rko[4] ^ rki[5];
- } else if (key_len == 32) {
- if (i >= 6)
- break;
- rko[4] = subw(rko[3]) ^ rki[4];
- rko[5] = rko[4] ^ rki[5];
- rko[6] = rko[5] ^ rki[6];
- rko[7] = rko[6] ^ rki[7];
- }
- }
-
- /*
- * Generate the decryption keys for the Equivalent Inverse Cipher.
- * This involves reversing the order of the round keys, and applying
- * the Inverse Mix Columns transformation to all but the first and
- * the last one.
- */
- ctx->key_dec[0] = ctx->key_enc[key_len + 24];
- ctx->key_dec[1] = ctx->key_enc[key_len + 25];
- ctx->key_dec[2] = ctx->key_enc[key_len + 26];
- ctx->key_dec[3] = ctx->key_enc[key_len + 27];
-
- for (i = 4, j = key_len + 20; j > 0; i += 4, j -= 4) {
- ctx->key_dec[i] = inv_mix_columns(ctx->key_enc[j]);
- ctx->key_dec[i + 1] = inv_mix_columns(ctx->key_enc[j + 1]);
- ctx->key_dec[i + 2] = inv_mix_columns(ctx->key_enc[j + 2]);
- ctx->key_dec[i + 3] = inv_mix_columns(ctx->key_enc[j + 3]);
- }
-
- ctx->key_dec[i] = ctx->key_enc[0];
- ctx->key_dec[i + 1] = ctx->key_enc[1];
- ctx->key_dec[i + 2] = ctx->key_enc[2];
- ctx->key_dec[i + 3] = ctx->key_enc[3];
-
- return 0;
-}
static int aesti_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- int err;
-
- err = aesti_expand_key(ctx, in_key, key_len);
- if (err)
- return err;
-
- /*
- * In order to force the compiler to emit data independent Sbox lookups
- * at the start of each block, xor the first round key with values at
- * fixed indexes in the Sbox. This will need to be repeated each time
- * the key is used, which will pull the entire Sbox into the D-cache
- * before any data dependent Sbox lookups are performed.
- */
- ctx->key_enc[0] ^= __aesti_sbox[ 0] ^ __aesti_sbox[128];
- ctx->key_enc[1] ^= __aesti_sbox[32] ^ __aesti_sbox[160];
- ctx->key_enc[2] ^= __aesti_sbox[64] ^ __aesti_sbox[192];
- ctx->key_enc[3] ^= __aesti_sbox[96] ^ __aesti_sbox[224];
-
- ctx->key_dec[0] ^= __aesti_inv_sbox[ 0] ^ __aesti_inv_sbox[128];
- ctx->key_dec[1] ^= __aesti_inv_sbox[32] ^ __aesti_inv_sbox[160];
- ctx->key_dec[2] ^= __aesti_inv_sbox[64] ^ __aesti_inv_sbox[192];
- ctx->key_dec[3] ^= __aesti_inv_sbox[96] ^ __aesti_inv_sbox[224];
- return 0;
+ return aes_expandkey(ctx, in_key, key_len);
}
static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- const u32 *rkp = ctx->key_enc + 4;
- int rounds = 6 + ctx->key_length / 4;
- u32 st0[4], st1[4];
unsigned long flags;
- int round;
-
- st0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
- st0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
- st0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
- st0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
/*
* Temporarily disable interrupts to avoid races where cachelines are
@@ -280,30 +28,7 @@ static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
*/
local_irq_save(flags);
- st0[0] ^= __aesti_sbox[ 0] ^ __aesti_sbox[128];
- st0[1] ^= __aesti_sbox[32] ^ __aesti_sbox[160];
- st0[2] ^= __aesti_sbox[64] ^ __aesti_sbox[192];
- st0[3] ^= __aesti_sbox[96] ^ __aesti_sbox[224];
-
- for (round = 0;; round += 2, rkp += 8) {
- st1[0] = mix_columns(subshift(st0, 0)) ^ rkp[0];
- st1[1] = mix_columns(subshift(st0, 1)) ^ rkp[1];
- st1[2] = mix_columns(subshift(st0, 2)) ^ rkp[2];
- st1[3] = mix_columns(subshift(st0, 3)) ^ rkp[3];
-
- if (round == rounds - 2)
- break;
-
- st0[0] = mix_columns(subshift(st1, 0)) ^ rkp[4];
- st0[1] = mix_columns(subshift(st1, 1)) ^ rkp[5];
- st0[2] = mix_columns(subshift(st1, 2)) ^ rkp[6];
- st0[3] = mix_columns(subshift(st1, 3)) ^ rkp[7];
- }
-
- put_unaligned_le32(subshift(st1, 0) ^ rkp[4], out);
- put_unaligned_le32(subshift(st1, 1) ^ rkp[5], out + 4);
- put_unaligned_le32(subshift(st1, 2) ^ rkp[6], out + 8);
- put_unaligned_le32(subshift(st1, 3) ^ rkp[7], out + 12);
+ aes_encrypt(ctx, out, in);
local_irq_restore(flags);
}
@@ -311,16 +36,7 @@ static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
- const u32 *rkp = ctx->key_dec + 4;
- int rounds = 6 + ctx->key_length / 4;
- u32 st0[4], st1[4];
unsigned long flags;
- int round;
-
- st0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
- st0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
- st0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
- st0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
/*
* Temporarily disable interrupts to avoid races where cachelines are
@@ -328,30 +44,7 @@ static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
*/
local_irq_save(flags);
- st0[0] ^= __aesti_inv_sbox[ 0] ^ __aesti_inv_sbox[128];
- st0[1] ^= __aesti_inv_sbox[32] ^ __aesti_inv_sbox[160];
- st0[2] ^= __aesti_inv_sbox[64] ^ __aesti_inv_sbox[192];
- st0[3] ^= __aesti_inv_sbox[96] ^ __aesti_inv_sbox[224];
-
- for (round = 0;; round += 2, rkp += 8) {
- st1[0] = inv_mix_columns(inv_subshift(st0, 0)) ^ rkp[0];
- st1[1] = inv_mix_columns(inv_subshift(st0, 1)) ^ rkp[1];
- st1[2] = inv_mix_columns(inv_subshift(st0, 2)) ^ rkp[2];
- st1[3] = inv_mix_columns(inv_subshift(st0, 3)) ^ rkp[3];
-
- if (round == rounds - 2)
- break;
-
- st0[0] = inv_mix_columns(inv_subshift(st1, 0)) ^ rkp[4];
- st0[1] = inv_mix_columns(inv_subshift(st1, 1)) ^ rkp[5];
- st0[2] = inv_mix_columns(inv_subshift(st1, 2)) ^ rkp[6];
- st0[3] = inv_mix_columns(inv_subshift(st1, 3)) ^ rkp[7];
- }
-
- put_unaligned_le32(inv_subshift(st1, 0) ^ rkp[4], out);
- put_unaligned_le32(inv_subshift(st1, 1) ^ rkp[5], out + 4);
- put_unaligned_le32(inv_subshift(st1, 2) ^ rkp[6], out + 8);
- put_unaligned_le32(inv_subshift(st1, 3) ^ rkp[7], out + 12);
+ aes_decrypt(ctx, out, in);
local_irq_restore(flags);
}
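
For orientation, the routines that aes_ti.c now delegates to are the shared AES
library helpers declared in <crypto/aes.h>, whose signatures match the calls in
the hunks above. A minimal caller sketch (key and data buffers are placeholders,
not part of the patch):

#include <crypto/aes.h>
#include <linux/string.h>

static int aes_lib_demo(void)
{
	struct crypto_aes_ctx ctx;
	u8 key[AES_KEYSIZE_128] = { 0 };	/* placeholder key */
	u8 blk[AES_BLOCK_SIZE] = { 0 };		/* one 16-byte block */
	int err;

	/* Expands both the encryption and decryption round keys. */
	err = aes_expandkey(&ctx, key, sizeof(key));
	if (err)
		return err;

	aes_encrypt(&ctx, blk, blk);	/* single-block primitive */
	aes_decrypt(&ctx, blk, blk);	/* inverse; round-trips blk */

	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}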
diff --git a/crypto/af_alg.c b/crypto/af_alg.c
index 879cf23f7489..439367a8e95c 100644
--- a/crypto/af_alg.c
+++ b/crypto/af_alg.c
@@ -134,11 +134,13 @@ void af_alg_release_parent(struct sock *sk)
sk = ask->parent;
ask = alg_sk(sk);
- lock_sock(sk);
+ local_bh_disable();
+ bh_lock_sock(sk);
ask->nokey_refcnt -= nokey;
if (!last)
last = !--ask->refcnt;
- release_sock(sk);
+ bh_unlock_sock(sk);
+ local_bh_enable();
if (last)
sock_put(sk);
@@ -169,7 +171,7 @@ static int alg_bind(struct socket *sock, struct sockaddr *uaddr, int addr_len)
sa->salg_name[sizeof(sa->salg_name) + addr_len - sizeof(*sa) - 1] = 0;
type = alg_get_type(sa->salg_type);
- if (IS_ERR(type) && PTR_ERR(type) == -ENOENT) {
+ if (PTR_ERR(type) == -ENOENT) {
request_module("algif-%s", sa->salg_type);
type = alg_get_type(sa->salg_type);
}
@@ -1043,7 +1045,7 @@ void af_alg_async_cb(struct crypto_async_request *_req, int err)
af_alg_free_resources(areq);
sock_put(sk);
- iocb->ki_complete(iocb, err ? err : resultlen, 0);
+ iocb->ki_complete(iocb, err ? err : (int)resultlen, 0);
}
EXPORT_SYMBOL_GPL(af_alg_async_cb);
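
The alg_bind() hunk above can drop the IS_ERR() guard because comparing
PTR_ERR() against a specific errno is already safe for valid pointers: error
pointers live in the top page of the address space, so no real pointer can
alias a small negative errno. The idiom in isolation (names illustrative):

#include <linux/err.h>
#include <linux/types.h>

/* True only when p is exactly ERR_PTR(-ENOENT); safe for valid pointers. */
static bool is_enoent(const void *p)
{
	return PTR_ERR(p) == -ENOENT;
}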
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 3815b363a693..68a0f0cb75c4 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -23,6 +23,8 @@
#include "internal.h"
+static const struct crypto_type crypto_ahash_type;
+
struct ahash_request_priv {
crypto_completion_t complete;
void *data;
@@ -509,6 +511,13 @@ static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
return crypto_alg_extsize(alg);
}
+static void crypto_ahash_free_instance(struct crypto_instance *inst)
+{
+ struct ahash_instance *ahash = ahash_instance(inst);
+
+ ahash->free(ahash);
+}
+
#ifdef CONFIG_NET
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
@@ -542,9 +551,10 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
__crypto_hash_alg_common(alg)->digestsize);
}
-const struct crypto_type crypto_ahash_type = {
+static const struct crypto_type crypto_ahash_type = {
.extsize = crypto_ahash_extsize,
.init_tfm = crypto_ahash_init_tfm,
+ .free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
.show = crypto_ahash_show,
#endif
@@ -554,7 +564,15 @@ const struct crypto_type crypto_ahash_type = {
.type = CRYPTO_ALG_TYPE_AHASH,
.tfmsize = offsetof(struct crypto_ahash, base),
};
-EXPORT_SYMBOL_GPL(crypto_ahash_type);
+
+int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
+ struct crypto_instance *inst,
+ const char *name, u32 type, u32 mask)
+{
+ spawn->base.frontend = &crypto_ahash_type;
+ return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_grab_ahash);
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
u32 mask)
@@ -598,9 +616,9 @@ int crypto_register_ahash(struct ahash_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
-int crypto_unregister_ahash(struct ahash_alg *alg)
+void crypto_unregister_ahash(struct ahash_alg *alg)
{
- return crypto_unregister_alg(&alg->halg.base);
+ crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
@@ -638,6 +656,9 @@ int ahash_register_instance(struct crypto_template *tmpl,
{
int err;
+ if (WARN_ON(!inst->free))
+ return -EINVAL;
+
err = ahash_prepare_alg(&inst->alg);
if (err)
return err;
@@ -646,31 +667,6 @@ int ahash_register_instance(struct crypto_template *tmpl,
}
EXPORT_SYMBOL_GPL(ahash_register_instance);
-void ahash_free_instance(struct crypto_instance *inst)
-{
- crypto_drop_spawn(crypto_instance_ctx(inst));
- kfree(ahash_instance(inst));
-}
-EXPORT_SYMBOL_GPL(ahash_free_instance);
-
-int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
- struct hash_alg_common *alg,
- struct crypto_instance *inst)
-{
- return crypto_init_spawn2(&spawn->base, &alg->base, inst,
- &crypto_ahash_type);
-}
-EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
-
-struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
-{
- struct crypto_alg *alg;
-
- alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
- return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
-}
-EXPORT_SYMBOL_GPL(ahash_attr_alg);
-
bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
struct crypto_alg *alg = &halg->base;
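
crypto_grab_ahash() above replaces the crypto_init_ahash_spawn()/
ahash_attr_alg() pair: the owning instance is passed up front, and because
crypto_grab_spawn() now accepts ERR_PTR names, the crypto_attr_alg_name()
result can be fed straight in. A condensed sketch of a template ->create
using it; the demo_* names are hypothetical:

#include <crypto/internal/hash.h>
#include <linux/slab.h>

struct demo_inst_ctx {
	struct crypto_ahash_spawn auth;
};

static int demo_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct demo_inst_ctx *ctx;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ctx = crypto_instance_ctx(inst);

	/* An ERR_PTR from crypto_attr_alg_name() propagates as err. */
	err = crypto_grab_ahash(&ctx->auth, inst,
				crypto_attr_alg_name(tb[1]), 0, 0);
	if (err)
		kfree(inst);
	/* naming, algorithm setup and registration elided */
	return err;
}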
diff --git a/crypto/akcipher.c b/crypto/akcipher.c
index 7d5cf4939423..f866085c8a4a 100644
--- a/crypto/akcipher.c
+++ b/crypto/akcipher.c
@@ -90,11 +90,12 @@ static const struct crypto_type crypto_akcipher_type = {
.tfmsize = offsetof(struct crypto_akcipher, base),
};
-int crypto_grab_akcipher(struct crypto_akcipher_spawn *spawn, const char *name,
- u32 type, u32 mask)
+int crypto_grab_akcipher(struct crypto_akcipher_spawn *spawn,
+ struct crypto_instance *inst,
+ const char *name, u32 type, u32 mask)
{
spawn->base.frontend = &crypto_akcipher_type;
- return crypto_grab_spawn(&spawn->base, name, type, mask);
+ return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_akcipher);
@@ -146,6 +147,8 @@ EXPORT_SYMBOL_GPL(crypto_unregister_akcipher);
int akcipher_register_instance(struct crypto_template *tmpl,
struct akcipher_instance *inst)
{
+ if (WARN_ON(!inst->free))
+ return -EINVAL;
akcipher_prepare_alg(&inst->alg);
return crypto_register_instance(tmpl, akcipher_crypto_instance(inst));
}
diff --git a/crypto/algapi.c b/crypto/algapi.c
index de30ddc952d8..69605e21af92 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -65,11 +65,6 @@ static int crypto_check_alg(struct crypto_alg *alg)
static void crypto_free_instance(struct crypto_instance *inst)
{
- if (!inst->alg.cra_type->free) {
- inst->tmpl->free(inst);
- return;
- }
-
inst->alg.cra_type->free(inst);
}
@@ -82,6 +77,15 @@ static void crypto_destroy_instance(struct crypto_alg *alg)
crypto_tmpl_put(tmpl);
}
+/*
+ * This function adds a spawn to the list secondary_spawns which
+ * will be used at the end of crypto_remove_spawns to unregister
+ * instances, unless the spawn happens to be one that is depended
+ * on by the new algorithm (nalg in crypto_remove_spawns).
+ *
+ * This function is also responsible for resurrecting any algorithms
+ * in the dependency chain of nalg by unsetting n->dead.
+ */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
struct list_head *stack,
struct list_head *top,
@@ -93,15 +97,17 @@ static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
if (!spawn)
return NULL;
- n = list_next_entry(spawn, list);
+ n = list_prev_entry(spawn, list);
+ list_move(&spawn->list, secondary_spawns);
- if (spawn->alg && &n->list != stack && !n->alg)
- n->alg = (n->list.next == stack) ? alg :
- &list_next_entry(n, list)->inst->alg;
+ if (list_is_last(&n->list, stack))
+ return top;
- list_move(&spawn->list, secondary_spawns);
+ n = list_next_entry(n, list);
+ if (!spawn->dead)
+ n->dead = false;
- return &n->list == stack ? top : &n->inst->alg.cra_users;
+ return &n->inst->alg.cra_users;
}
static void crypto_remove_instance(struct crypto_instance *inst,
@@ -113,8 +119,6 @@ static void crypto_remove_instance(struct crypto_instance *inst,
return;
inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
- if (hlist_unhashed(&inst->list))
- return;
if (!tmpl || !crypto_tmpl_get(tmpl))
return;
@@ -126,6 +130,12 @@ static void crypto_remove_instance(struct crypto_instance *inst,
BUG_ON(!list_empty(&inst->alg.cra_users));
}
+/*
+ * Given an algorithm alg, remove all algorithms that depend on it
+ * through spawns. If nalg is not null, then exempt any algorithms
+ * that is depended on by nalg. This is useful when nalg itself
+ * depends on alg.
+ */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
struct crypto_alg *nalg)
{
@@ -144,6 +154,11 @@ void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
list_move(&spawn->list, &top);
}
+ /*
+ * Perform a depth-first walk starting from alg through
+ * the cra_users tree. The list stack records the path
+ * from alg to the current spawn.
+ */
spawns = &top;
do {
while (!list_empty(spawns)) {
@@ -153,17 +168,26 @@ void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
list);
inst = spawn->inst;
- BUG_ON(&inst->alg == alg);
-
list_move(&spawn->list, &stack);
+ spawn->dead = !spawn->registered || &inst->alg != nalg;
+
+ if (!spawn->registered)
+ break;
+
+ BUG_ON(&inst->alg == alg);
if (&inst->alg == nalg)
break;
- spawn->alg = NULL;
spawns = &inst->alg.cra_users;
/*
+			 * Even if spawn->registered is true, the
+			 * instance itself may still be unregistered,
+			 * since registration may have failed. Hence
+			 * the test below is still needed.
+ *
* We may encounter an unregistered instance here, since
* an instance's spawns are set up prior to the instance
* being registered. An unregistered instance will have
@@ -178,10 +202,15 @@ void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
} while ((spawns = crypto_more_spawns(alg, &stack, &top,
&secondary_spawns)));
+ /*
+ * Remove all instances that are marked as dead. Also
+ * complete the resurrection of the others by moving them
+ * back to the cra_users list.
+ */
list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
- if (spawn->alg)
+ if (!spawn->dead)
list_move(&spawn->list, &spawn->alg->cra_users);
- else
+ else if (spawn->registered)
crypto_remove_instance(spawn->inst, list);
}
}
@@ -257,6 +286,7 @@ void crypto_alg_tested(const char *name, int err)
struct crypto_alg *alg;
struct crypto_alg *q;
LIST_HEAD(list);
+ bool best;
down_write(&crypto_alg_sem);
list_for_each_entry(q, &crypto_alg_list, cra_list) {
@@ -280,6 +310,21 @@ found:
alg->cra_flags |= CRYPTO_ALG_TESTED;
+ /* Only satisfy larval waiters if we are the best. */
+ best = true;
+ list_for_each_entry(q, &crypto_alg_list, cra_list) {
+ if (crypto_is_moribund(q) || !crypto_is_larval(q))
+ continue;
+
+ if (strcmp(alg->cra_name, q->cra_name))
+ continue;
+
+ if (q->cra_priority > alg->cra_priority) {
+ best = false;
+ break;
+ }
+ }
+
list_for_each_entry(q, &crypto_alg_list, cra_list) {
if (q == alg)
continue;
@@ -303,10 +348,12 @@ found:
continue;
if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
continue;
- if (!crypto_mod_get(alg))
- continue;
- larval->adult = alg;
+ if (best && crypto_mod_get(alg))
+ larval->adult = alg;
+ else
+ larval->adult = ERR_PTR(-EAGAIN);
+
continue;
}
@@ -397,7 +444,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
return 0;
}
-int crypto_unregister_alg(struct crypto_alg *alg)
+void crypto_unregister_alg(struct crypto_alg *alg)
{
int ret;
LIST_HEAD(list);
@@ -406,15 +453,14 @@ int crypto_unregister_alg(struct crypto_alg *alg)
ret = crypto_remove_alg(alg, &list);
up_write(&crypto_alg_sem);
- if (ret)
- return ret;
+ if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
+ return;
BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
if (alg->cra_destroy)
alg->cra_destroy(alg);
crypto_remove_final(&list);
- return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
@@ -438,18 +484,12 @@ err:
}
EXPORT_SYMBOL_GPL(crypto_register_algs);
-int crypto_unregister_algs(struct crypto_alg *algs, int count)
+void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
- int i, ret;
-
- for (i = 0; i < count; i++) {
- ret = crypto_unregister_alg(&algs[i]);
- if (ret)
- pr_err("Failed to unregister %s %s: %d\n",
- algs[i].cra_driver_name, algs[i].cra_name, ret);
- }
+ int i;
- return 0;
+ for (i = 0; i < count; i++)
+ crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
@@ -561,6 +601,7 @@ int crypto_register_instance(struct crypto_template *tmpl,
struct crypto_instance *inst)
{
struct crypto_larval *larval;
+ struct crypto_spawn *spawn;
int err;
err = crypto_check_alg(&inst->alg);
@@ -572,6 +613,22 @@ int crypto_register_instance(struct crypto_template *tmpl,
down_write(&crypto_alg_sem);
+ larval = ERR_PTR(-EAGAIN);
+ for (spawn = inst->spawns; spawn;) {
+ struct crypto_spawn *next;
+
+ if (spawn->dead)
+ goto unlock;
+
+ next = spawn->next;
+ spawn->inst = inst;
+ spawn->registered = true;
+
+ crypto_mod_put(spawn->alg);
+
+ spawn = next;
+ }
+
larval = __crypto_register_alg(&inst->alg);
if (IS_ERR(larval))
goto unlock;
@@ -594,7 +651,7 @@ err:
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
-int crypto_unregister_instance(struct crypto_instance *inst)
+void crypto_unregister_instance(struct crypto_instance *inst)
{
LIST_HEAD(list);
@@ -606,97 +663,70 @@ int crypto_unregister_instance(struct crypto_instance *inst)
up_write(&crypto_alg_sem);
crypto_remove_final(&list);
-
- return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
-int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
- struct crypto_instance *inst, u32 mask)
+int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
+ const char *name, u32 type, u32 mask)
{
+ struct crypto_alg *alg;
int err = -EAGAIN;
if (WARN_ON_ONCE(inst == NULL))
return -EINVAL;
- spawn->inst = inst;
- spawn->mask = mask;
+ /* Allow the result of crypto_attr_alg_name() to be passed directly */
+ if (IS_ERR(name))
+ return PTR_ERR(name);
+
+ alg = crypto_find_alg(name, spawn->frontend, type, mask);
+ if (IS_ERR(alg))
+ return PTR_ERR(alg);
down_write(&crypto_alg_sem);
if (!crypto_is_moribund(alg)) {
list_add(&spawn->list, &alg->cra_users);
spawn->alg = alg;
+ spawn->mask = mask;
+ spawn->next = inst->spawns;
+ inst->spawns = spawn;
err = 0;
}
up_write(&crypto_alg_sem);
-
- return err;
-}
-EXPORT_SYMBOL_GPL(crypto_init_spawn);
-
-int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
- struct crypto_instance *inst,
- const struct crypto_type *frontend)
-{
- int err = -EINVAL;
-
- if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
- goto out;
-
- spawn->frontend = frontend;
- err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
-
-out:
- return err;
-}
-EXPORT_SYMBOL_GPL(crypto_init_spawn2);
-
-int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
- u32 type, u32 mask)
-{
- struct crypto_alg *alg;
- int err;
-
- alg = crypto_find_alg(name, spawn->frontend, type, mask);
- if (IS_ERR(alg))
- return PTR_ERR(alg);
-
- err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
- crypto_mod_put(alg);
+ if (err)
+ crypto_mod_put(alg);
return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
- if (!spawn->alg)
+ if (!spawn->alg) /* not yet initialized? */
return;
down_write(&crypto_alg_sem);
- list_del(&spawn->list);
+ if (!spawn->dead)
+ list_del(&spawn->list);
up_write(&crypto_alg_sem);
+
+ if (!spawn->registered)
+ crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);
static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
struct crypto_alg *alg;
- struct crypto_alg *alg2;
down_read(&crypto_alg_sem);
alg = spawn->alg;
- alg2 = alg;
- if (alg2)
- alg2 = crypto_mod_get(alg2);
- up_read(&crypto_alg_sem);
-
- if (!alg2) {
- if (alg)
- crypto_shoot_alg(alg);
- return ERR_PTR(-EAGAIN);
+ if (!spawn->dead && !crypto_mod_get(alg)) {
+ alg->cra_flags |= CRYPTO_ALG_DYING;
+ alg = NULL;
}
+ up_read(&crypto_alg_sem);
- return alg;
+ return alg ?: ERR_PTR(-EAGAIN);
}
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
@@ -809,20 +839,6 @@ const char *crypto_attr_alg_name(struct rtattr *rta)
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
-struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
- const struct crypto_type *frontend,
- u32 type, u32 mask)
-{
- const char *name;
-
- name = crypto_attr_alg_name(rta);
- if (IS_ERR(name))
- return ERR_CAST(name);
-
- return crypto_find_alg(name, frontend, type, mask);
-}
-EXPORT_SYMBOL_GPL(crypto_attr_alg2);
-
int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
struct crypto_attr_u32 *nu32;
@@ -856,32 +872,6 @@ int crypto_inst_setname(struct crypto_instance *inst, const char *name,
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
-void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
- unsigned int head)
-{
- struct crypto_instance *inst;
- char *p;
- int err;
-
- p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
- GFP_KERNEL);
- if (!p)
- return ERR_PTR(-ENOMEM);
-
- inst = (void *)(p + head);
-
- err = crypto_inst_setname(inst, name, alg);
- if (err)
- goto err_free_inst;
-
- return p;
-
-err_free_inst:
- kfree(p);
- return ERR_PTR(err);
-}
-EXPORT_SYMBOL_GPL(crypto_alloc_instance);
-
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
INIT_LIST_HEAD(&queue->list);
@@ -1052,32 +1042,6 @@ void crypto_stats_get(struct crypto_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_stats_get);
-void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret,
- struct crypto_alg *alg)
-{
- if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
- atomic64_inc(&alg->stats.cipher.err_cnt);
- } else {
- atomic64_inc(&alg->stats.cipher.encrypt_cnt);
- atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen);
- }
- crypto_alg_put(alg);
-}
-EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_encrypt);
-
-void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret,
- struct crypto_alg *alg)
-{
- if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
- atomic64_inc(&alg->stats.cipher.err_cnt);
- } else {
- atomic64_inc(&alg->stats.cipher.decrypt_cnt);
- atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen);
- }
- crypto_alg_put(alg);
-}
-EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_decrypt);
-
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
int ret)
{
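
Taken together, the algapi.c hunks retire the old spawn->alg = NULL protocol
in favour of two explicit state bits. A rough lifecycle summary, as inferred
from the hunks above (not authoritative):

/*
 * crypto_grab_spawn()        - links the spawn into alg->cra_users and
 *                              inst->spawns; takes a ref on the target
 * crypto_register_instance() - walks inst->spawns, sets ->registered and
 *                              drops that ref (the cra_users link now
 *                              pins the target)
 * crypto_remove_spawns()     - marks dependent spawns ->dead and tears
 *                              down their registered instances
 * crypto_drop_spawn()        - unlinks unless already dead; puts the
 *                              ref only if registration never happened
 */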
diff --git a/crypto/algboss.c b/crypto/algboss.c
index a62149d6c839..535f1f87e6c1 100644
--- a/crypto/algboss.c
+++ b/crypto/algboss.c
@@ -58,7 +58,6 @@ static int cryptomgr_probe(void *data)
{
struct cryptomgr_param *param = data;
struct crypto_template *tmpl;
- struct crypto_instance *inst;
int err;
tmpl = crypto_lookup_template(param->template);
@@ -66,16 +65,7 @@ static int cryptomgr_probe(void *data)
goto out;
do {
- if (tmpl->create) {
- err = tmpl->create(tmpl, param->tb);
- continue;
- }
-
- inst = tmpl->alloc(param->tb);
- if (IS_ERR(inst))
- err = PTR_ERR(inst);
- else if ((err = crypto_register_instance(tmpl, inst)))
- tmpl->free(inst);
+ err = tmpl->create(tmpl, param->tb);
} while (err == -EAGAIN && !signal_pending(current));
crypto_tmpl_put(tmpl);
diff --git a/crypto/algif_skcipher.c b/crypto/algif_skcipher.c
index c1601edd70e3..e2c8ab408bed 100644
--- a/crypto/algif_skcipher.c
+++ b/crypto/algif_skcipher.c
@@ -56,7 +56,7 @@ static int _skcipher_recvmsg(struct socket *sock, struct msghdr *msg,
struct alg_sock *pask = alg_sk(psk);
struct af_alg_ctx *ctx = ask->private;
struct crypto_skcipher *tfm = pask->private;
- unsigned int bs = crypto_skcipher_blocksize(tfm);
+ unsigned int bs = crypto_skcipher_chunksize(tfm);
struct af_alg_async_req *areq;
int err = 0;
size_t len = 0;
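
Using crypto_skcipher_chunksize() here matters for stream-cipher modes:
"ctr(aes)", for instance, advertises a block size of 1 byte, while its chunk
size is the underlying 16-byte AES block, the smallest unit at which a request
may be split without desynchronising the keystream. In sketch form:

#include <crypto/skcipher.h>

/* Bound a request by the chunk size, not the (possibly 1-byte) block size. */
static unsigned int recv_unit(struct crypto_skcipher *tfm)
{
	return crypto_skcipher_chunksize(tfm);	/* not _blocksize() */
}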
diff --git a/crypto/anubis.c b/crypto/anubis.c
index f9ce78fde6ee..5da0241ef453 100644
--- a/crypto/anubis.c
+++ b/crypto/anubis.c
@@ -464,7 +464,6 @@ static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
{
struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
const __be32 *key = (const __be32 *)in_key;
- u32 *flags = &tfm->crt_flags;
int N, R, i, r;
u32 kappa[ANUBIS_MAX_N];
u32 inter[ANUBIS_MAX_N];
@@ -474,7 +473,6 @@ static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
case 32: case 36: case 40:
break;
default:
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
}
diff --git a/crypto/api.c b/crypto/api.c
index d8ba54142620..7d71a9b10e5f 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -97,7 +97,7 @@ static void crypto_larval_destroy(struct crypto_alg *alg)
struct crypto_larval *larval = (void *)alg;
BUG_ON(!crypto_is_larval(alg));
- if (larval->adult)
+ if (!IS_ERR_OR_NULL(larval->adult))
crypto_mod_put(larval->adult);
kfree(larval);
}
@@ -178,6 +178,8 @@ static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
alg = ERR_PTR(-ETIMEDOUT);
else if (!alg)
alg = ERR_PTR(-ENOENT);
+ else if (IS_ERR(alg))
+ ;
else if (crypto_is_test_larval(larval) &&
!(alg->cra_flags & CRYPTO_ALG_TESTED))
alg = ERR_PTR(-EAGAIN);
@@ -295,20 +297,7 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
if (type_obj)
return type_obj->init(tfm, type, mask);
-
- switch (crypto_tfm_alg_type(tfm)) {
- case CRYPTO_ALG_TYPE_CIPHER:
- return crypto_init_cipher_ops(tfm);
-
- case CRYPTO_ALG_TYPE_COMPRESS:
- return crypto_init_compress_ops(tfm);
-
- default:
- break;
- }
-
- BUG();
- return -EINVAL;
+ return 0;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
@@ -344,13 +333,12 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
return len;
}
-void crypto_shoot_alg(struct crypto_alg *alg)
+static void crypto_shoot_alg(struct crypto_alg *alg)
{
down_write(&crypto_alg_sem);
alg->cra_flags |= CRYPTO_ALG_DYING;
up_write(&crypto_alg_sem);
}
-EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
u32 mask)
@@ -406,7 +394,7 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
*
* The returned transform is of a non-determinate type. Most people
* should use one of the more specific allocation functions such as
- * crypto_alloc_blkcipher.
+ * crypto_alloc_skcipher().
*
* In case of error the return value is an error pointer.
*/
@@ -516,7 +504,7 @@ EXPORT_SYMBOL_GPL(crypto_find_alg);
*
* The returned transform is of a non-determinate type. Most people
* should use one of the more specific allocation functions such as
- * crypto_alloc_blkcipher.
+ * crypto_alloc_skcipher().
*
* In case of error the return value is an error pointer.
*/
@@ -608,3 +596,4 @@ EXPORT_SYMBOL_GPL(crypto_req_done);
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
+MODULE_SOFTDEP("pre: cryptomgr");
diff --git a/crypto/asymmetric_keys/asym_tpm.c b/crypto/asymmetric_keys/asym_tpm.c
index 76d2ce3a1b5b..378b18b9bc34 100644
--- a/crypto/asymmetric_keys/asym_tpm.c
+++ b/crypto/asymmetric_keys/asym_tpm.c
@@ -13,7 +13,7 @@
#include <crypto/sha.h>
#include <asm/unaligned.h>
#include <keys/asymmetric-subtype.h>
-#include <keys/trusted.h>
+#include <keys/trusted_tpm.h>
#include <crypto/asym_tpm_subtype.h>
#include <crypto/public_key.h>
@@ -21,10 +21,6 @@
#define TPM_ORD_LOADKEY2 65
#define TPM_ORD_UNBIND 30
#define TPM_ORD_SIGN 60
-#define TPM_LOADKEY2_SIZE 59
-#define TPM_FLUSHSPECIFIC_SIZE 18
-#define TPM_UNBIND_SIZE 63
-#define TPM_SIGN_SIZE 63
#define TPM_RT_KEY 0x00000001
@@ -68,16 +64,13 @@ static int tpm_loadkey2(struct tpm_buf *tb,
return ret;
/* build the request buffer */
- INIT_BUF(tb);
- store16(tb, TPM_TAG_RQU_AUTH1_COMMAND);
- store32(tb, TPM_LOADKEY2_SIZE + keybloblen);
- store32(tb, TPM_ORD_LOADKEY2);
- store32(tb, keyhandle);
- storebytes(tb, keyblob, keybloblen);
- store32(tb, authhandle);
- storebytes(tb, nonceodd, TPM_NONCE_SIZE);
- store8(tb, cont);
- storebytes(tb, authdata, SHA1_DIGEST_SIZE);
+ tpm_buf_reset(tb, TPM_TAG_RQU_AUTH1_COMMAND, TPM_ORD_LOADKEY2);
+ tpm_buf_append_u32(tb, keyhandle);
+ tpm_buf_append(tb, keyblob, keybloblen);
+ tpm_buf_append_u32(tb, authhandle);
+ tpm_buf_append(tb, nonceodd, TPM_NONCE_SIZE);
+ tpm_buf_append_u8(tb, cont);
+ tpm_buf_append(tb, authdata, SHA1_DIGEST_SIZE);
ret = trusted_tpm_send(tb->data, MAX_BUF_SIZE);
if (ret < 0) {
@@ -101,12 +94,9 @@ static int tpm_loadkey2(struct tpm_buf *tb,
*/
static int tpm_flushspecific(struct tpm_buf *tb, uint32_t handle)
{
- INIT_BUF(tb);
- store16(tb, TPM_TAG_RQU_COMMAND);
- store32(tb, TPM_FLUSHSPECIFIC_SIZE);
- store32(tb, TPM_ORD_FLUSHSPECIFIC);
- store32(tb, handle);
- store32(tb, TPM_RT_KEY);
+ tpm_buf_reset(tb, TPM_TAG_RQU_COMMAND, TPM_ORD_FLUSHSPECIFIC);
+ tpm_buf_append_u32(tb, handle);
+ tpm_buf_append_u32(tb, TPM_RT_KEY);
return trusted_tpm_send(tb->data, MAX_BUF_SIZE);
}
@@ -155,17 +145,14 @@ static int tpm_unbind(struct tpm_buf *tb,
return ret;
/* build the request buffer */
- INIT_BUF(tb);
- store16(tb, TPM_TAG_RQU_AUTH1_COMMAND);
- store32(tb, TPM_UNBIND_SIZE + bloblen);
- store32(tb, TPM_ORD_UNBIND);
- store32(tb, keyhandle);
- store32(tb, bloblen);
- storebytes(tb, blob, bloblen);
- store32(tb, authhandle);
- storebytes(tb, nonceodd, TPM_NONCE_SIZE);
- store8(tb, cont);
- storebytes(tb, authdata, SHA1_DIGEST_SIZE);
+ tpm_buf_reset(tb, TPM_TAG_RQU_AUTH1_COMMAND, TPM_ORD_UNBIND);
+ tpm_buf_append_u32(tb, keyhandle);
+ tpm_buf_append_u32(tb, bloblen);
+ tpm_buf_append(tb, blob, bloblen);
+ tpm_buf_append_u32(tb, authhandle);
+ tpm_buf_append(tb, nonceodd, TPM_NONCE_SIZE);
+ tpm_buf_append_u8(tb, cont);
+ tpm_buf_append(tb, authdata, SHA1_DIGEST_SIZE);
ret = trusted_tpm_send(tb->data, MAX_BUF_SIZE);
if (ret < 0) {
@@ -241,17 +228,14 @@ static int tpm_sign(struct tpm_buf *tb,
return ret;
/* build the request buffer */
- INIT_BUF(tb);
- store16(tb, TPM_TAG_RQU_AUTH1_COMMAND);
- store32(tb, TPM_SIGN_SIZE + bloblen);
- store32(tb, TPM_ORD_SIGN);
- store32(tb, keyhandle);
- store32(tb, bloblen);
- storebytes(tb, blob, bloblen);
- store32(tb, authhandle);
- storebytes(tb, nonceodd, TPM_NONCE_SIZE);
- store8(tb, cont);
- storebytes(tb, authdata, SHA1_DIGEST_SIZE);
+ tpm_buf_reset(tb, TPM_TAG_RQU_AUTH1_COMMAND, TPM_ORD_SIGN);
+ tpm_buf_append_u32(tb, keyhandle);
+ tpm_buf_append_u32(tb, bloblen);
+ tpm_buf_append(tb, blob, bloblen);
+ tpm_buf_append_u32(tb, authhandle);
+ tpm_buf_append(tb, nonceodd, TPM_NONCE_SIZE);
+ tpm_buf_append_u8(tb, cont);
+ tpm_buf_append(tb, authdata, SHA1_DIGEST_SIZE);
ret = trusted_tpm_send(tb->data, MAX_BUF_SIZE);
if (ret < 0) {
@@ -486,6 +470,7 @@ static int tpm_key_encrypt(struct tpm_key *tk,
if (ret < 0)
goto error_free_tfm;
+ ret = -ENOMEM;
req = akcipher_request_alloc(tfm, GFP_KERNEL);
if (!req)
goto error_free_tfm;
@@ -519,7 +504,7 @@ static int tpm_key_decrypt(struct tpm_key *tk,
struct kernel_pkey_params *params,
const void *in, void *out)
{
- struct tpm_buf *tb;
+ struct tpm_buf tb;
uint32_t keyhandle;
uint8_t srkauth[SHA1_DIGEST_SIZE];
uint8_t keyauth[SHA1_DIGEST_SIZE];
@@ -533,14 +518,14 @@ static int tpm_key_decrypt(struct tpm_key *tk,
if (strcmp(params->encoding, "pkcs1"))
return -ENOPKG;
- tb = kzalloc(sizeof(*tb), GFP_KERNEL);
- if (!tb)
- return -ENOMEM;
+ r = tpm_buf_init(&tb, 0, 0);
+ if (r)
+ return r;
/* TODO: Handle a non-all zero SRK authorization */
memset(srkauth, 0, sizeof(srkauth));
- r = tpm_loadkey2(tb, SRKHANDLE, srkauth,
+ r = tpm_loadkey2(&tb, SRKHANDLE, srkauth,
tk->blob, tk->blob_len, &keyhandle);
if (r < 0) {
pr_devel("loadkey2 failed (%d)\n", r);
@@ -550,16 +535,16 @@ static int tpm_key_decrypt(struct tpm_key *tk,
/* TODO: Handle a non-all zero key authorization */
memset(keyauth, 0, sizeof(keyauth));
- r = tpm_unbind(tb, keyhandle, keyauth,
+ r = tpm_unbind(&tb, keyhandle, keyauth,
in, params->in_len, out, params->out_len);
if (r < 0)
pr_devel("tpm_unbind failed (%d)\n", r);
- if (tpm_flushspecific(tb, keyhandle) < 0)
+ if (tpm_flushspecific(&tb, keyhandle) < 0)
pr_devel("flushspecific failed (%d)\n", r);
error:
- kzfree(tb);
+ tpm_buf_destroy(&tb);
pr_devel("<==%s() = %d\n", __func__, r);
return r;
}
@@ -643,7 +628,7 @@ static int tpm_key_sign(struct tpm_key *tk,
struct kernel_pkey_params *params,
const void *in, void *out)
{
- struct tpm_buf *tb;
+ struct tpm_buf tb;
uint32_t keyhandle;
uint8_t srkauth[SHA1_DIGEST_SIZE];
uint8_t keyauth[SHA1_DIGEST_SIZE];
@@ -681,15 +666,14 @@ static int tpm_key_sign(struct tpm_key *tk,
goto error_free_asn1_wrapped;
}
- r = -ENOMEM;
- tb = kzalloc(sizeof(*tb), GFP_KERNEL);
- if (!tb)
+ r = tpm_buf_init(&tb, 0, 0);
+ if (r)
goto error_free_asn1_wrapped;
/* TODO: Handle a non-all zero SRK authorization */
memset(srkauth, 0, sizeof(srkauth));
- r = tpm_loadkey2(tb, SRKHANDLE, srkauth,
+ r = tpm_loadkey2(&tb, SRKHANDLE, srkauth,
tk->blob, tk->blob_len, &keyhandle);
if (r < 0) {
pr_devel("loadkey2 failed (%d)\n", r);
@@ -699,15 +683,15 @@ static int tpm_key_sign(struct tpm_key *tk,
/* TODO: Handle a non-all zero key authorization */
memset(keyauth, 0, sizeof(keyauth));
- r = tpm_sign(tb, keyhandle, keyauth, in, in_len, out, params->out_len);
+ r = tpm_sign(&tb, keyhandle, keyauth, in, in_len, out, params->out_len);
if (r < 0)
pr_devel("tpm_sign failed (%d)\n", r);
- if (tpm_flushspecific(tb, keyhandle) < 0)
+ if (tpm_flushspecific(&tb, keyhandle) < 0)
pr_devel("flushspecific failed (%d)\n", r);
error_free_tb:
- kzfree(tb);
+ tpm_buf_destroy(&tb);
error_free_asn1_wrapped:
kfree(asn1_wrapped);
pr_devel("<==%s() = %d\n", __func__, r);
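
The asym_tpm.c conversion swaps the open-coded store8/16/32 helpers and
hand-computed TPM_*_SIZE constants for the tpm_buf API, which writes the
tag/ordinal header itself and keeps the length field current as data is
appended. The flush-specific command above, restated as a free-standing sketch:

#include <linux/tpm.h>
#include <keys/trusted_tpm.h>

static int flush_key_demo(uint32_t handle)
{
	struct tpm_buf tb;
	int rc;

	rc = tpm_buf_init(&tb, 0, 0);
	if (rc)
		return rc;

	/* Header (tag, length, ordinal) is managed by tpm_buf itself. */
	tpm_buf_reset(&tb, TPM_TAG_RQU_COMMAND, TPM_ORD_FLUSHSPECIFIC);
	tpm_buf_append_u32(&tb, handle);
	tpm_buf_append_u32(&tb, TPM_RT_KEY);

	rc = trusted_tpm_send(tb.data, MAX_BUF_SIZE);
	tpm_buf_destroy(&tb);
	return rc;
}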
diff --git a/crypto/asymmetric_keys/pkcs7_verify.c b/crypto/asymmetric_keys/pkcs7_verify.c
index 11bee67fa9cc..ce49820caa97 100644
--- a/crypto/asymmetric_keys/pkcs7_verify.c
+++ b/crypto/asymmetric_keys/pkcs7_verify.c
@@ -12,6 +12,7 @@
#include <linux/err.h>
#include <linux/asn1.h>
#include <crypto/hash.h>
+#include <crypto/hash_info.h>
#include <crypto/public_key.h>
#include "pkcs7_parser.h"
@@ -29,6 +30,10 @@ static int pkcs7_digest(struct pkcs7_message *pkcs7,
kenter(",%u,%s", sinfo->index, sinfo->sig->hash_algo);
+ /* The digest was calculated already. */
+ if (sig->digest)
+ return 0;
+
if (!sinfo->sig->hash_algo)
return -ENOPKG;
@@ -117,6 +122,34 @@ error_no_desc:
return ret;
}
+int pkcs7_get_digest(struct pkcs7_message *pkcs7, const u8 **buf, u32 *len,
+ enum hash_algo *hash_algo)
+{
+ struct pkcs7_signed_info *sinfo = pkcs7->signed_infos;
+ int i, ret;
+
+ /*
+ * This function doesn't support messages with more than one signature.
+ */
+ if (sinfo == NULL || sinfo->next != NULL)
+ return -EBADMSG;
+
+ ret = pkcs7_digest(pkcs7, sinfo);
+ if (ret)
+ return ret;
+
+ *buf = sinfo->sig->digest;
+ *len = sinfo->sig->digest_size;
+
+ for (i = 0; i < HASH_ALGO__LAST; i++)
+ if (!strcmp(hash_algo_name[i], sinfo->sig->hash_algo)) {
+ *hash_algo = i;
+ break;
+ }
+
+ return 0;
+}
+
/*
* Find the key (X.509 certificate) to use to verify a PKCS#7 message. PKCS#7
* uses the issuer's name and the issuing certificate serial number for
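
A hypothetical caller of the new pkcs7_get_digest() helper, with a parsed
single-signature message in hand; the pr_info() reporting is illustrative only:

#include <crypto/pkcs7.h>
#include <crypto/hash_info.h>
#include <linux/kernel.h>

static int report_digest(struct pkcs7_message *pkcs7)
{
	const u8 *digest;
	u32 digest_len;
	enum hash_algo algo;
	int rc;

	rc = pkcs7_get_digest(pkcs7, &digest, &digest_len, &algo);
	if (rc)
		return rc;	/* -EBADMSG for multi-signature messages */

	pr_info("%s digest, %u bytes\n", hash_algo_name[algo], digest_len);
	return 0;
}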
diff --git a/crypto/asymmetric_keys/public_key.c b/crypto/asymmetric_keys/public_key.c
index 364b9df9d631..d7f43d4ea925 100644
--- a/crypto/asymmetric_keys/public_key.c
+++ b/crypto/asymmetric_keys/public_key.c
@@ -184,6 +184,7 @@ static int software_key_eds_op(struct kernel_pkey_params *params,
if (IS_ERR(tfm))
return PTR_ERR(tfm);
+ ret = -ENOMEM;
req = akcipher_request_alloc(tfm, GFP_KERNEL);
if (!req)
goto error_free_tfm;
diff --git a/crypto/asymmetric_keys/verify_pefile.c b/crypto/asymmetric_keys/verify_pefile.c
index 3b303fe2f061..cc9dbcecaaca 100644
--- a/crypto/asymmetric_keys/verify_pefile.c
+++ b/crypto/asymmetric_keys/verify_pefile.c
@@ -96,7 +96,7 @@ static int pefile_parse_binary(const void *pebuf, unsigned int pelen,
if (!ddir->certs.virtual_address || !ddir->certs.size) {
pr_debug("Unsigned PE binary\n");
- return -EKEYREJECTED;
+ return -ENODATA;
}
chkaddr(ctx->header_size, ddir->certs.virtual_address,
@@ -403,6 +403,8 @@ error_no_desc:
* (*) 0 if at least one signature chain intersects with the keys in the trust
* keyring, or:
*
+ * (*) -ENODATA if there is no signature present.
+ *
* (*) -ENOPKG if a suitable crypto module couldn't be found for a check on a
* chain.
*
diff --git a/crypto/authenc.c b/crypto/authenc.c
index 3f0ed9402582..775e7138fd10 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -91,15 +91,12 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
int err = -EINVAL;
if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
- goto badkey;
+ goto out;
crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc) &
CRYPTO_TFM_REQ_MASK);
err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
- crypto_aead_set_flags(authenc, crypto_ahash_get_flags(auth) &
- CRYPTO_TFM_RES_MASK);
-
if (err)
goto out;
@@ -107,16 +104,9 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc) &
CRYPTO_TFM_REQ_MASK);
err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
- crypto_aead_set_flags(authenc, crypto_skcipher_get_flags(enc) &
- CRYPTO_TFM_RES_MASK);
-
out:
memzero_explicit(&keys, sizeof(keys));
return err;
-
-badkey:
- crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
- goto out;
}
static void authenc_geniv_ahash_done(struct crypto_async_request *areq, int err)
@@ -383,12 +373,12 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
struct rtattr **tb)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
+ struct authenc_instance_ctx *ctx;
struct hash_alg_common *auth;
struct crypto_alg *auth_base;
struct skcipher_alg *enc;
- struct authenc_instance_ctx *ctx;
- const char *enc_name;
int err;
algt = crypto_get_attr_type(tb);
@@ -398,38 +388,24 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
- auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_AHASH_MASK |
- crypto_requires_sync(algt->type, algt->mask));
- if (IS_ERR(auth))
- return PTR_ERR(auth);
-
- auth_base = &auth->base;
-
- enc_name = crypto_attr_alg_name(tb[2]);
- err = PTR_ERR(enc_name);
- if (IS_ERR(enc_name))
- goto out_put_auth;
+ mask = crypto_requires_sync(algt->type, algt->mask);
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
- err = -ENOMEM;
if (!inst)
- goto out_put_auth;
-
+ return -ENOMEM;
ctx = aead_instance_ctx(inst);
- err = crypto_init_ahash_spawn(&ctx->auth, auth,
- aead_crypto_instance(inst));
+ err = crypto_grab_ahash(&ctx->auth, aead_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, mask);
if (err)
goto err_free_inst;
+ auth = crypto_spawn_ahash_alg(&ctx->auth);
+ auth_base = &auth->base;
- crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
- err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(&ctx->enc, aead_crypto_instance(inst),
+ crypto_attr_alg_name(tb[2]), 0, mask);
if (err)
- goto err_drop_auth;
-
+ goto err_free_inst;
enc = crypto_spawn_skcipher_alg(&ctx->enc);
ctx->reqoff = ALIGN(2 * auth->digestsize + auth_base->cra_alignmask,
@@ -440,12 +416,12 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
"authenc(%s,%s)", auth_base->cra_name,
enc->base.cra_name) >=
CRYPTO_MAX_ALG_NAME)
- goto err_drop_enc;
+ goto err_free_inst;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"authenc(%s,%s)", auth_base->cra_driver_name,
enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
- goto err_drop_enc;
+ goto err_free_inst;
inst->alg.base.cra_flags = (auth_base->cra_flags |
enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
@@ -470,21 +446,11 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
inst->free = crypto_authenc_free;
err = aead_register_instance(tmpl, inst);
- if (err)
- goto err_drop_enc;
-
-out:
- crypto_mod_put(auth_base);
- return err;
-
-err_drop_enc:
- crypto_drop_skcipher(&ctx->enc);
-err_drop_auth:
- crypto_drop_ahash(&ctx->auth);
+ if (err) {
err_free_inst:
- kfree(inst);
-out_put_auth:
- goto out;
+ crypto_authenc_free(inst);
+ }
+ return err;
}
static struct crypto_template crypto_authenc_tmpl = {
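
The unwind pattern the authenc conversion lands on — one label inside the
if-body, reached both by fall-through and by goto — replaces the old ladder of
err_drop_* labels, since crypto_authenc_free() now drops whatever spawns were
grabbed. The shape in isolation, with hypothetical demo_* helpers:

#include <crypto/internal/aead.h>

static int demo_grab_spawns(struct aead_instance *inst);	/* hypothetical */
static void demo_free(struct aead_instance *inst);		/* hypothetical */

static int demo_create(struct crypto_template *tmpl,
		       struct aead_instance *inst)
{
	int err;

	err = demo_grab_spawns(inst);
	if (err)
		goto err_free_inst;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		demo_free(inst);	/* frees inst and drops its spawns */
	}
	return err;
}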
diff --git a/crypto/authencesn.c b/crypto/authencesn.c
index adb7554fca29..589008146fce 100644
--- a/crypto/authencesn.c
+++ b/crypto/authencesn.c
@@ -65,15 +65,12 @@ static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *
int err = -EINVAL;
if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
- goto badkey;
+ goto out;
crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
CRYPTO_TFM_REQ_MASK);
err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
- crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) &
- CRYPTO_TFM_RES_MASK);
-
if (err)
goto out;
@@ -81,16 +78,9 @@ static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *
crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
CRYPTO_TFM_REQ_MASK);
err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
- crypto_aead_set_flags(authenc_esn, crypto_skcipher_get_flags(enc) &
- CRYPTO_TFM_RES_MASK);
-
out:
memzero_explicit(&keys, sizeof(keys));
return err;
-
-badkey:
- crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN);
- goto out;
}
static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
@@ -401,12 +391,12 @@ static int crypto_authenc_esn_create(struct crypto_template *tmpl,
struct rtattr **tb)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
+ struct authenc_esn_instance_ctx *ctx;
struct hash_alg_common *auth;
struct crypto_alg *auth_base;
struct skcipher_alg *enc;
- struct authenc_esn_instance_ctx *ctx;
- const char *enc_name;
int err;
algt = crypto_get_attr_type(tb);
@@ -416,50 +406,36 @@ static int crypto_authenc_esn_create(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
- auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_AHASH_MASK |
- crypto_requires_sync(algt->type, algt->mask));
- if (IS_ERR(auth))
- return PTR_ERR(auth);
-
- auth_base = &auth->base;
-
- enc_name = crypto_attr_alg_name(tb[2]);
- err = PTR_ERR(enc_name);
- if (IS_ERR(enc_name))
- goto out_put_auth;
+ mask = crypto_requires_sync(algt->type, algt->mask);
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
- err = -ENOMEM;
if (!inst)
- goto out_put_auth;
-
+ return -ENOMEM;
ctx = aead_instance_ctx(inst);
- err = crypto_init_ahash_spawn(&ctx->auth, auth,
- aead_crypto_instance(inst));
+ err = crypto_grab_ahash(&ctx->auth, aead_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, mask);
if (err)
goto err_free_inst;
+ auth = crypto_spawn_ahash_alg(&ctx->auth);
+ auth_base = &auth->base;
- crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
- err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(&ctx->enc, aead_crypto_instance(inst),
+ crypto_attr_alg_name(tb[2]), 0, mask);
if (err)
- goto err_drop_auth;
-
+ goto err_free_inst;
enc = crypto_spawn_skcipher_alg(&ctx->enc);
err = -ENAMETOOLONG;
if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
"authencesn(%s,%s)", auth_base->cra_name,
enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
- goto err_drop_enc;
+ goto err_free_inst;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"authencesn(%s,%s)", auth_base->cra_driver_name,
enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
- goto err_drop_enc;
+ goto err_free_inst;
inst->alg.base.cra_flags = (auth_base->cra_flags |
enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
@@ -485,21 +461,11 @@ static int crypto_authenc_esn_create(struct crypto_template *tmpl,
inst->free = crypto_authenc_esn_free,
err = aead_register_instance(tmpl, inst);
- if (err)
- goto err_drop_enc;
-
-out:
- crypto_mod_put(auth_base);
- return err;
-
-err_drop_enc:
- crypto_drop_skcipher(&ctx->enc);
-err_drop_auth:
- crypto_drop_ahash(&ctx->auth);
+ if (err) {
err_free_inst:
- kfree(inst);
-out_put_auth:
- goto out;
+ crypto_authenc_esn_free(inst);
+ }
+ return err;
}
static struct crypto_template crypto_authenc_esn_tmpl = {
diff --git a/crypto/blake2b_generic.c b/crypto/blake2b_generic.c
new file mode 100644
index 000000000000..1d262374fa4e
--- /dev/null
+++ b/crypto/blake2b_generic.c
@@ -0,0 +1,318 @@
+// SPDX-License-Identifier: (GPL-2.0-only OR Apache-2.0)
+/*
+ * BLAKE2b reference source code package - reference C implementations
+ *
+ * Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the
+ * terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at
+ * your option. The terms of these licenses can be found at:
+ *
+ * - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0
+ * - OpenSSL license : https://www.openssl.org/source/license.html
+ * - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * More information about the BLAKE2 hash function can be found at
+ * https://blake2.net.
+ *
+ * Note: the original sources have been modified for inclusion in the Linux
+ * kernel: coding style, use of generic helpers, and simplified error
+ * handling.
+ */
+
+#include <asm/unaligned.h>
+#include <linux/module.h>
+#include <linux/string.h>
+#include <linux/kernel.h>
+#include <linux/bitops.h>
+#include <crypto/internal/hash.h>
+
+#define BLAKE2B_160_DIGEST_SIZE (160 / 8)
+#define BLAKE2B_256_DIGEST_SIZE (256 / 8)
+#define BLAKE2B_384_DIGEST_SIZE (384 / 8)
+#define BLAKE2B_512_DIGEST_SIZE (512 / 8)
+
+enum blake2b_constant {
+ BLAKE2B_BLOCKBYTES = 128,
+ BLAKE2B_KEYBYTES = 64,
+};
+
+struct blake2b_state {
+ u64 h[8];
+ u64 t[2];
+ u64 f[2];
+ u8 buf[BLAKE2B_BLOCKBYTES];
+ size_t buflen;
+};
+
+static const u64 blake2b_IV[8] = {
+ 0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL,
+ 0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL,
+ 0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL,
+ 0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL
+};
+
+static const u8 blake2b_sigma[12][16] = {
+ { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
+ { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
+ { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
+ { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
+ { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
+ { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
+ { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
+ { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
+ { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
+ { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
+ { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
+ { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }
+};
+
+static void blake2b_increment_counter(struct blake2b_state *S, const u64 inc)
+{
+ S->t[0] += inc;
+ S->t[1] += (S->t[0] < inc);
+}
+
+#define G(r,i,a,b,c,d) \
+ do { \
+ a = a + b + m[blake2b_sigma[r][2*i+0]]; \
+ d = ror64(d ^ a, 32); \
+ c = c + d; \
+ b = ror64(b ^ c, 24); \
+ a = a + b + m[blake2b_sigma[r][2*i+1]]; \
+ d = ror64(d ^ a, 16); \
+ c = c + d; \
+ b = ror64(b ^ c, 63); \
+ } while (0)
+
+#define ROUND(r) \
+ do { \
+ G(r,0,v[ 0],v[ 4],v[ 8],v[12]); \
+ G(r,1,v[ 1],v[ 5],v[ 9],v[13]); \
+ G(r,2,v[ 2],v[ 6],v[10],v[14]); \
+ G(r,3,v[ 3],v[ 7],v[11],v[15]); \
+ G(r,4,v[ 0],v[ 5],v[10],v[15]); \
+ G(r,5,v[ 1],v[ 6],v[11],v[12]); \
+ G(r,6,v[ 2],v[ 7],v[ 8],v[13]); \
+ G(r,7,v[ 3],v[ 4],v[ 9],v[14]); \
+ } while (0)
+
+static void blake2b_compress(struct blake2b_state *S,
+ const u8 block[BLAKE2B_BLOCKBYTES])
+{
+ u64 m[16];
+ u64 v[16];
+ size_t i;
+
+ for (i = 0; i < 16; ++i)
+ m[i] = get_unaligned_le64(block + i * sizeof(m[i]));
+
+ for (i = 0; i < 8; ++i)
+ v[i] = S->h[i];
+
+ v[ 8] = blake2b_IV[0];
+ v[ 9] = blake2b_IV[1];
+ v[10] = blake2b_IV[2];
+ v[11] = blake2b_IV[3];
+ v[12] = blake2b_IV[4] ^ S->t[0];
+ v[13] = blake2b_IV[5] ^ S->t[1];
+ v[14] = blake2b_IV[6] ^ S->f[0];
+ v[15] = blake2b_IV[7] ^ S->f[1];
+
+ ROUND(0);
+ ROUND(1);
+ ROUND(2);
+ ROUND(3);
+ ROUND(4);
+ ROUND(5);
+ ROUND(6);
+ ROUND(7);
+ ROUND(8);
+ ROUND(9);
+ ROUND(10);
+ ROUND(11);
+
+ for (i = 0; i < 8; ++i)
+ S->h[i] = S->h[i] ^ v[i] ^ v[i + 8];
+}
+
+#undef G
+#undef ROUND
+
+struct blake2b_tfm_ctx {
+ u8 key[BLAKE2B_KEYBYTES];
+ unsigned int keylen;
+};
+
+static int blake2b_setkey(struct crypto_shash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct blake2b_tfm_ctx *tctx = crypto_shash_ctx(tfm);
+
+ if (keylen == 0 || keylen > BLAKE2B_KEYBYTES)
+ return -EINVAL;
+
+ memcpy(tctx->key, key, keylen);
+ tctx->keylen = keylen;
+
+ return 0;
+}
+
+static int blake2b_init(struct shash_desc *desc)
+{
+ struct blake2b_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
+ struct blake2b_state *state = shash_desc_ctx(desc);
+ const int digestsize = crypto_shash_digestsize(desc->tfm);
+
+ memset(state, 0, sizeof(*state));
+ memcpy(state->h, blake2b_IV, sizeof(state->h));
+
+ /* Parameter block is all zeros except index 0, no xor for 1..7 */
+ state->h[0] ^= 0x01010000 | tctx->keylen << 8 | digestsize;
+
+ if (tctx->keylen) {
+ /*
+		 * Prefill the buffer with the key; the next call to
+		 * _update or _final will process it.
+ */
+ memcpy(state->buf, tctx->key, tctx->keylen);
+ state->buflen = BLAKE2B_BLOCKBYTES;
+ }
+ return 0;
+}
+
+static int blake2b_update(struct shash_desc *desc, const u8 *in,
+ unsigned int inlen)
+{
+ struct blake2b_state *state = shash_desc_ctx(desc);
+ const size_t left = state->buflen;
+ const size_t fill = BLAKE2B_BLOCKBYTES - left;
+
+ if (!inlen)
+ return 0;
+
+ if (inlen > fill) {
+ state->buflen = 0;
+ /* Fill buffer */
+ memcpy(state->buf + left, in, fill);
+ blake2b_increment_counter(state, BLAKE2B_BLOCKBYTES);
+ /* Compress */
+ blake2b_compress(state, state->buf);
+ in += fill;
+ inlen -= fill;
+ while (inlen > BLAKE2B_BLOCKBYTES) {
+ blake2b_increment_counter(state, BLAKE2B_BLOCKBYTES);
+ blake2b_compress(state, in);
+ in += BLAKE2B_BLOCKBYTES;
+ inlen -= BLAKE2B_BLOCKBYTES;
+ }
+ }
+ memcpy(state->buf + state->buflen, in, inlen);
+ state->buflen += inlen;
+
+ return 0;
+}
+
+static int blake2b_final(struct shash_desc *desc, u8 *out)
+{
+ struct blake2b_state *state = shash_desc_ctx(desc);
+ const int digestsize = crypto_shash_digestsize(desc->tfm);
+ size_t i;
+
+ blake2b_increment_counter(state, state->buflen);
+ /* Set last block */
+ state->f[0] = (u64)-1;
+ /* Padding */
+ memset(state->buf + state->buflen, 0, BLAKE2B_BLOCKBYTES - state->buflen);
+ blake2b_compress(state, state->buf);
+
+ /* Avoid temporary buffer and switch the internal output to LE order */
+ for (i = 0; i < ARRAY_SIZE(state->h); i++)
+ __cpu_to_le64s(&state->h[i]);
+
+ memcpy(out, state->h, digestsize);
+ return 0;
+}
+
+static struct shash_alg blake2b_algs[] = {
+ {
+ .base.cra_name = "blake2b-160",
+ .base.cra_driver_name = "blake2b-160-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_blocksize = BLAKE2B_BLOCKBYTES,
+ .base.cra_ctxsize = sizeof(struct blake2b_tfm_ctx),
+ .base.cra_module = THIS_MODULE,
+ .digestsize = BLAKE2B_160_DIGEST_SIZE,
+ .setkey = blake2b_setkey,
+ .init = blake2b_init,
+ .update = blake2b_update,
+ .final = blake2b_final,
+ .descsize = sizeof(struct blake2b_state),
+ }, {
+ .base.cra_name = "blake2b-256",
+ .base.cra_driver_name = "blake2b-256-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_blocksize = BLAKE2B_BLOCKBYTES,
+ .base.cra_ctxsize = sizeof(struct blake2b_tfm_ctx),
+ .base.cra_module = THIS_MODULE,
+ .digestsize = BLAKE2B_256_DIGEST_SIZE,
+ .setkey = blake2b_setkey,
+ .init = blake2b_init,
+ .update = blake2b_update,
+ .final = blake2b_final,
+ .descsize = sizeof(struct blake2b_state),
+ }, {
+ .base.cra_name = "blake2b-384",
+ .base.cra_driver_name = "blake2b-384-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_blocksize = BLAKE2B_BLOCKBYTES,
+ .base.cra_ctxsize = sizeof(struct blake2b_tfm_ctx),
+ .base.cra_module = THIS_MODULE,
+ .digestsize = BLAKE2B_384_DIGEST_SIZE,
+ .setkey = blake2b_setkey,
+ .init = blake2b_init,
+ .update = blake2b_update,
+ .final = blake2b_final,
+ .descsize = sizeof(struct blake2b_state),
+ }, {
+ .base.cra_name = "blake2b-512",
+ .base.cra_driver_name = "blake2b-512-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_blocksize = BLAKE2B_BLOCKBYTES,
+ .base.cra_ctxsize = sizeof(struct blake2b_tfm_ctx),
+ .base.cra_module = THIS_MODULE,
+ .digestsize = BLAKE2B_512_DIGEST_SIZE,
+ .setkey = blake2b_setkey,
+ .init = blake2b_init,
+ .update = blake2b_update,
+ .final = blake2b_final,
+ .descsize = sizeof(struct blake2b_state),
+ }
+};
+
+static int __init blake2b_mod_init(void)
+{
+ return crypto_register_shashes(blake2b_algs, ARRAY_SIZE(blake2b_algs));
+}
+
+static void __exit blake2b_mod_fini(void)
+{
+ crypto_unregister_shashes(blake2b_algs, ARRAY_SIZE(blake2b_algs));
+}
+
+subsys_initcall(blake2b_mod_init);
+module_exit(blake2b_mod_fini);
+
+MODULE_AUTHOR("David Sterba <kdave@kernel.org>");
+MODULE_DESCRIPTION("BLAKE2b generic implementation");
+MODULE_LICENSE("GPL");
+MODULE_ALIAS_CRYPTO("blake2b-160");
+MODULE_ALIAS_CRYPTO("blake2b-160-generic");
+MODULE_ALIAS_CRYPTO("blake2b-256");
+MODULE_ALIAS_CRYPTO("blake2b-256-generic");
+MODULE_ALIAS_CRYPTO("blake2b-384");
+MODULE_ALIAS_CRYPTO("blake2b-384-generic");
+MODULE_ALIAS_CRYPTO("blake2b-512");
+MODULE_ALIAS_CRYPTO("blake2b-512-generic");
diff --git a/crypto/blake2s_generic.c b/crypto/blake2s_generic.c
new file mode 100644
index 000000000000..005783ff45ad
--- /dev/null
+++ b/crypto/blake2s_generic.c
@@ -0,0 +1,169 @@
+// SPDX-License-Identifier: GPL-2.0 OR MIT
+/*
+ * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
+ */
+
+#include <crypto/internal/blake2s.h>
+#include <crypto/internal/simd.h>
+#include <crypto/internal/hash.h>
+
+#include <linux/types.h>
+#include <linux/jump_label.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+
+static int crypto_blake2s_setkey(struct crypto_shash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(tfm);
+
+ if (keylen == 0 || keylen > BLAKE2S_KEY_SIZE)
+ return -EINVAL;
+
+ memcpy(tctx->key, key, keylen);
+ tctx->keylen = keylen;
+
+ return 0;
+}
+
+static int crypto_blake2s_init(struct shash_desc *desc)
+{
+ struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
+ struct blake2s_state *state = shash_desc_ctx(desc);
+ const int outlen = crypto_shash_digestsize(desc->tfm);
+
+ if (tctx->keylen)
+ blake2s_init_key(state, outlen, tctx->key, tctx->keylen);
+ else
+ blake2s_init(state, outlen);
+
+ return 0;
+}
+
+static int crypto_blake2s_update(struct shash_desc *desc, const u8 *in,
+ unsigned int inlen)
+{
+ struct blake2s_state *state = shash_desc_ctx(desc);
+ const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;
+
+ if (unlikely(!inlen))
+ return 0;
+ if (inlen > fill) {
+ memcpy(state->buf + state->buflen, in, fill);
+ blake2s_compress_generic(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
+ state->buflen = 0;
+ in += fill;
+ inlen -= fill;
+ }
+ if (inlen > BLAKE2S_BLOCK_SIZE) {
+ const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
+		/* Hash one less (full) block than strictly possible, so the
+		 * last block stays buffered for final() to flag and compress */
+ blake2s_compress_generic(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
+ in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
+ inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
+ }
+ memcpy(state->buf + state->buflen, in, inlen);
+ state->buflen += inlen;
+
+ return 0;
+}
+
+static int crypto_blake2s_final(struct shash_desc *desc, u8 *out)
+{
+ struct blake2s_state *state = shash_desc_ctx(desc);
+
+ blake2s_set_lastblock(state);
+ memset(state->buf + state->buflen, 0,
+ BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
+ blake2s_compress_generic(state, state->buf, 1, state->buflen);
+ cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
+ memcpy(out, state->h, state->outlen);
+ memzero_explicit(state, sizeof(*state));
+
+ return 0;
+}
+
+static struct shash_alg blake2s_algs[] = {{
+ .base.cra_name = "blake2s-128",
+ .base.cra_driver_name = "blake2s-128-generic",
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
+ .base.cra_priority = 200,
+ .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+
+ .digestsize = BLAKE2S_128_HASH_SIZE,
+ .setkey = crypto_blake2s_setkey,
+ .init = crypto_blake2s_init,
+ .update = crypto_blake2s_update,
+ .final = crypto_blake2s_final,
+ .descsize = sizeof(struct blake2s_state),
+}, {
+ .base.cra_name = "blake2s-160",
+ .base.cra_driver_name = "blake2s-160-generic",
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
+ .base.cra_priority = 200,
+ .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+
+ .digestsize = BLAKE2S_160_HASH_SIZE,
+ .setkey = crypto_blake2s_setkey,
+ .init = crypto_blake2s_init,
+ .update = crypto_blake2s_update,
+ .final = crypto_blake2s_final,
+ .descsize = sizeof(struct blake2s_state),
+}, {
+ .base.cra_name = "blake2s-224",
+ .base.cra_driver_name = "blake2s-224-generic",
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
+ .base.cra_priority = 200,
+ .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+
+ .digestsize = BLAKE2S_224_HASH_SIZE,
+ .setkey = crypto_blake2s_setkey,
+ .init = crypto_blake2s_init,
+ .update = crypto_blake2s_update,
+ .final = crypto_blake2s_final,
+ .descsize = sizeof(struct blake2s_state),
+}, {
+ .base.cra_name = "blake2s-256",
+ .base.cra_driver_name = "blake2s-256-generic",
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
+ .base.cra_priority = 200,
+ .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+
+ .digestsize = BLAKE2S_256_HASH_SIZE,
+ .setkey = crypto_blake2s_setkey,
+ .init = crypto_blake2s_init,
+ .update = crypto_blake2s_update,
+ .final = crypto_blake2s_final,
+ .descsize = sizeof(struct blake2s_state),
+}};
+
+static int __init blake2s_mod_init(void)
+{
+ return crypto_register_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
+}
+
+static void __exit blake2s_mod_exit(void)
+{
+ crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
+}
+
+subsys_initcall(blake2s_mod_init);
+module_exit(blake2s_mod_exit);
+
+MODULE_ALIAS_CRYPTO("blake2s-128");
+MODULE_ALIAS_CRYPTO("blake2s-128-generic");
+MODULE_ALIAS_CRYPTO("blake2s-160");
+MODULE_ALIAS_CRYPTO("blake2s-160-generic");
+MODULE_ALIAS_CRYPTO("blake2s-224");
+MODULE_ALIAS_CRYPTO("blake2s-224-generic");
+MODULE_ALIAS_CRYPTO("blake2s-256");
+MODULE_ALIAS_CRYPTO("blake2s-256-generic");
+MODULE_LICENSE("GPL v2");
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
deleted file mode 100644
index 48a33817de11..000000000000
--- a/crypto/blkcipher.c
+++ /dev/null
@@ -1,548 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * Block chaining cipher operations.
- *
- * Generic encrypt/decrypt wrapper for ciphers, handles operations across
- * multiple page boundaries by using temporary blocks. In user context,
- * the kernel is given a chance to schedule us once per page.
- *
- * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
- */
-
-#include <crypto/aead.h>
-#include <crypto/internal/skcipher.h>
-#include <crypto/scatterwalk.h>
-#include <linux/errno.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/seq_file.h>
-#include <linux/slab.h>
-#include <linux/string.h>
-#include <linux/cryptouser.h>
-#include <linux/compiler.h>
-#include <net/netlink.h>
-
-#include "internal.h"
-
-enum {
- BLKCIPHER_WALK_PHYS = 1 << 0,
- BLKCIPHER_WALK_SLOW = 1 << 1,
- BLKCIPHER_WALK_COPY = 1 << 2,
- BLKCIPHER_WALK_DIFF = 1 << 3,
-};
-
-static int blkcipher_walk_next(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk);
-static int blkcipher_walk_first(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk);
-
-static inline void blkcipher_map_src(struct blkcipher_walk *walk)
-{
- walk->src.virt.addr = scatterwalk_map(&walk->in);
-}
-
-static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
-{
- walk->dst.virt.addr = scatterwalk_map(&walk->out);
-}
-
-static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
-{
- scatterwalk_unmap(walk->src.virt.addr);
-}
-
-static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
-{
- scatterwalk_unmap(walk->dst.virt.addr);
-}
-
-/* Get a spot of the specified length that does not straddle a page.
- * The caller needs to ensure that there is enough space for this operation.
- */
-static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
-{
- u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);
- return max(start, end_page);
-}
-
-static inline void blkcipher_done_slow(struct blkcipher_walk *walk,
- unsigned int bsize)
-{
- u8 *addr;
-
- addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
- addr = blkcipher_get_spot(addr, bsize);
- scatterwalk_copychunks(addr, &walk->out, bsize, 1);
-}
-
-static inline void blkcipher_done_fast(struct blkcipher_walk *walk,
- unsigned int n)
-{
- if (walk->flags & BLKCIPHER_WALK_COPY) {
- blkcipher_map_dst(walk);
- memcpy(walk->dst.virt.addr, walk->page, n);
- blkcipher_unmap_dst(walk);
- } else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) {
- if (walk->flags & BLKCIPHER_WALK_DIFF)
- blkcipher_unmap_dst(walk);
- blkcipher_unmap_src(walk);
- }
-
- scatterwalk_advance(&walk->in, n);
- scatterwalk_advance(&walk->out, n);
-}
-
-int blkcipher_walk_done(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk, int err)
-{
- unsigned int n; /* bytes processed */
- bool more;
-
- if (unlikely(err < 0))
- goto finish;
-
- n = walk->nbytes - err;
- walk->total -= n;
- more = (walk->total != 0);
-
- if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW))) {
- blkcipher_done_fast(walk, n);
- } else {
- if (WARN_ON(err)) {
- /* unexpected case; didn't process all bytes */
- err = -EINVAL;
- goto finish;
- }
- blkcipher_done_slow(walk, n);
- }
-
- scatterwalk_done(&walk->in, 0, more);
- scatterwalk_done(&walk->out, 1, more);
-
- if (more) {
- crypto_yield(desc->flags);
- return blkcipher_walk_next(desc, walk);
- }
- err = 0;
-finish:
- walk->nbytes = 0;
- if (walk->iv != desc->info)
- memcpy(desc->info, walk->iv, walk->ivsize);
- if (walk->buffer != walk->page)
- kfree(walk->buffer);
- if (walk->page)
- free_page((unsigned long)walk->page);
- return err;
-}
-EXPORT_SYMBOL_GPL(blkcipher_walk_done);
-
-static inline int blkcipher_next_slow(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk,
- unsigned int bsize,
- unsigned int alignmask)
-{
- unsigned int n;
- unsigned aligned_bsize = ALIGN(bsize, alignmask + 1);
-
- if (walk->buffer)
- goto ok;
-
- walk->buffer = walk->page;
- if (walk->buffer)
- goto ok;
-
- n = aligned_bsize * 3 - (alignmask + 1) +
- (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
- walk->buffer = kmalloc(n, GFP_ATOMIC);
- if (!walk->buffer)
- return blkcipher_walk_done(desc, walk, -ENOMEM);
-
-ok:
- walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer,
- alignmask + 1);
- walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize);
- walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr +
- aligned_bsize, bsize);
-
- scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
-
- walk->nbytes = bsize;
- walk->flags |= BLKCIPHER_WALK_SLOW;
-
- return 0;
-}
-
-static inline int blkcipher_next_copy(struct blkcipher_walk *walk)
-{
- u8 *tmp = walk->page;
-
- blkcipher_map_src(walk);
- memcpy(tmp, walk->src.virt.addr, walk->nbytes);
- blkcipher_unmap_src(walk);
-
- walk->src.virt.addr = tmp;
- walk->dst.virt.addr = tmp;
-
- return 0;
-}
-
-static inline int blkcipher_next_fast(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk)
-{
- unsigned long diff;
-
- walk->src.phys.page = scatterwalk_page(&walk->in);
- walk->src.phys.offset = offset_in_page(walk->in.offset);
- walk->dst.phys.page = scatterwalk_page(&walk->out);
- walk->dst.phys.offset = offset_in_page(walk->out.offset);
-
- if (walk->flags & BLKCIPHER_WALK_PHYS)
- return 0;
-
- diff = walk->src.phys.offset - walk->dst.phys.offset;
- diff |= walk->src.virt.page - walk->dst.virt.page;
-
- blkcipher_map_src(walk);
- walk->dst.virt.addr = walk->src.virt.addr;
-
- if (diff) {
- walk->flags |= BLKCIPHER_WALK_DIFF;
- blkcipher_map_dst(walk);
- }
-
- return 0;
-}
-
-static int blkcipher_walk_next(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk)
-{
- unsigned int bsize;
- unsigned int n;
- int err;
-
- n = walk->total;
- if (unlikely(n < walk->cipher_blocksize)) {
- desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
- return blkcipher_walk_done(desc, walk, -EINVAL);
- }
-
- bsize = min(walk->walk_blocksize, n);
-
- walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
- BLKCIPHER_WALK_DIFF);
- if (!scatterwalk_aligned(&walk->in, walk->alignmask) ||
- !scatterwalk_aligned(&walk->out, walk->alignmask)) {
- walk->flags |= BLKCIPHER_WALK_COPY;
- if (!walk->page) {
- walk->page = (void *)__get_free_page(GFP_ATOMIC);
- if (!walk->page)
- n = 0;
- }
- }
-
- n = scatterwalk_clamp(&walk->in, n);
- n = scatterwalk_clamp(&walk->out, n);
-
- if (unlikely(n < bsize)) {
- err = blkcipher_next_slow(desc, walk, bsize, walk->alignmask);
- goto set_phys_lowmem;
- }
-
- walk->nbytes = n;
- if (walk->flags & BLKCIPHER_WALK_COPY) {
- err = blkcipher_next_copy(walk);
- goto set_phys_lowmem;
- }
-
- return blkcipher_next_fast(desc, walk);
-
-set_phys_lowmem:
- if (walk->flags & BLKCIPHER_WALK_PHYS) {
- walk->src.phys.page = virt_to_page(walk->src.virt.addr);
- walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
- walk->src.phys.offset &= PAGE_SIZE - 1;
- walk->dst.phys.offset &= PAGE_SIZE - 1;
- }
- return err;
-}
-
-static inline int blkcipher_copy_iv(struct blkcipher_walk *walk)
-{
- unsigned bs = walk->walk_blocksize;
- unsigned aligned_bs = ALIGN(bs, walk->alignmask + 1);
- unsigned int size = aligned_bs * 2 +
- walk->ivsize + max(aligned_bs, walk->ivsize) -
- (walk->alignmask + 1);
- u8 *iv;
-
- size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1);
- walk->buffer = kmalloc(size, GFP_ATOMIC);
- if (!walk->buffer)
- return -ENOMEM;
-
- iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
- iv = blkcipher_get_spot(iv, bs) + aligned_bs;
- iv = blkcipher_get_spot(iv, bs) + aligned_bs;
- iv = blkcipher_get_spot(iv, walk->ivsize);
-
- walk->iv = memcpy(iv, walk->iv, walk->ivsize);
- return 0;
-}
-
-int blkcipher_walk_virt(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk)
-{
- walk->flags &= ~BLKCIPHER_WALK_PHYS;
- walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
- walk->cipher_blocksize = walk->walk_blocksize;
- walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
- walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
- return blkcipher_walk_first(desc, walk);
-}
-EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
-
-int blkcipher_walk_phys(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk)
-{
- walk->flags |= BLKCIPHER_WALK_PHYS;
- walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
- walk->cipher_blocksize = walk->walk_blocksize;
- walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
- walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
- return blkcipher_walk_first(desc, walk);
-}
-EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
-
-static int blkcipher_walk_first(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk)
-{
- if (WARN_ON_ONCE(in_irq()))
- return -EDEADLK;
-
- walk->iv = desc->info;
- walk->nbytes = walk->total;
- if (unlikely(!walk->total))
- return 0;
-
- walk->buffer = NULL;
- if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
- int err = blkcipher_copy_iv(walk);
- if (err)
- return err;
- }
-
- scatterwalk_start(&walk->in, walk->in.sg);
- scatterwalk_start(&walk->out, walk->out.sg);
- walk->page = NULL;
-
- return blkcipher_walk_next(desc, walk);
-}
-
-int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk,
- unsigned int blocksize)
-{
- walk->flags &= ~BLKCIPHER_WALK_PHYS;
- walk->walk_blocksize = blocksize;
- walk->cipher_blocksize = crypto_blkcipher_blocksize(desc->tfm);
- walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
- walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
- return blkcipher_walk_first(desc, walk);
-}
-EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block);
-
-int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
- struct blkcipher_walk *walk,
- struct crypto_aead *tfm,
- unsigned int blocksize)
-{
- walk->flags &= ~BLKCIPHER_WALK_PHYS;
- walk->walk_blocksize = blocksize;
- walk->cipher_blocksize = crypto_aead_blocksize(tfm);
- walk->ivsize = crypto_aead_ivsize(tfm);
- walk->alignmask = crypto_aead_alignmask(tfm);
- return blkcipher_walk_first(desc, walk);
-}
-EXPORT_SYMBOL_GPL(blkcipher_aead_walk_virt_block);
-
-static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
- int ret;
- u8 *buffer, *alignbuffer;
- unsigned long absize;
-
- absize = keylen + alignmask;
- buffer = kmalloc(absize, GFP_ATOMIC);
- if (!buffer)
- return -ENOMEM;
-
- alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
- memcpy(alignbuffer, key, keylen);
- ret = cipher->setkey(tfm, alignbuffer, keylen);
- memset(alignbuffer, 0, keylen);
- kfree(buffer);
- return ret;
-}
-
-static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
-{
- struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
-
- if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
- tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
- return -EINVAL;
- }
-
- if ((unsigned long)key & alignmask)
- return setkey_unaligned(tfm, key, keylen);
-
- return cipher->setkey(tfm, key, keylen);
-}
-
-static int async_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
- unsigned int keylen)
-{
- return setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
-}
-
-static int async_encrypt(struct ablkcipher_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
- struct blkcipher_desc desc = {
- .tfm = __crypto_blkcipher_cast(tfm),
- .info = req->info,
- .flags = req->base.flags,
- };
-
-
- return alg->encrypt(&desc, req->dst, req->src, req->nbytes);
-}
-
-static int async_decrypt(struct ablkcipher_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
- struct blkcipher_desc desc = {
- .tfm = __crypto_blkcipher_cast(tfm),
- .info = req->info,
- .flags = req->base.flags,
- };
-
- return alg->decrypt(&desc, req->dst, req->src, req->nbytes);
-}
-
-static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type,
- u32 mask)
-{
- struct blkcipher_alg *cipher = &alg->cra_blkcipher;
- unsigned int len = alg->cra_ctxsize;
-
- if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK &&
- cipher->ivsize) {
- len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
- len += cipher->ivsize;
- }
-
- return len;
-}
-
-static int crypto_init_blkcipher_ops_async(struct crypto_tfm *tfm)
-{
- struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
-
- crt->setkey = async_setkey;
- crt->encrypt = async_encrypt;
- crt->decrypt = async_decrypt;
- crt->base = __crypto_ablkcipher_cast(tfm);
- crt->ivsize = alg->ivsize;
-
- return 0;
-}
-
-static int crypto_init_blkcipher_ops_sync(struct crypto_tfm *tfm)
-{
- struct blkcipher_tfm *crt = &tfm->crt_blkcipher;
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
- unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1;
- unsigned long addr;
-
- crt->setkey = setkey;
- crt->encrypt = alg->encrypt;
- crt->decrypt = alg->decrypt;
-
- addr = (unsigned long)crypto_tfm_ctx(tfm);
- addr = ALIGN(addr, align);
- addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
- crt->iv = (void *)addr;
-
- return 0;
-}
-
-static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
-{
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
-
- if (alg->ivsize > PAGE_SIZE / 8)
- return -EINVAL;
-
- if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK)
- return crypto_init_blkcipher_ops_sync(tfm);
- else
- return crypto_init_blkcipher_ops_async(tfm);
-}
-
-#ifdef CONFIG_NET
-static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
-{
- struct crypto_report_blkcipher rblkcipher;
-
- memset(&rblkcipher, 0, sizeof(rblkcipher));
-
- strscpy(rblkcipher.type, "blkcipher", sizeof(rblkcipher.type));
- strscpy(rblkcipher.geniv, "<default>", sizeof(rblkcipher.geniv));
-
- rblkcipher.blocksize = alg->cra_blocksize;
- rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
- rblkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
- rblkcipher.ivsize = alg->cra_blkcipher.ivsize;
-
- return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
- sizeof(rblkcipher), &rblkcipher);
-}
-#else
-static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
-{
- return -ENOSYS;
-}
-#endif
-
-static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
- __maybe_unused;
-static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
-{
- seq_printf(m, "type : blkcipher\n");
- seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
- seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize);
- seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize);
- seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize);
- seq_printf(m, "geniv : <default>\n");
-}
-
-const struct crypto_type crypto_blkcipher_type = {
- .ctxsize = crypto_blkcipher_ctxsize,
- .init = crypto_init_blkcipher_ops,
-#ifdef CONFIG_PROC_FS
- .show = crypto_blkcipher_show,
-#endif
- .report = crypto_blkcipher_report,
-};
-EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Generic block chaining cipher type");
diff --git a/crypto/camellia_generic.c b/crypto/camellia_generic.c
index b6a1121e2478..9a5783e5196a 100644
--- a/crypto/camellia_generic.c
+++ b/crypto/camellia_generic.c
@@ -970,12 +970,9 @@ camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
{
struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
const unsigned char *key = (const unsigned char *)in_key;
- u32 *flags = &tfm->crt_flags;
- if (key_len != 16 && key_len != 24 && key_len != 32) {
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+ if (key_len != 16 && key_len != 24 && key_len != 32)
return -EINVAL;
- }
cctx->key_length = key_len;
diff --git a/crypto/cast6_generic.c b/crypto/cast6_generic.c
index a8248f8e2777..c77ff6c8a2b2 100644
--- a/crypto/cast6_generic.c
+++ b/crypto/cast6_generic.c
@@ -103,17 +103,14 @@ static inline void W(u32 *key, unsigned int i)
key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]);
}
-int __cast6_setkey(struct cast6_ctx *c, const u8 *in_key,
- unsigned key_len, u32 *flags)
+int __cast6_setkey(struct cast6_ctx *c, const u8 *in_key, unsigned int key_len)
{
int i;
u32 key[8];
__be32 p_key[8]; /* padded key */
- if (key_len % 4 != 0) {
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+ if (key_len % 4 != 0)
return -EINVAL;
- }
memset(p_key, 0, 32);
memcpy(p_key, in_key, key_len);
@@ -148,13 +145,12 @@ EXPORT_SYMBOL_GPL(__cast6_setkey);
int cast6_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
- return __cast6_setkey(crypto_tfm_ctx(tfm), key, keylen,
- &tfm->crt_flags);
+ return __cast6_setkey(crypto_tfm_ctx(tfm), key, keylen);
}
EXPORT_SYMBOL_GPL(cast6_setkey);
/*forward quad round*/
-static inline void Q(u32 *block, u8 *Kr, u32 *Km)
+static inline void Q(u32 *block, const u8 *Kr, const u32 *Km)
{
u32 I;
block[2] ^= F1(block[3], Kr[0], Km[0]);
@@ -164,7 +160,7 @@ static inline void Q(u32 *block, u8 *Kr, u32 *Km)
}
/*reverse quad round*/
-static inline void QBAR(u32 *block, u8 *Kr, u32 *Km)
+static inline void QBAR(u32 *block, const u8 *Kr, const u32 *Km)
{
u32 I;
block[3] ^= F1(block[0], Kr[3], Km[3]);
@@ -173,13 +169,14 @@ static inline void QBAR(u32 *block, u8 *Kr, u32 *Km)
block[2] ^= F1(block[3], Kr[0], Km[0]);
}
-void __cast6_encrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf)
+void __cast6_encrypt(const void *ctx, u8 *outbuf, const u8 *inbuf)
{
+ const struct cast6_ctx *c = ctx;
const __be32 *src = (const __be32 *)inbuf;
__be32 *dst = (__be32 *)outbuf;
u32 block[4];
- u32 *Km;
- u8 *Kr;
+ const u32 *Km;
+ const u8 *Kr;
block[0] = be32_to_cpu(src[0]);
block[1] = be32_to_cpu(src[1]);
@@ -211,13 +208,14 @@ static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
__cast6_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
}
-void __cast6_decrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf)
+void __cast6_decrypt(const void *ctx, u8 *outbuf, const u8 *inbuf)
{
+ const struct cast6_ctx *c = ctx;
const __be32 *src = (const __be32 *)inbuf;
__be32 *dst = (__be32 *)outbuf;
u32 block[4];
- u32 *Km;
- u8 *Kr;
+ const u32 *Km;
+ const u8 *Kr;
block[0] = be32_to_cpu(src[0]);
block[1] = be32_to_cpu(src[1]);
diff --git a/crypto/cbc.c b/crypto/cbc.c
index dd96bcf4d4b6..e6f6273a7d39 100644
--- a/crypto/cbc.c
+++ b/crypto/cbc.c
@@ -54,10 +54,12 @@ static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
+ alg = skcipher_ialg_simple(inst);
+
err = -EINVAL;
if (!is_power_of_2(alg->cra_blocksize))
goto out_free_inst;
@@ -66,14 +68,11 @@ static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.decrypt = crypto_cbc_decrypt;
err = skcipher_register_instance(tmpl, inst);
- if (err)
- goto out_free_inst;
- goto out_put_alg;
-
+ if (err) {
out_free_inst:
- inst->free(inst);
-out_put_alg:
- crypto_mod_put(alg);
+ inst->free(inst);
+ }
+
return err;
}
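Note: with the two-argument skcipher_alloc_instance_simple() there is no separate algorithm reference left to drop, so a simple mode template's create() collapses to the shape below (a hypothetical pass-through mode; crypto_example_crypt is assumed):

static int crypto_example_crypt(struct skcipher_request *req);  /* assumed */

static int crypto_example_create(struct crypto_template *tmpl,
                                 struct rtattr **tb)
{
        struct skcipher_instance *inst;
        int err;

        inst = skcipher_alloc_instance_simple(tmpl, tb);
        if (IS_ERR(inst))
                return PTR_ERR(inst);

        inst->alg.encrypt = crypto_example_crypt;
        inst->alg.decrypt = crypto_example_crypt;

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                inst->free(inst);
        return err;
}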
diff --git a/crypto/ccm.c b/crypto/ccm.c
index 380eb619f657..241ecdc5c4e0 100644
--- a/crypto/ccm.c
+++ b/crypto/ccm.c
@@ -15,8 +15,6 @@
#include <linux/module.h>
#include <linux/slab.h>
-#include "internal.h"
-
struct ccm_instance_ctx {
struct crypto_skcipher_spawn ctr;
struct crypto_ahash_spawn mac;
@@ -91,26 +89,19 @@ static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
struct crypto_skcipher *ctr = ctx->ctr;
struct crypto_ahash *mac = ctx->mac;
- int err = 0;
+ int err;
crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
CRYPTO_TFM_REQ_MASK);
err = crypto_skcipher_setkey(ctr, key, keylen);
- crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
- CRYPTO_TFM_RES_MASK);
if (err)
- goto out;
+ return err;
crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_ahash_setkey(mac, key, keylen);
- crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
- CRYPTO_TFM_RES_MASK);
-
-out:
- return err;
+ return crypto_ahash_setkey(mac, key, keylen);
}
static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
@@ -457,11 +448,11 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl,
const char *mac_name)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
+ struct ccm_instance_ctx *ictx;
struct skcipher_alg *ctr;
- struct crypto_alg *mac_alg;
struct hash_alg_common *mac;
- struct ccm_instance_ctx *ictx;
int err;
algt = crypto_get_attr_type(tb);
@@ -471,37 +462,28 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
- mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
- CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_AHASH_MASK |
- CRYPTO_ALG_ASYNC);
- if (IS_ERR(mac_alg))
- return PTR_ERR(mac_alg);
-
- mac = __crypto_hash_alg_common(mac_alg);
- err = -EINVAL;
- if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
- mac->digestsize != 16)
- goto out_put_mac;
+ mask = crypto_requires_sync(algt->type, algt->mask);
inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
- err = -ENOMEM;
if (!inst)
- goto out_put_mac;
-
+ return -ENOMEM;
ictx = aead_instance_ctx(inst);
- err = crypto_init_ahash_spawn(&ictx->mac, mac,
- aead_crypto_instance(inst));
+
+ err = crypto_grab_ahash(&ictx->mac, aead_crypto_instance(inst),
+ mac_name, 0, CRYPTO_ALG_ASYNC);
if (err)
goto err_free_inst;
+ mac = crypto_spawn_ahash_alg(&ictx->mac);
- crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
- err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
- if (err)
- goto err_drop_mac;
+ err = -EINVAL;
+ if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
+ mac->digestsize != 16)
+ goto err_free_inst;
+ err = crypto_grab_skcipher(&ictx->ctr, aead_crypto_instance(inst),
+ ctr_name, 0, mask);
+ if (err)
+ goto err_free_inst;
ctr = crypto_spawn_skcipher_alg(&ictx->ctr);
/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
@@ -509,21 +491,21 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl,
if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
crypto_skcipher_alg_ivsize(ctr) != 16 ||
ctr->base.cra_blocksize != 1)
- goto err_drop_ctr;
+ goto err_free_inst;
/* ctr and cbcmac must use the same underlying block cipher. */
if (strcmp(ctr->base.cra_name + 4, mac->base.cra_name + 7) != 0)
- goto err_drop_ctr;
+ goto err_free_inst;
err = -ENAMETOOLONG;
if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
"ccm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
- goto err_drop_ctr;
+ goto err_free_inst;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"ccm_base(%s,%s)", ctr->base.cra_driver_name,
mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
- goto err_drop_ctr;
+ goto err_free_inst;
inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
inst->alg.base.cra_priority = (mac->base.cra_priority +
@@ -545,20 +527,11 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl,
inst->free = crypto_ccm_free;
err = aead_register_instance(tmpl, inst);
- if (err)
- goto err_drop_ctr;
-
-out_put_mac:
- crypto_mod_put(mac_alg);
- return err;
-
-err_drop_ctr:
- crypto_drop_skcipher(&ictx->ctr);
-err_drop_mac:
- crypto_drop_ahash(&ictx->mac);
+ if (err) {
err_free_inst:
- kfree(inst);
- goto out_put_mac;
+ crypto_ccm_free(inst);
+ }
+ return err;
}
static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
@@ -604,7 +577,6 @@ static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
{
struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
struct crypto_aead *child = ctx->child;
- int err;
if (keylen < 3)
return -EINVAL;
@@ -615,11 +587,7 @@ static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_aead_setkey(child, key, keylen);
- crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
-
- return err;
+ return crypto_aead_setkey(child, key, keylen);
}
static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
@@ -745,6 +713,7 @@ static int crypto_rfc4309_create(struct crypto_template *tmpl,
struct rtattr **tb)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
struct crypto_aead_spawn *spawn;
struct aead_alg *alg;
@@ -758,6 +727,8 @@ static int crypto_rfc4309_create(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
ccm_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(ccm_name))
return PTR_ERR(ccm_name);
@@ -767,9 +738,8 @@ static int crypto_rfc4309_create(struct crypto_template *tmpl,
return -ENOMEM;
spawn = aead_instance_ctx(inst);
- crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
- err = crypto_grab_aead(spawn, ccm_name, 0,
- crypto_requires_sync(algt->type, algt->mask));
+ err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
+ ccm_name, 0, mask);
if (err)
goto out_free_inst;
@@ -896,7 +866,7 @@ static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_cipher *cipher;
struct crypto_instance *inst = (void *)tfm->__crt_alg;
- struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
cipher = crypto_spawn_cipher(spawn);
@@ -917,6 +887,7 @@ static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct shash_instance *inst;
+ struct crypto_cipher_spawn *spawn;
struct crypto_alg *alg;
int err;
@@ -924,21 +895,20 @@ static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
return err;
- alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
- CRYPTO_ALG_TYPE_MASK);
- if (IS_ERR(alg))
- return PTR_ERR(alg);
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return -ENOMEM;
+ spawn = shash_instance_ctx(inst);
- inst = shash_alloc_instance("cbcmac", alg);
- err = PTR_ERR(inst);
- if (IS_ERR(inst))
- goto out_put_alg;
+ err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, 0);
+ if (err)
+ goto err_free_inst;
+ alg = crypto_spawn_cipher_alg(spawn);
- err = crypto_init_spawn(shash_instance_ctx(inst), alg,
- shash_crypto_instance(inst),
- CRYPTO_ALG_TYPE_MASK);
+ err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
inst->alg.base.cra_priority = alg->cra_priority;
inst->alg.base.cra_blocksize = 1;
@@ -957,14 +927,13 @@ static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.final = crypto_cbcmac_digest_final;
inst->alg.setkey = crypto_cbcmac_digest_setkey;
- err = shash_register_instance(tmpl, inst);
+ inst->free = shash_free_singlespawn_instance;
-out_free_inst:
- if (err)
- shash_free_instance(shash_crypto_instance(inst));
-
-out_put_alg:
- crypto_mod_put(alg);
+ err = shash_register_instance(tmpl, inst);
+ if (err) {
+err_free_inst:
+ shash_free_singlespawn_instance(inst);
+ }
return err;
}
@@ -972,7 +941,6 @@ static struct crypto_template crypto_ccm_tmpls[] = {
{
.name = "cbcmac",
.create = cbcmac_create,
- .free = shash_free_instance,
.module = THIS_MODULE,
}, {
.name = "ccm_base",
diff --git a/crypto/cfb.c b/crypto/cfb.c
index 7b68fbb61732..4e5219bbcd19 100644
--- a/crypto/cfb.c
+++ b/crypto/cfb.c
@@ -203,10 +203,12 @@ static int crypto_cfb_create(struct crypto_template *tmpl, struct rtattr **tb)
struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
+ alg = skcipher_ialg_simple(inst);
+
/* CFB mode is a stream cipher. */
inst->alg.base.cra_blocksize = 1;
@@ -223,7 +225,6 @@ static int crypto_cfb_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
inst->free(inst);
- crypto_mod_put(alg);
return err;
}
diff --git a/crypto/chacha20poly1305.c b/crypto/chacha20poly1305.c
index 74e824e537e6..ccaea5cb66d1 100644
--- a/crypto/chacha20poly1305.c
+++ b/crypto/chacha20poly1305.c
@@ -16,8 +16,6 @@
#include <linux/kernel.h>
#include <linux/module.h>
-#include "internal.h"
-
struct chachapoly_instance_ctx {
struct crypto_skcipher_spawn chacha;
struct crypto_ahash_spawn poly;
@@ -477,7 +475,6 @@ static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
unsigned int keylen)
{
struct chachapoly_ctx *ctx = crypto_aead_ctx(aead);
- int err;
if (keylen != ctx->saltlen + CHACHA_KEY_SIZE)
return -EINVAL;
@@ -488,11 +485,7 @@ static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
crypto_skcipher_clear_flags(ctx->chacha, CRYPTO_TFM_REQ_MASK);
crypto_skcipher_set_flags(ctx->chacha, crypto_aead_get_flags(aead) &
CRYPTO_TFM_REQ_MASK);
-
- err = crypto_skcipher_setkey(ctx->chacha, key, keylen);
- crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctx->chacha) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_skcipher_setkey(ctx->chacha, key, keylen);
}
static int chachapoly_setauthsize(struct crypto_aead *tfm,
@@ -563,12 +556,11 @@ static int chachapoly_create(struct crypto_template *tmpl, struct rtattr **tb,
const char *name, unsigned int ivsize)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
- struct skcipher_alg *chacha;
- struct crypto_alg *poly;
- struct hash_alg_common *poly_hash;
struct chachapoly_instance_ctx *ctx;
- const char *chacha_name, *poly_name;
+ struct skcipher_alg *chacha;
+ struct hash_alg_common *poly;
int err;
if (ivsize > CHACHAPOLY_IV_SIZE)
@@ -581,72 +573,53 @@ static int chachapoly_create(struct crypto_template *tmpl, struct rtattr **tb,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
- chacha_name = crypto_attr_alg_name(tb[1]);
- if (IS_ERR(chacha_name))
- return PTR_ERR(chacha_name);
- poly_name = crypto_attr_alg_name(tb[2]);
- if (IS_ERR(poly_name))
- return PTR_ERR(poly_name);
-
- poly = crypto_find_alg(poly_name, &crypto_ahash_type,
- CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_AHASH_MASK |
- crypto_requires_sync(algt->type,
- algt->mask));
- if (IS_ERR(poly))
- return PTR_ERR(poly);
- poly_hash = __crypto_hash_alg_common(poly);
+ mask = crypto_requires_sync(algt->type, algt->mask);
- err = -EINVAL;
- if (poly_hash->digestsize != POLY1305_DIGEST_SIZE)
- goto out_put_poly;
-
- err = -ENOMEM;
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
- goto out_put_poly;
-
+ return -ENOMEM;
ctx = aead_instance_ctx(inst);
ctx->saltlen = CHACHAPOLY_IV_SIZE - ivsize;
- err = crypto_init_ahash_spawn(&ctx->poly, poly_hash,
- aead_crypto_instance(inst));
+
+ err = crypto_grab_skcipher(&ctx->chacha, aead_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, mask);
if (err)
goto err_free_inst;
+ chacha = crypto_spawn_skcipher_alg(&ctx->chacha);
- crypto_set_skcipher_spawn(&ctx->chacha, aead_crypto_instance(inst));
- err = crypto_grab_skcipher(&ctx->chacha, chacha_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_ahash(&ctx->poly, aead_crypto_instance(inst),
+ crypto_attr_alg_name(tb[2]), 0, mask);
if (err)
- goto err_drop_poly;
-
- chacha = crypto_spawn_skcipher_alg(&ctx->chacha);
+ goto err_free_inst;
+ poly = crypto_spawn_ahash_alg(&ctx->poly);
err = -EINVAL;
+ if (poly->digestsize != POLY1305_DIGEST_SIZE)
+ goto err_free_inst;
/* Need 16-byte IV size, including Initial Block Counter value */
if (crypto_skcipher_alg_ivsize(chacha) != CHACHA_IV_SIZE)
- goto out_drop_chacha;
+ goto err_free_inst;
/* Not a stream cipher? */
if (chacha->base.cra_blocksize != 1)
- goto out_drop_chacha;
+ goto err_free_inst;
err = -ENAMETOOLONG;
if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
"%s(%s,%s)", name, chacha->base.cra_name,
- poly->cra_name) >= CRYPTO_MAX_ALG_NAME)
- goto out_drop_chacha;
+ poly->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
+ goto err_free_inst;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"%s(%s,%s)", name, chacha->base.cra_driver_name,
- poly->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
- goto out_drop_chacha;
+ poly->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+ goto err_free_inst;
- inst->alg.base.cra_flags = (chacha->base.cra_flags | poly->cra_flags) &
- CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_flags = (chacha->base.cra_flags |
+ poly->base.cra_flags) & CRYPTO_ALG_ASYNC;
inst->alg.base.cra_priority = (chacha->base.cra_priority +
- poly->cra_priority) / 2;
+ poly->base.cra_priority) / 2;
inst->alg.base.cra_blocksize = 1;
inst->alg.base.cra_alignmask = chacha->base.cra_alignmask |
- poly->cra_alignmask;
+ poly->base.cra_alignmask;
inst->alg.base.cra_ctxsize = sizeof(struct chachapoly_ctx) +
ctx->saltlen;
inst->alg.ivsize = ivsize;
@@ -662,20 +635,11 @@ static int chachapoly_create(struct crypto_template *tmpl, struct rtattr **tb,
inst->free = chachapoly_free;
err = aead_register_instance(tmpl, inst);
- if (err)
- goto out_drop_chacha;
-
-out_put_poly:
- crypto_mod_put(poly);
- return err;
-
-out_drop_chacha:
- crypto_drop_skcipher(&ctx->chacha);
-err_drop_poly:
- crypto_drop_ahash(&ctx->poly);
+ if (err) {
err_free_inst:
- kfree(inst);
- goto out_put_poly;
+ chachapoly_free(inst);
+ }
+ return err;
}
static int rfc7539_create(struct crypto_template *tmpl, struct rtattr **tb)
diff --git a/crypto/chacha_generic.c b/crypto/chacha_generic.c
index 085d8d219987..8beea79ab117 100644
--- a/crypto/chacha_generic.c
+++ b/crypto/chacha_generic.c
@@ -8,29 +8,10 @@
#include <asm/unaligned.h>
#include <crypto/algapi.h>
-#include <crypto/chacha.h>
+#include <crypto/internal/chacha.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
-static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src,
- unsigned int bytes, int nrounds)
-{
- /* aligned to potentially speed up crypto_xor() */
- u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
-
- while (bytes >= CHACHA_BLOCK_SIZE) {
- chacha_block(state, stream, nrounds);
- crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
- bytes -= CHACHA_BLOCK_SIZE;
- dst += CHACHA_BLOCK_SIZE;
- src += CHACHA_BLOCK_SIZE;
- }
- if (bytes) {
- chacha_block(state, stream, nrounds);
- crypto_xor_cpy(dst, src, stream, bytes);
- }
-}
-
static int chacha_stream_xor(struct skcipher_request *req,
const struct chacha_ctx *ctx, const u8 *iv)
{
@@ -40,7 +21,7 @@ static int chacha_stream_xor(struct skcipher_request *req,
err = skcipher_walk_virt(&walk, req, false);
- crypto_chacha_init(state, ctx, iv);
+ chacha_init_generic(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
@@ -48,75 +29,23 @@ static int chacha_stream_xor(struct skcipher_request *req,
if (nbytes < walk.total)
nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
- chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes, ctx->nrounds);
+ chacha_crypt_generic(state, walk.dst.virt.addr,
+ walk.src.virt.addr, nbytes, ctx->nrounds);
err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
}
return err;
}
-void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)
-{
- state[0] = 0x61707865; /* "expa" */
- state[1] = 0x3320646e; /* "nd 3" */
- state[2] = 0x79622d32; /* "2-by" */
- state[3] = 0x6b206574; /* "te k" */
- state[4] = ctx->key[0];
- state[5] = ctx->key[1];
- state[6] = ctx->key[2];
- state[7] = ctx->key[3];
- state[8] = ctx->key[4];
- state[9] = ctx->key[5];
- state[10] = ctx->key[6];
- state[11] = ctx->key[7];
- state[12] = get_unaligned_le32(iv + 0);
- state[13] = get_unaligned_le32(iv + 4);
- state[14] = get_unaligned_le32(iv + 8);
- state[15] = get_unaligned_le32(iv + 12);
-}
-EXPORT_SYMBOL_GPL(crypto_chacha_init);
-
-static int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keysize, int nrounds)
-{
- struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
- int i;
-
- if (keysize != CHACHA_KEY_SIZE)
- return -EINVAL;
-
- for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
- ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
-
- ctx->nrounds = nrounds;
- return 0;
-}
-
-int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keysize)
-{
- return chacha_setkey(tfm, key, keysize, 20);
-}
-EXPORT_SYMBOL_GPL(crypto_chacha20_setkey);
-
-int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keysize)
-{
- return chacha_setkey(tfm, key, keysize, 12);
-}
-EXPORT_SYMBOL_GPL(crypto_chacha12_setkey);
-
-int crypto_chacha_crypt(struct skcipher_request *req)
+static int crypto_chacha_crypt(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
return chacha_stream_xor(req, ctx, req->iv);
}
-EXPORT_SYMBOL_GPL(crypto_chacha_crypt);
-int crypto_xchacha_crypt(struct skcipher_request *req)
+static int crypto_xchacha_crypt(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -125,8 +54,8 @@ int crypto_xchacha_crypt(struct skcipher_request *req)
u8 real_iv[16];
/* Compute the subkey given the original key and first 128 nonce bits */
- crypto_chacha_init(state, ctx, req->iv);
- hchacha_block(state, subctx.key, ctx->nrounds);
+ chacha_init_generic(state, ctx->key, req->iv);
+ hchacha_block_generic(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
/* Build the real IV */
@@ -136,7 +65,6 @@ int crypto_xchacha_crypt(struct skcipher_request *req)
/* Generate the stream and XOR it with the data */
return chacha_stream_xor(req, &subctx, real_iv);
}
-EXPORT_SYMBOL_GPL(crypto_xchacha_crypt);
static struct skcipher_alg algs[] = {
{
@@ -151,7 +79,7 @@ static struct skcipher_alg algs[] = {
.max_keysize = CHACHA_KEY_SIZE,
.ivsize = CHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
- .setkey = crypto_chacha20_setkey,
+ .setkey = chacha20_setkey,
.encrypt = crypto_chacha_crypt,
.decrypt = crypto_chacha_crypt,
}, {
@@ -166,7 +94,7 @@ static struct skcipher_alg algs[] = {
.max_keysize = CHACHA_KEY_SIZE,
.ivsize = XCHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
- .setkey = crypto_chacha20_setkey,
+ .setkey = chacha20_setkey,
.encrypt = crypto_xchacha_crypt,
.decrypt = crypto_xchacha_crypt,
}, {
@@ -181,7 +109,7 @@ static struct skcipher_alg algs[] = {
.max_keysize = CHACHA_KEY_SIZE,
.ivsize = XCHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
- .setkey = crypto_chacha12_setkey,
+ .setkey = chacha12_setkey,
.encrypt = crypto_xchacha_crypt,
.decrypt = crypto_xchacha_crypt,
}
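Note: with the driver now deferring to the library routines, a one-shot ChaCha20 stream XOR reduces to two calls. A sketch, where the 16-byte iv is the little-endian block counter followed by the nonce and the function name is invented:

#include <crypto/chacha.h>

static void chacha20_xor_once(const u32 key[8], const u8 iv[16],
                              u8 *dst, const u8 *src, unsigned int bytes)
{
        u32 state[16];

        chacha_init_generic(state, key, iv);    /* constants, key, counter+nonce */
        chacha_crypt_generic(state, dst, src, bytes, 20);
}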
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 108427026e7c..fd78150deb1c 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -2,7 +2,7 @@
/*
* Cryptographic API.
*
- * Cipher operations.
+ * Single-block cipher operations.
*
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
* Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
@@ -16,11 +16,11 @@
#include <linux/string.h>
#include "internal.h"
-static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
+static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
unsigned int keylen)
{
- struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+ struct cipher_alg *cia = crypto_cipher_alg(tfm);
+ unsigned long alignmask = crypto_cipher_alignmask(tfm);
int ret;
u8 *buffer, *alignbuffer;
unsigned long absize;
@@ -32,83 +32,60 @@ static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
memcpy(alignbuffer, key, keylen);
- ret = cia->cia_setkey(tfm, alignbuffer, keylen);
+ ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
memset(alignbuffer, 0, keylen);
kfree(buffer);
return ret;
}
-static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
+int crypto_cipher_setkey(struct crypto_cipher *tfm,
+ const u8 *key, unsigned int keylen)
{
- struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+ struct cipher_alg *cia = crypto_cipher_alg(tfm);
+ unsigned long alignmask = crypto_cipher_alignmask(tfm);
- tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
- if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
- tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+ if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
return -EINVAL;
- }
if ((unsigned long)key & alignmask)
return setkey_unaligned(tfm, key, keylen);
- return cia->cia_setkey(tfm, key, keylen);
-}
-
-static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
- const u8 *),
- struct crypto_tfm *tfm,
- u8 *dst, const u8 *src)
-{
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
- unsigned int size = crypto_tfm_alg_blocksize(tfm);
- u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
- u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
-
- memcpy(tmp, src, size);
- fn(tfm, tmp, tmp);
- memcpy(dst, tmp, size);
+ return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
+EXPORT_SYMBOL_GPL(crypto_cipher_setkey);
-static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
- u8 *dst, const u8 *src)
+static inline void cipher_crypt_one(struct crypto_cipher *tfm,
+ u8 *dst, const u8 *src, bool enc)
{
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
- struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+ unsigned long alignmask = crypto_cipher_alignmask(tfm);
+ struct cipher_alg *cia = crypto_cipher_alg(tfm);
+ void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
+ enc ? cia->cia_encrypt : cia->cia_decrypt;
if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
- cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
- return;
+ unsigned int bs = crypto_cipher_blocksize(tfm);
+ u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
+ u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+
+ memcpy(tmp, src, bs);
+ fn(crypto_cipher_tfm(tfm), tmp, tmp);
+ memcpy(dst, tmp, bs);
+ } else {
+ fn(crypto_cipher_tfm(tfm), dst, src);
}
-
- cipher->cia_encrypt(tfm, dst, src);
}
-static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
- u8 *dst, const u8 *src)
+void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
+ u8 *dst, const u8 *src)
{
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
- struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
-
- if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
- cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
- return;
- }
-
- cipher->cia_decrypt(tfm, dst, src);
+ cipher_crypt_one(tfm, dst, src, true);
}
+EXPORT_SYMBOL_GPL(crypto_cipher_encrypt_one);
-int crypto_init_cipher_ops(struct crypto_tfm *tfm)
+void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
+ u8 *dst, const u8 *src)
{
- struct cipher_tfm *ops = &tfm->crt_cipher;
- struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
-
- ops->cit_setkey = setkey;
- ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
- cipher_encrypt_unaligned : cipher->cia_encrypt;
- ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
- cipher_decrypt_unaligned : cipher->cia_decrypt;
-
- return 0;
+ cipher_crypt_one(tfm, dst, src, false);
}
+EXPORT_SYMBOL_GPL(crypto_cipher_decrypt_one);
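Note: callers are unaffected by the refactor; single-block use keeps the same shape. A sketch with error handling trimmed:

#include <linux/crypto.h>

static int aes_encrypt_one_block(const u8 *key, unsigned int keylen,
                                 u8 out[16], const u8 in[16])
{
        struct crypto_cipher *tfm;
        int err;

        tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_cipher_setkey(tfm, key, keylen);
        if (!err)
                crypto_cipher_encrypt_one(tfm, out, in);

        crypto_free_cipher(tfm);
        return err;
}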
diff --git a/crypto/cmac.c b/crypto/cmac.c
index 0928aebc6205..143a6544c873 100644
--- a/crypto/cmac.c
+++ b/crypto/cmac.c
@@ -201,7 +201,7 @@ static int cmac_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_cipher *cipher;
struct crypto_instance *inst = (void *)tfm->__crt_alg;
- struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
cipher = crypto_spawn_cipher(spawn);
@@ -222,6 +222,7 @@ static void cmac_exit_tfm(struct crypto_tfm *tfm)
static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct shash_instance *inst;
+ struct crypto_cipher_spawn *spawn;
struct crypto_alg *alg;
unsigned long alignmask;
int err;
@@ -230,10 +231,16 @@ static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
return err;
- alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
- CRYPTO_ALG_TYPE_MASK);
- if (IS_ERR(alg))
- return PTR_ERR(alg);
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return -ENOMEM;
+ spawn = shash_instance_ctx(inst);
+
+ err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, 0);
+ if (err)
+ goto err_free_inst;
+ alg = crypto_spawn_cipher_alg(spawn);
switch (alg->cra_blocksize) {
case 16:
@@ -241,19 +248,12 @@ static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
break;
default:
err = -EINVAL;
- goto out_put_alg;
+ goto err_free_inst;
}
- inst = shash_alloc_instance("cmac", alg);
- err = PTR_ERR(inst);
- if (IS_ERR(inst))
- goto out_put_alg;
-
- err = crypto_init_spawn(shash_instance_ctx(inst), alg,
- shash_crypto_instance(inst),
- CRYPTO_ALG_TYPE_MASK);
+ err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
alignmask = alg->cra_alignmask;
inst->alg.base.cra_alignmask = alignmask;
@@ -280,21 +280,19 @@ static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.final = crypto_cmac_digest_final;
inst->alg.setkey = crypto_cmac_digest_setkey;
+ inst->free = shash_free_singlespawn_instance;
+
err = shash_register_instance(tmpl, inst);
if (err) {
-out_free_inst:
- shash_free_instance(shash_crypto_instance(inst));
+err_free_inst:
+ shash_free_singlespawn_instance(inst);
}
-
-out_put_alg:
- crypto_mod_put(alg);
return err;
}
static struct crypto_template crypto_cmac_tmpl = {
.name = "cmac",
.create = cmac_create,
- .free = shash_free_instance,
.module = THIS_MODULE,
};
diff --git a/crypto/compress.c b/crypto/compress.c
index e9edf8524787..9048fe390c46 100644
--- a/crypto/compress.c
+++ b/crypto/compress.c
@@ -6,34 +6,27 @@
*
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
*/
-#include <linux/types.h>
#include <linux/crypto.h>
-#include <linux/errno.h>
-#include <linux/string.h>
#include "internal.h"
-static int crypto_compress(struct crypto_tfm *tfm,
- const u8 *src, unsigned int slen,
- u8 *dst, unsigned int *dlen)
+int crypto_comp_compress(struct crypto_comp *comp,
+ const u8 *src, unsigned int slen,
+ u8 *dst, unsigned int *dlen)
{
+ struct crypto_tfm *tfm = crypto_comp_tfm(comp);
+
return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst,
dlen);
}
+EXPORT_SYMBOL_GPL(crypto_comp_compress);
-static int crypto_decompress(struct crypto_tfm *tfm,
- const u8 *src, unsigned int slen,
- u8 *dst, unsigned int *dlen)
+int crypto_comp_decompress(struct crypto_comp *comp,
+ const u8 *src, unsigned int slen,
+ u8 *dst, unsigned int *dlen)
{
+ struct crypto_tfm *tfm = crypto_comp_tfm(comp);
+
return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst,
dlen);
}
-
-int crypto_init_compress_ops(struct crypto_tfm *tfm)
-{
- struct compress_tfm *ops = &tfm->crt_compress;
-
- ops->cot_compress = crypto_compress;
- ops->cot_decompress = crypto_decompress;
-
- return 0;
-}
+EXPORT_SYMBOL_GPL(crypto_comp_decompress);
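Note: with the indirection through crt_compress gone, users call the exported helpers directly. A sketch, assuming the "deflate" algorithm is available:

#include <linux/crypto.h>

static int deflate_buffer(const u8 *src, unsigned int slen,
                          u8 *dst, unsigned int *dlen)
{
        struct crypto_comp *tfm;
        int err;

        tfm = crypto_alloc_comp("deflate", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* *dlen in: capacity of dst; out: compressed length */
        err = crypto_comp_compress(tfm, src, slen, dst, dlen);

        crypto_free_comp(tfm);
        return err;
}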
diff --git a/crypto/crc32_generic.c b/crypto/crc32_generic.c
index 9e97912280bd..0e103fb5dd77 100644
--- a/crypto/crc32_generic.c
+++ b/crypto/crc32_generic.c
@@ -60,10 +60,8 @@ static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
{
u32 *mctx = crypto_shash_ctx(hash);
- if (keylen != sizeof(u32)) {
- crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != sizeof(u32))
return -EINVAL;
- }
*mctx = get_unaligned_le32(key);
return 0;
}
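Note: only the rejected-keylen path changes here; the "key" remains the 32-bit CRC seed, consumed little-endian via get_unaligned_le32() above. A usage sketch:

#include <crypto/hash.h>

static int crc32_with_seed(u32 seed, const u8 *data, unsigned int len,
                           u8 out[4])
{
        struct crypto_shash *tfm;
        __le32 k = cpu_to_le32(seed);
        int err;

        tfm = crypto_alloc_shash("crc32", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_shash_setkey(tfm, (const u8 *)&k, sizeof(k));
        if (!err) {
                SHASH_DESC_ON_STACK(desc, tfm);

                desc->tfm = tfm;
                err = crypto_shash_digest(desc, data, len, out);
        }

        crypto_free_shash(tfm);
        return err;
}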
diff --git a/crypto/crc32c_generic.c b/crypto/crc32c_generic.c
index 7b25fe82072c..7fa9b0788685 100644
--- a/crypto/crc32c_generic.c
+++ b/crypto/crc32c_generic.c
@@ -74,10 +74,8 @@ static int chksum_setkey(struct crypto_shash *tfm, const u8 *key,
{
struct chksum_ctx *mctx = crypto_shash_ctx(tfm);
- if (keylen != sizeof(mctx->key)) {
- crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != sizeof(mctx->key))
return -EINVAL;
- }
mctx->key = get_unaligned_le32(key);
return 0;
}
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 3748f9b4516d..d94c75c840a5 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -16,7 +16,7 @@
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
-#include <linux/atomic.h>
+#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
@@ -63,7 +63,7 @@ struct aead_instance_ctx {
};
struct cryptd_skcipher_ctx {
- atomic_t refcnt;
+ refcount_t refcnt;
struct crypto_sync_skcipher *child;
};
@@ -72,7 +72,7 @@ struct cryptd_skcipher_request_ctx {
};
struct cryptd_hash_ctx {
- atomic_t refcnt;
+ refcount_t refcnt;
struct crypto_shash *child;
};
@@ -82,7 +82,7 @@ struct cryptd_hash_request_ctx {
};
struct cryptd_aead_ctx {
- atomic_t refcnt;
+ refcount_t refcnt;
struct crypto_aead *child;
};
@@ -127,7 +127,7 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
{
int cpu, err;
struct cryptd_cpu_queue *cpu_queue;
- atomic_t *refcnt;
+ refcount_t *refcnt;
cpu = get_cpu();
cpu_queue = this_cpu_ptr(queue->cpu_queue);
@@ -140,10 +140,10 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
queue_work_on(cpu, cryptd_wq, &cpu_queue->work);
- if (!atomic_read(refcnt))
+ if (!refcount_read(refcnt))
goto out_put_cpu;
- atomic_inc(refcnt);
+ refcount_inc(refcnt);
out_put_cpu:
put_cpu();
@@ -221,48 +221,17 @@ static int cryptd_init_instance(struct crypto_instance *inst,
return 0;
}
-static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
- unsigned int tail)
-{
- char *p;
- struct crypto_instance *inst;
- int err;
-
- p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
- if (!p)
- return ERR_PTR(-ENOMEM);
-
- inst = (void *)(p + head);
-
- err = cryptd_init_instance(inst, alg);
- if (err)
- goto out_free_inst;
-
-out:
- return p;
-
-out_free_inst:
- kfree(p);
- p = ERR_PTR(err);
- goto out;
-}
-
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
const u8 *key, unsigned int keylen)
{
struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
struct crypto_sync_skcipher *child = ctx->child;
- int err;
crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_sync_skcipher_set_flags(child,
crypto_skcipher_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_sync_skcipher_setkey(child, key, keylen);
- crypto_skcipher_set_flags(parent,
- crypto_sync_skcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_sync_skcipher_setkey(child, key, keylen);
}
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
@@ -270,13 +239,13 @@ static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
- int refcnt = atomic_read(&ctx->refcnt);
+ int refcnt = refcount_read(&ctx->refcnt);
local_bh_disable();
rctx->complete(&req->base, err);
local_bh_enable();
- if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+ if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
crypto_free_skcipher(tfm);
}
@@ -421,8 +390,8 @@ static int cryptd_create_skcipher(struct crypto_template *tmpl,
ctx = skcipher_instance_ctx(inst);
ctx->queue = queue;
- crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
- err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
+ err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
+ name, type, mask);
if (err)
goto out_free_inst;
@@ -491,15 +460,11 @@ static int cryptd_hash_setkey(struct crypto_ahash *parent,
{
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
struct crypto_shash *child = ctx->child;
- int err;
crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_shash_setkey(child, key, keylen);
- crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_shash_setkey(child, key, keylen);
}
static int cryptd_hash_enqueue(struct ahash_request *req,
@@ -521,13 +486,13 @@ static void cryptd_hash_complete(struct ahash_request *req, int err)
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
- int refcnt = atomic_read(&ctx->refcnt);
+ int refcnt = refcount_read(&ctx->refcnt);
local_bh_disable();
rctx->complete(&req->base, err);
local_bh_enable();
- if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+ if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
crypto_free_ahash(tfm);
}
@@ -666,44 +631,49 @@ static int cryptd_hash_import(struct ahash_request *req, const void *in)
return crypto_shash_import(desc, in);
}
+static void cryptd_hash_free(struct ahash_instance *inst)
+{
+ struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);
+
+ crypto_drop_shash(&ctx->spawn);
+ kfree(inst);
+}
+
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
struct cryptd_queue *queue)
{
struct hashd_instance_ctx *ctx;
struct ahash_instance *inst;
- struct shash_alg *salg;
- struct crypto_alg *alg;
+ struct shash_alg *alg;
u32 type = 0;
u32 mask = 0;
int err;
cryptd_check_internal(tb, &type, &mask);
- salg = shash_attr_alg(tb[1], type, mask);
- if (IS_ERR(salg))
- return PTR_ERR(salg);
-
- alg = &salg->base;
- inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
- sizeof(*ctx));
- err = PTR_ERR(inst);
- if (IS_ERR(inst))
- goto out_put_alg;
+ inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+ if (!inst)
+ return -ENOMEM;
ctx = ahash_instance_ctx(inst);
ctx->queue = queue;
- err = crypto_init_shash_spawn(&ctx->spawn, salg,
- ahash_crypto_instance(inst));
+ err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), type, mask);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
+ alg = crypto_spawn_shash_alg(&ctx->spawn);
+
+ err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
+ if (err)
+ goto err_free_inst;
inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
- (alg->cra_flags & (CRYPTO_ALG_INTERNAL |
- CRYPTO_ALG_OPTIONAL_KEY));
+ (alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
+ CRYPTO_ALG_OPTIONAL_KEY));
- inst->alg.halg.digestsize = salg->digestsize;
- inst->alg.halg.statesize = salg->statesize;
+ inst->alg.halg.digestsize = alg->digestsize;
+ inst->alg.halg.statesize = alg->statesize;
inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
@@ -715,19 +685,18 @@ static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
inst->alg.finup = cryptd_hash_finup_enqueue;
inst->alg.export = cryptd_hash_export;
inst->alg.import = cryptd_hash_import;
- if (crypto_shash_alg_has_setkey(salg))
+ if (crypto_shash_alg_has_setkey(alg))
inst->alg.setkey = cryptd_hash_setkey;
inst->alg.digest = cryptd_hash_digest_enqueue;
+ inst->free = cryptd_hash_free;
+
err = ahash_register_instance(tmpl, inst);
if (err) {
+err_free_inst:
crypto_drop_shash(&ctx->spawn);
-out_free_inst:
kfree(inst);
}
-
-out_put_alg:
- crypto_mod_put(alg);
return err;
}
@@ -772,13 +741,13 @@ static void cryptd_aead_crypt(struct aead_request *req,
out:
ctx = crypto_aead_ctx(tfm);
- refcnt = atomic_read(&ctx->refcnt);
+ refcnt = refcount_read(&ctx->refcnt);
local_bh_disable();
compl(&req->base, err);
local_bh_enable();
- if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+ if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
crypto_free_aead(tfm);
}
@@ -849,6 +818,14 @@ static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
crypto_free_aead(ctx->child);
}
+static void cryptd_aead_free(struct aead_instance *inst)
+{
+ struct aead_instance_ctx *ctx = aead_instance_ctx(inst);
+
+ crypto_drop_aead(&ctx->aead_spawn);
+ kfree(inst);
+}
+
static int cryptd_create_aead(struct crypto_template *tmpl,
struct rtattr **tb,
struct cryptd_queue *queue)
@@ -874,8 +851,8 @@ static int cryptd_create_aead(struct crypto_template *tmpl,
ctx = aead_instance_ctx(inst);
ctx->queue = queue;
- crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
- err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
+ err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
+ name, type, mask);
if (err)
goto out_free_inst;
@@ -898,6 +875,8 @@ static int cryptd_create_aead(struct crypto_template *tmpl,
inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
inst->alg.decrypt = cryptd_aead_decrypt_enqueue;
+ inst->free = cryptd_aead_free;
+
err = aead_register_instance(tmpl, inst);
if (err) {
out_drop_aead:
@@ -919,7 +898,7 @@ static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
return PTR_ERR(algt);
switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
- case CRYPTO_ALG_TYPE_BLKCIPHER:
+ case CRYPTO_ALG_TYPE_SKCIPHER:
return cryptd_create_skcipher(tmpl, tb, &queue);
case CRYPTO_ALG_TYPE_HASH:
return cryptd_create_hash(tmpl, tb, &queue);
@@ -930,31 +909,9 @@ static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
return -EINVAL;
}
-static void cryptd_free(struct crypto_instance *inst)
-{
- struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
- struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
- struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
-
- switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
- case CRYPTO_ALG_TYPE_AHASH:
- crypto_drop_shash(&hctx->spawn);
- kfree(ahash_instance(inst));
- return;
- case CRYPTO_ALG_TYPE_AEAD:
- crypto_drop_aead(&aead_ctx->aead_spawn);
- kfree(aead_instance(inst));
- return;
- default:
- crypto_drop_spawn(&ctx->spawn);
- kfree(inst);
- }
-}
-
static struct crypto_template cryptd_tmpl = {
.name = "cryptd",
.create = cryptd_create,
- .free = cryptd_free,
.module = THIS_MODULE,
};
@@ -979,7 +936,7 @@ struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
}
ctx = crypto_skcipher_ctx(tfm);
- atomic_set(&ctx->refcnt, 1);
+ refcount_set(&ctx->refcnt, 1);
return container_of(tfm, struct cryptd_skcipher, base);
}
@@ -997,7 +954,7 @@ bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
- return atomic_read(&ctx->refcnt) - 1;
+ return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
@@ -1005,7 +962,7 @@ void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
- if (atomic_dec_and_test(&ctx->refcnt))
+ if (refcount_dec_and_test(&ctx->refcnt))
crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
@@ -1029,7 +986,7 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
}
ctx = crypto_ahash_ctx(tfm);
- atomic_set(&ctx->refcnt, 1);
+ refcount_set(&ctx->refcnt, 1);
return __cryptd_ahash_cast(tfm);
}
@@ -1054,7 +1011,7 @@ bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
- return atomic_read(&ctx->refcnt) - 1;
+ return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
@@ -1062,7 +1019,7 @@ void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
- if (atomic_dec_and_test(&ctx->refcnt))
+ if (refcount_dec_and_test(&ctx->refcnt))
crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
@@ -1086,7 +1043,7 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
}
ctx = crypto_aead_ctx(tfm);
- atomic_set(&ctx->refcnt, 1);
+ refcount_set(&ctx->refcnt, 1);
return __cryptd_aead_cast(tfm);
}
@@ -1104,7 +1061,7 @@ bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
- return atomic_read(&ctx->refcnt) - 1;
+ return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);
@@ -1112,7 +1069,7 @@ void cryptd_free_aead(struct cryptd_aead *tfm)
{
struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
- if (atomic_dec_and_test(&ctx->refcnt))
+ if (refcount_dec_and_test(&ctx->refcnt))
crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
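
The cryptd changes above are a mechanical atomic_t to refcount_t conversion; the point is that refcount_t saturates and warns on overflow or underflow instead of silently wrapping. A minimal sketch of the lifetime pattern cryptd follows (the example_* names are placeholders):

	#include <linux/refcount.h>

	struct example_ctx {
		refcount_t refcnt;
	};

	static void example_init(struct example_ctx *ctx)
	{
		refcount_set(&ctx->refcnt, 1);	/* creator holds one reference */
	}

	static void example_get(struct example_ctx *ctx)
	{
		refcount_inc(&ctx->refcnt);	/* WARNs and saturates on overflow */
	}

	static bool example_put(struct example_ctx *ctx)
	{
		/* true exactly once, when the last reference is dropped */
		return refcount_dec_and_test(&ctx->refcnt);
	}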
diff --git a/crypto/crypto_engine.c b/crypto/crypto_engine.c
index d7502ec37f20..eb029ff1e05a 100644
--- a/crypto/crypto_engine.c
+++ b/crypto/crypto_engine.c
@@ -214,20 +214,6 @@ static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
}
/**
- * crypto_transfer_ablkcipher_request_to_engine - transfer one ablkcipher_request
- * to list into the engine queue
- * @engine: the hardware engine
- * @req: the request need to be listed into the engine queue
- * TODO: Remove this function when skcipher conversion is finished
- */
-int crypto_transfer_ablkcipher_request_to_engine(struct crypto_engine *engine,
- struct ablkcipher_request *req)
-{
- return crypto_transfer_request_to_engine(engine, &req->base);
-}
-EXPORT_SYMBOL_GPL(crypto_transfer_ablkcipher_request_to_engine);
-
-/**
* crypto_transfer_aead_request_to_engine - transfer one aead_request
* to list into the engine queue
* @engine: the hardware engine
@@ -280,21 +266,6 @@ int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine);
/**
- * crypto_finalize_ablkcipher_request - finalize one ablkcipher_request if
- * the request is done
- * @engine: the hardware engine
- * @req: the request need to be finalized
- * @err: error number
- * TODO: Remove this function when skcipher conversion is finished
- */
-void crypto_finalize_ablkcipher_request(struct crypto_engine *engine,
- struct ablkcipher_request *req, int err)
-{
- return crypto_finalize_request(engine, &req->base, err);
-}
-EXPORT_SYMBOL_GPL(crypto_finalize_ablkcipher_request);
-
-/**
* crypto_finalize_aead_request - finalize one aead_request if
* the request is done
* @engine: the hardware engine
@@ -425,7 +396,7 @@ EXPORT_SYMBOL_GPL(crypto_engine_stop);
*/
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt)
{
- struct sched_param param = { .sched_priority = MAX_RT_PRIO - 1 };
+ struct sched_param param = { .sched_priority = MAX_RT_PRIO / 2 };
struct crypto_engine *engine;
if (!dev)
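
Besides dropping the ablkcipher compatibility helpers, the engine hunk lowers the kthread priority from MAX_RT_PRIO - 1 (the highest realtime priority) to MAX_RT_PRIO / 2, so offload work no longer competes with the most latency-critical RT tasks. Driver-side usage is unchanged; a hedged probe-time sketch (example_probe is hypothetical):

	#include <crypto/engine.h>

	static int example_probe(struct device *dev)
	{
		struct crypto_engine *engine;
		int err;

		/* rt=true now selects the mid-range RT priority shown above */
		engine = crypto_engine_alloc_init(dev, true);
		if (!engine)
			return -ENOMEM;

		err = crypto_engine_start(engine);
		if (err)
			crypto_engine_exit(engine);
		return err;
	}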
diff --git a/crypto/crypto_user_base.c b/crypto/crypto_user_base.c
index c65e39005ce2..3fa20f12989f 100644
--- a/crypto/crypto_user_base.c
+++ b/crypto/crypto_user_base.c
@@ -10,9 +10,10 @@
#include <linux/crypto.h>
#include <linux/cryptouser.h>
#include <linux/sched.h>
-#include <net/netlink.h>
#include <linux/security.h>
+#include <net/netlink.h>
#include <net/net_namespace.h>
+#include <net/sock.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
@@ -25,9 +26,6 @@
static DEFINE_MUTEX(crypto_cfg_mutex);
-/* The crypto netlink socket */
-struct sock *crypto_nlsk;
-
struct crypto_dump_info {
struct sk_buff *in_skb;
struct sk_buff *out_skb;
@@ -186,6 +184,7 @@ out:
static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
struct nlattr **attrs)
{
+ struct net *net = sock_net(in_skb->sk);
struct crypto_user_alg *p = nlmsg_data(in_nlh);
struct crypto_alg *alg;
struct sk_buff *skb;
@@ -214,10 +213,12 @@ static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
drop_alg:
crypto_mod_put(alg);
- if (err)
+ if (err) {
+ kfree_skb(skb);
return err;
+ }
- return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
+ return nlmsg_unicast(net->crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}
static int crypto_dump_report(struct sk_buff *skb, struct netlink_callback *cb)
@@ -322,7 +323,8 @@ static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
if (refcount_read(&alg->cra_refcnt) > 2)
goto drop_alg;
- err = crypto_unregister_instance((struct crypto_instance *)alg);
+ crypto_unregister_instance((struct crypto_instance *)alg);
+ err = 0;
drop_alg:
crypto_mod_put(alg);
@@ -420,6 +422,7 @@ static const struct crypto_link {
static int crypto_user_rcv_msg(struct sk_buff *skb, struct nlmsghdr *nlh,
struct netlink_ext_ack *extack)
{
+ struct net *net = sock_net(skb->sk);
struct nlattr *attrs[CRYPTOCFGA_MAX+1];
const struct crypto_link *link;
int type, err;
@@ -450,7 +453,7 @@ static int crypto_user_rcv_msg(struct sk_buff *skb, struct nlmsghdr *nlh,
.done = link->done,
.min_dump_alloc = min(dump_alloc, 65535UL),
};
- err = netlink_dump_start(crypto_nlsk, skb, nlh, &c);
+ err = netlink_dump_start(net->crypto_nlsk, skb, nlh, &c);
}
return err;
@@ -474,22 +477,35 @@ static void crypto_netlink_rcv(struct sk_buff *skb)
mutex_unlock(&crypto_cfg_mutex);
}
-static int __init crypto_user_init(void)
+static int __net_init crypto_netlink_init(struct net *net)
{
struct netlink_kernel_cfg cfg = {
.input = crypto_netlink_rcv,
};
- crypto_nlsk = netlink_kernel_create(&init_net, NETLINK_CRYPTO, &cfg);
- if (!crypto_nlsk)
- return -ENOMEM;
+ net->crypto_nlsk = netlink_kernel_create(net, NETLINK_CRYPTO, &cfg);
+ return net->crypto_nlsk == NULL ? -ENOMEM : 0;
+}
- return 0;
+static void __net_exit crypto_netlink_exit(struct net *net)
+{
+ netlink_kernel_release(net->crypto_nlsk);
+ net->crypto_nlsk = NULL;
+}
+
+static struct pernet_operations crypto_netlink_net_ops = {
+ .init = crypto_netlink_init,
+ .exit = crypto_netlink_exit,
+};
+
+static int __init crypto_user_init(void)
+{
+ return register_pernet_subsys(&crypto_netlink_net_ops);
}
static void __exit crypto_user_exit(void)
{
- netlink_kernel_release(crypto_nlsk);
+ unregister_pernet_subsys(&crypto_netlink_net_ops);
}
module_init(crypto_user_init);
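
The conversion above replaces the single global crypto_nlsk with a per-namespace socket, registered through pernet_operations so that .init runs for every existing and future netns. The same pattern reduced to its skeleton (all example_* names are placeholders; the crypto_nlsk member of struct net is added elsewhere in this series):

	#include <linux/module.h>
	#include <net/net_namespace.h>

	static int __net_init example_net_init(struct net *net)
	{
		/* allocate per-namespace state (e.g. a netlink socket) here */
		return 0;
	}

	static void __net_exit example_net_exit(struct net *net)
	{
		/* release the per-namespace state here */
	}

	static struct pernet_operations example_net_ops = {
		.init = example_net_init,
		.exit = example_net_exit,
	};

	static int __init example_init(void)
	{
		return register_pernet_subsys(&example_net_ops);
	}

	static void __exit example_exit(void)
	{
		unregister_pernet_subsys(&example_net_ops);
	}

	module_init(example_init);
	module_exit(example_exit);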
diff --git a/crypto/crypto_user_stat.c b/crypto/crypto_user_stat.c
index a03f326a63d3..154884bf9275 100644
--- a/crypto/crypto_user_stat.c
+++ b/crypto/crypto_user_stat.c
@@ -10,6 +10,7 @@
#include <linux/cryptouser.h>
#include <linux/sched.h>
#include <net/netlink.h>
+#include <net/sock.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
@@ -212,10 +213,6 @@ static int crypto_reportstat_one(struct crypto_alg *alg,
if (crypto_report_cipher(skb, alg))
goto nla_put_failure;
break;
- case CRYPTO_ALG_TYPE_BLKCIPHER:
- if (crypto_report_cipher(skb, alg))
- goto nla_put_failure;
- break;
case CRYPTO_ALG_TYPE_CIPHER:
if (crypto_report_cipher(skb, alg))
goto nla_put_failure;
@@ -298,6 +295,7 @@ out:
int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
struct nlattr **attrs)
{
+ struct net *net = sock_net(in_skb->sk);
struct crypto_user_alg *p = nlmsg_data(in_nlh);
struct crypto_alg *alg;
struct sk_buff *skb;
@@ -326,10 +324,12 @@ int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
drop_alg:
crypto_mod_put(alg);
- if (err)
+ if (err) {
+ kfree_skb(skb);
return err;
+ }
- return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
+ return nlmsg_unicast(net->crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}
MODULE_LICENSE("GPL");
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 70a3fccb82f3..a8feab621c6c 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -129,10 +129,12 @@ static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
+ alg = skcipher_ialg_simple(inst);
+
/* Block size must be >= 4 bytes. */
err = -EINVAL;
if (alg->cra_blocksize < 4)
@@ -155,14 +157,11 @@ static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.decrypt = crypto_ctr_crypt;
err = skcipher_register_instance(tmpl, inst);
- if (err)
- goto out_free_inst;
- goto out_put_alg;
-
+ if (err) {
out_free_inst:
- inst->free(inst);
-out_put_alg:
- crypto_mod_put(alg);
+ inst->free(inst);
+ }
+
return err;
}
@@ -171,7 +170,6 @@ static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
{
struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
struct crypto_skcipher *child = ctx->child;
- int err;
/* the nonce is stored in bytes at end of key */
if (keylen < CTR_RFC3686_NONCE_SIZE)
@@ -185,11 +183,7 @@ static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_skcipher_setkey(child, key, keylen);
- crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
-
- return err;
+ return crypto_skcipher_setkey(child, key, keylen);
}
static int crypto_rfc3686_crypt(struct skcipher_request *req)
@@ -292,8 +286,8 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
spawn = skcipher_instance_ctx(inst);
- crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
- err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
+ err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
+ cipher_name, 0, mask);
if (err)
goto err_free_inst;
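
The ctr/rfc3686 hunks switch to the newer spawn API, where crypto_grab_skcipher() takes the owning instance directly and the separate crypto_set_skcipher_spawn() step disappears. A stripped-down sketch of the new calling convention in a hypothetical template, with error handling reduced to the essentials:

	#include <crypto/internal/skcipher.h>
	#include <linux/slab.h>

	static int example_create(struct crypto_template *tmpl, struct rtattr **tb,
				  const char *name, u32 type, u32 mask)
	{
		struct skcipher_instance *inst;
		struct crypto_skcipher_spawn *spawn;
		int err;

		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
		if (!inst)
			return -ENOMEM;
		spawn = skcipher_instance_ctx(inst);

		/* one call both binds the spawn to the instance and grabs the alg */
		err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
					   name, type, mask);
		if (err)
			kfree(inst);
		return err;
	}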
diff --git a/crypto/cts.c b/crypto/cts.c
index 6b6087dbb62a..48188adc8e91 100644
--- a/crypto/cts.c
+++ b/crypto/cts.c
@@ -78,15 +78,11 @@ static int crypto_cts_setkey(struct crypto_skcipher *parent, const u8 *key,
{
struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(parent);
struct crypto_skcipher *child = ctx->child;
- int err;
crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_skcipher_setkey(child, key, keylen);
- crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_skcipher_setkey(child, key, keylen);
}
static void cts_cbc_crypt_done(struct crypto_async_request *areq, int err)
@@ -332,6 +328,7 @@ static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
struct crypto_attr_type *algt;
struct skcipher_alg *alg;
const char *cipher_name;
+ u32 mask;
int err;
algt = crypto_get_attr_type(tb);
@@ -341,6 +338,8 @@ static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return -EINVAL;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
cipher_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(cipher_name))
return PTR_ERR(cipher_name);
@@ -351,10 +350,8 @@ static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
spawn = skcipher_instance_ctx(inst);
- crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
- err = crypto_grab_skcipher(spawn, cipher_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
+ cipher_name, 0, mask);
if (err)
goto err_free_inst;
diff --git a/crypto/curve25519-generic.c b/crypto/curve25519-generic.c
new file mode 100644
index 000000000000..bd88fd571393
--- /dev/null
+++ b/crypto/curve25519-generic.c
@@ -0,0 +1,90 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+
+#include <crypto/curve25519.h>
+#include <crypto/internal/kpp.h>
+#include <crypto/kpp.h>
+#include <linux/module.h>
+#include <linux/scatterlist.h>
+
+static int curve25519_set_secret(struct crypto_kpp *tfm, const void *buf,
+ unsigned int len)
+{
+ u8 *secret = kpp_tfm_ctx(tfm);
+
+ if (!len)
+ curve25519_generate_secret(secret);
+ else if (len == CURVE25519_KEY_SIZE &&
+ crypto_memneq(buf, curve25519_null_point, CURVE25519_KEY_SIZE))
+ memcpy(secret, buf, CURVE25519_KEY_SIZE);
+ else
+ return -EINVAL;
+ return 0;
+}
+
+static int curve25519_compute_value(struct kpp_request *req)
+{
+ struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
+ const u8 *secret = kpp_tfm_ctx(tfm);
+ u8 public_key[CURVE25519_KEY_SIZE];
+ u8 buf[CURVE25519_KEY_SIZE];
+ int copied, nbytes;
+ u8 const *bp;
+
+ if (req->src) {
+ copied = sg_copy_to_buffer(req->src,
+ sg_nents_for_len(req->src,
+ CURVE25519_KEY_SIZE),
+ public_key, CURVE25519_KEY_SIZE);
+ if (copied != CURVE25519_KEY_SIZE)
+ return -EINVAL;
+ bp = public_key;
+ } else {
+ bp = curve25519_base_point;
+ }
+
+ curve25519_generic(buf, secret, bp);
+
+ /* might want less than we've got */
+ nbytes = min_t(size_t, CURVE25519_KEY_SIZE, req->dst_len);
+ copied = sg_copy_from_buffer(req->dst, sg_nents_for_len(req->dst,
+ nbytes),
+ buf, nbytes);
+ if (copied != nbytes)
+ return -EINVAL;
+ return 0;
+}
+
+static unsigned int curve25519_max_size(struct crypto_kpp *tfm)
+{
+ return CURVE25519_KEY_SIZE;
+}
+
+static struct kpp_alg curve25519_alg = {
+ .base.cra_name = "curve25519",
+ .base.cra_driver_name = "curve25519-generic",
+ .base.cra_priority = 100,
+ .base.cra_module = THIS_MODULE,
+ .base.cra_ctxsize = CURVE25519_KEY_SIZE,
+
+ .set_secret = curve25519_set_secret,
+ .generate_public_key = curve25519_compute_value,
+ .compute_shared_secret = curve25519_compute_value,
+ .max_size = curve25519_max_size,
+};
+
+static int curve25519_init(void)
+{
+ return crypto_register_kpp(&curve25519_alg);
+}
+
+static void curve25519_exit(void)
+{
+ crypto_unregister_kpp(&curve25519_alg);
+}
+
+subsys_initcall(curve25519_init);
+module_exit(curve25519_exit);
+
+MODULE_ALIAS_CRYPTO("curve25519");
+MODULE_ALIAS_CRYPTO("curve25519-generic");
+MODULE_LICENSE("GPL");
diff --git a/crypto/des_generic.c b/crypto/des_generic.c
index dc085514408a..c85354a5e94c 100644
--- a/crypto/des_generic.c
+++ b/crypto/des_generic.c
@@ -13,934 +13,73 @@
#include <linux/module.h>
#include <linux/errno.h>
#include <linux/crypto.h>
-#include <linux/types.h>
-#include <crypto/des.h>
-
-#define ROL(x, r) ((x) = rol32((x), (r)))
-#define ROR(x, r) ((x) = ror32((x), (r)))
-
-struct des_ctx {
- u32 expkey[DES_EXPKEY_WORDS];
-};
-
-struct des3_ede_ctx {
- u32 expkey[DES3_EDE_EXPKEY_WORDS];
-};
-
-/* Lookup tables for key expansion */
-
-static const u8 pc1[256] = {
- 0x00, 0x00, 0x40, 0x04, 0x10, 0x10, 0x50, 0x14,
- 0x04, 0x40, 0x44, 0x44, 0x14, 0x50, 0x54, 0x54,
- 0x02, 0x02, 0x42, 0x06, 0x12, 0x12, 0x52, 0x16,
- 0x06, 0x42, 0x46, 0x46, 0x16, 0x52, 0x56, 0x56,
- 0x80, 0x08, 0xc0, 0x0c, 0x90, 0x18, 0xd0, 0x1c,
- 0x84, 0x48, 0xc4, 0x4c, 0x94, 0x58, 0xd4, 0x5c,
- 0x82, 0x0a, 0xc2, 0x0e, 0x92, 0x1a, 0xd2, 0x1e,
- 0x86, 0x4a, 0xc6, 0x4e, 0x96, 0x5a, 0xd6, 0x5e,
- 0x20, 0x20, 0x60, 0x24, 0x30, 0x30, 0x70, 0x34,
- 0x24, 0x60, 0x64, 0x64, 0x34, 0x70, 0x74, 0x74,
- 0x22, 0x22, 0x62, 0x26, 0x32, 0x32, 0x72, 0x36,
- 0x26, 0x62, 0x66, 0x66, 0x36, 0x72, 0x76, 0x76,
- 0xa0, 0x28, 0xe0, 0x2c, 0xb0, 0x38, 0xf0, 0x3c,
- 0xa4, 0x68, 0xe4, 0x6c, 0xb4, 0x78, 0xf4, 0x7c,
- 0xa2, 0x2a, 0xe2, 0x2e, 0xb2, 0x3a, 0xf2, 0x3e,
- 0xa6, 0x6a, 0xe6, 0x6e, 0xb6, 0x7a, 0xf6, 0x7e,
- 0x08, 0x80, 0x48, 0x84, 0x18, 0x90, 0x58, 0x94,
- 0x0c, 0xc0, 0x4c, 0xc4, 0x1c, 0xd0, 0x5c, 0xd4,
- 0x0a, 0x82, 0x4a, 0x86, 0x1a, 0x92, 0x5a, 0x96,
- 0x0e, 0xc2, 0x4e, 0xc6, 0x1e, 0xd2, 0x5e, 0xd6,
- 0x88, 0x88, 0xc8, 0x8c, 0x98, 0x98, 0xd8, 0x9c,
- 0x8c, 0xc8, 0xcc, 0xcc, 0x9c, 0xd8, 0xdc, 0xdc,
- 0x8a, 0x8a, 0xca, 0x8e, 0x9a, 0x9a, 0xda, 0x9e,
- 0x8e, 0xca, 0xce, 0xce, 0x9e, 0xda, 0xde, 0xde,
- 0x28, 0xa0, 0x68, 0xa4, 0x38, 0xb0, 0x78, 0xb4,
- 0x2c, 0xe0, 0x6c, 0xe4, 0x3c, 0xf0, 0x7c, 0xf4,
- 0x2a, 0xa2, 0x6a, 0xa6, 0x3a, 0xb2, 0x7a, 0xb6,
- 0x2e, 0xe2, 0x6e, 0xe6, 0x3e, 0xf2, 0x7e, 0xf6,
- 0xa8, 0xa8, 0xe8, 0xac, 0xb8, 0xb8, 0xf8, 0xbc,
- 0xac, 0xe8, 0xec, 0xec, 0xbc, 0xf8, 0xfc, 0xfc,
- 0xaa, 0xaa, 0xea, 0xae, 0xba, 0xba, 0xfa, 0xbe,
- 0xae, 0xea, 0xee, 0xee, 0xbe, 0xfa, 0xfe, 0xfe
-};
-
-static const u8 rs[256] = {
- 0x00, 0x00, 0x80, 0x80, 0x02, 0x02, 0x82, 0x82,
- 0x04, 0x04, 0x84, 0x84, 0x06, 0x06, 0x86, 0x86,
- 0x08, 0x08, 0x88, 0x88, 0x0a, 0x0a, 0x8a, 0x8a,
- 0x0c, 0x0c, 0x8c, 0x8c, 0x0e, 0x0e, 0x8e, 0x8e,
- 0x10, 0x10, 0x90, 0x90, 0x12, 0x12, 0x92, 0x92,
- 0x14, 0x14, 0x94, 0x94, 0x16, 0x16, 0x96, 0x96,
- 0x18, 0x18, 0x98, 0x98, 0x1a, 0x1a, 0x9a, 0x9a,
- 0x1c, 0x1c, 0x9c, 0x9c, 0x1e, 0x1e, 0x9e, 0x9e,
- 0x20, 0x20, 0xa0, 0xa0, 0x22, 0x22, 0xa2, 0xa2,
- 0x24, 0x24, 0xa4, 0xa4, 0x26, 0x26, 0xa6, 0xa6,
- 0x28, 0x28, 0xa8, 0xa8, 0x2a, 0x2a, 0xaa, 0xaa,
- 0x2c, 0x2c, 0xac, 0xac, 0x2e, 0x2e, 0xae, 0xae,
- 0x30, 0x30, 0xb0, 0xb0, 0x32, 0x32, 0xb2, 0xb2,
- 0x34, 0x34, 0xb4, 0xb4, 0x36, 0x36, 0xb6, 0xb6,
- 0x38, 0x38, 0xb8, 0xb8, 0x3a, 0x3a, 0xba, 0xba,
- 0x3c, 0x3c, 0xbc, 0xbc, 0x3e, 0x3e, 0xbe, 0xbe,
- 0x40, 0x40, 0xc0, 0xc0, 0x42, 0x42, 0xc2, 0xc2,
- 0x44, 0x44, 0xc4, 0xc4, 0x46, 0x46, 0xc6, 0xc6,
- 0x48, 0x48, 0xc8, 0xc8, 0x4a, 0x4a, 0xca, 0xca,
- 0x4c, 0x4c, 0xcc, 0xcc, 0x4e, 0x4e, 0xce, 0xce,
- 0x50, 0x50, 0xd0, 0xd0, 0x52, 0x52, 0xd2, 0xd2,
- 0x54, 0x54, 0xd4, 0xd4, 0x56, 0x56, 0xd6, 0xd6,
- 0x58, 0x58, 0xd8, 0xd8, 0x5a, 0x5a, 0xda, 0xda,
- 0x5c, 0x5c, 0xdc, 0xdc, 0x5e, 0x5e, 0xde, 0xde,
- 0x60, 0x60, 0xe0, 0xe0, 0x62, 0x62, 0xe2, 0xe2,
- 0x64, 0x64, 0xe4, 0xe4, 0x66, 0x66, 0xe6, 0xe6,
- 0x68, 0x68, 0xe8, 0xe8, 0x6a, 0x6a, 0xea, 0xea,
- 0x6c, 0x6c, 0xec, 0xec, 0x6e, 0x6e, 0xee, 0xee,
- 0x70, 0x70, 0xf0, 0xf0, 0x72, 0x72, 0xf2, 0xf2,
- 0x74, 0x74, 0xf4, 0xf4, 0x76, 0x76, 0xf6, 0xf6,
- 0x78, 0x78, 0xf8, 0xf8, 0x7a, 0x7a, 0xfa, 0xfa,
- 0x7c, 0x7c, 0xfc, 0xfc, 0x7e, 0x7e, 0xfe, 0xfe
-};
-
-static const u32 pc2[1024] = {
- 0x00000000, 0x00000000, 0x00000000, 0x00000000,
- 0x00040000, 0x00000000, 0x04000000, 0x00100000,
- 0x00400000, 0x00000008, 0x00000800, 0x40000000,
- 0x00440000, 0x00000008, 0x04000800, 0x40100000,
- 0x00000400, 0x00000020, 0x08000000, 0x00000100,
- 0x00040400, 0x00000020, 0x0c000000, 0x00100100,
- 0x00400400, 0x00000028, 0x08000800, 0x40000100,
- 0x00440400, 0x00000028, 0x0c000800, 0x40100100,
- 0x80000000, 0x00000010, 0x00000000, 0x00800000,
- 0x80040000, 0x00000010, 0x04000000, 0x00900000,
- 0x80400000, 0x00000018, 0x00000800, 0x40800000,
- 0x80440000, 0x00000018, 0x04000800, 0x40900000,
- 0x80000400, 0x00000030, 0x08000000, 0x00800100,
- 0x80040400, 0x00000030, 0x0c000000, 0x00900100,
- 0x80400400, 0x00000038, 0x08000800, 0x40800100,
- 0x80440400, 0x00000038, 0x0c000800, 0x40900100,
- 0x10000000, 0x00000000, 0x00200000, 0x00001000,
- 0x10040000, 0x00000000, 0x04200000, 0x00101000,
- 0x10400000, 0x00000008, 0x00200800, 0x40001000,
- 0x10440000, 0x00000008, 0x04200800, 0x40101000,
- 0x10000400, 0x00000020, 0x08200000, 0x00001100,
- 0x10040400, 0x00000020, 0x0c200000, 0x00101100,
- 0x10400400, 0x00000028, 0x08200800, 0x40001100,
- 0x10440400, 0x00000028, 0x0c200800, 0x40101100,
- 0x90000000, 0x00000010, 0x00200000, 0x00801000,
- 0x90040000, 0x00000010, 0x04200000, 0x00901000,
- 0x90400000, 0x00000018, 0x00200800, 0x40801000,
- 0x90440000, 0x00000018, 0x04200800, 0x40901000,
- 0x90000400, 0x00000030, 0x08200000, 0x00801100,
- 0x90040400, 0x00000030, 0x0c200000, 0x00901100,
- 0x90400400, 0x00000038, 0x08200800, 0x40801100,
- 0x90440400, 0x00000038, 0x0c200800, 0x40901100,
- 0x00000200, 0x00080000, 0x00000000, 0x00000004,
- 0x00040200, 0x00080000, 0x04000000, 0x00100004,
- 0x00400200, 0x00080008, 0x00000800, 0x40000004,
- 0x00440200, 0x00080008, 0x04000800, 0x40100004,
- 0x00000600, 0x00080020, 0x08000000, 0x00000104,
- 0x00040600, 0x00080020, 0x0c000000, 0x00100104,
- 0x00400600, 0x00080028, 0x08000800, 0x40000104,
- 0x00440600, 0x00080028, 0x0c000800, 0x40100104,
- 0x80000200, 0x00080010, 0x00000000, 0x00800004,
- 0x80040200, 0x00080010, 0x04000000, 0x00900004,
- 0x80400200, 0x00080018, 0x00000800, 0x40800004,
- 0x80440200, 0x00080018, 0x04000800, 0x40900004,
- 0x80000600, 0x00080030, 0x08000000, 0x00800104,
- 0x80040600, 0x00080030, 0x0c000000, 0x00900104,
- 0x80400600, 0x00080038, 0x08000800, 0x40800104,
- 0x80440600, 0x00080038, 0x0c000800, 0x40900104,
- 0x10000200, 0x00080000, 0x00200000, 0x00001004,
- 0x10040200, 0x00080000, 0x04200000, 0x00101004,
- 0x10400200, 0x00080008, 0x00200800, 0x40001004,
- 0x10440200, 0x00080008, 0x04200800, 0x40101004,
- 0x10000600, 0x00080020, 0x08200000, 0x00001104,
- 0x10040600, 0x00080020, 0x0c200000, 0x00101104,
- 0x10400600, 0x00080028, 0x08200800, 0x40001104,
- 0x10440600, 0x00080028, 0x0c200800, 0x40101104,
- 0x90000200, 0x00080010, 0x00200000, 0x00801004,
- 0x90040200, 0x00080010, 0x04200000, 0x00901004,
- 0x90400200, 0x00080018, 0x00200800, 0x40801004,
- 0x90440200, 0x00080018, 0x04200800, 0x40901004,
- 0x90000600, 0x00080030, 0x08200000, 0x00801104,
- 0x90040600, 0x00080030, 0x0c200000, 0x00901104,
- 0x90400600, 0x00080038, 0x08200800, 0x40801104,
- 0x90440600, 0x00080038, 0x0c200800, 0x40901104,
- 0x00000002, 0x00002000, 0x20000000, 0x00000001,
- 0x00040002, 0x00002000, 0x24000000, 0x00100001,
- 0x00400002, 0x00002008, 0x20000800, 0x40000001,
- 0x00440002, 0x00002008, 0x24000800, 0x40100001,
- 0x00000402, 0x00002020, 0x28000000, 0x00000101,
- 0x00040402, 0x00002020, 0x2c000000, 0x00100101,
- 0x00400402, 0x00002028, 0x28000800, 0x40000101,
- 0x00440402, 0x00002028, 0x2c000800, 0x40100101,
- 0x80000002, 0x00002010, 0x20000000, 0x00800001,
- 0x80040002, 0x00002010, 0x24000000, 0x00900001,
- 0x80400002, 0x00002018, 0x20000800, 0x40800001,
- 0x80440002, 0x00002018, 0x24000800, 0x40900001,
- 0x80000402, 0x00002030, 0x28000000, 0x00800101,
- 0x80040402, 0x00002030, 0x2c000000, 0x00900101,
- 0x80400402, 0x00002038, 0x28000800, 0x40800101,
- 0x80440402, 0x00002038, 0x2c000800, 0x40900101,
- 0x10000002, 0x00002000, 0x20200000, 0x00001001,
- 0x10040002, 0x00002000, 0x24200000, 0x00101001,
- 0x10400002, 0x00002008, 0x20200800, 0x40001001,
- 0x10440002, 0x00002008, 0x24200800, 0x40101001,
- 0x10000402, 0x00002020, 0x28200000, 0x00001101,
- 0x10040402, 0x00002020, 0x2c200000, 0x00101101,
- 0x10400402, 0x00002028, 0x28200800, 0x40001101,
- 0x10440402, 0x00002028, 0x2c200800, 0x40101101,
- 0x90000002, 0x00002010, 0x20200000, 0x00801001,
- 0x90040002, 0x00002010, 0x24200000, 0x00901001,
- 0x90400002, 0x00002018, 0x20200800, 0x40801001,
- 0x90440002, 0x00002018, 0x24200800, 0x40901001,
- 0x90000402, 0x00002030, 0x28200000, 0x00801101,
- 0x90040402, 0x00002030, 0x2c200000, 0x00901101,
- 0x90400402, 0x00002038, 0x28200800, 0x40801101,
- 0x90440402, 0x00002038, 0x2c200800, 0x40901101,
- 0x00000202, 0x00082000, 0x20000000, 0x00000005,
- 0x00040202, 0x00082000, 0x24000000, 0x00100005,
- 0x00400202, 0x00082008, 0x20000800, 0x40000005,
- 0x00440202, 0x00082008, 0x24000800, 0x40100005,
- 0x00000602, 0x00082020, 0x28000000, 0x00000105,
- 0x00040602, 0x00082020, 0x2c000000, 0x00100105,
- 0x00400602, 0x00082028, 0x28000800, 0x40000105,
- 0x00440602, 0x00082028, 0x2c000800, 0x40100105,
- 0x80000202, 0x00082010, 0x20000000, 0x00800005,
- 0x80040202, 0x00082010, 0x24000000, 0x00900005,
- 0x80400202, 0x00082018, 0x20000800, 0x40800005,
- 0x80440202, 0x00082018, 0x24000800, 0x40900005,
- 0x80000602, 0x00082030, 0x28000000, 0x00800105,
- 0x80040602, 0x00082030, 0x2c000000, 0x00900105,
- 0x80400602, 0x00082038, 0x28000800, 0x40800105,
- 0x80440602, 0x00082038, 0x2c000800, 0x40900105,
- 0x10000202, 0x00082000, 0x20200000, 0x00001005,
- 0x10040202, 0x00082000, 0x24200000, 0x00101005,
- 0x10400202, 0x00082008, 0x20200800, 0x40001005,
- 0x10440202, 0x00082008, 0x24200800, 0x40101005,
- 0x10000602, 0x00082020, 0x28200000, 0x00001105,
- 0x10040602, 0x00082020, 0x2c200000, 0x00101105,
- 0x10400602, 0x00082028, 0x28200800, 0x40001105,
- 0x10440602, 0x00082028, 0x2c200800, 0x40101105,
- 0x90000202, 0x00082010, 0x20200000, 0x00801005,
- 0x90040202, 0x00082010, 0x24200000, 0x00901005,
- 0x90400202, 0x00082018, 0x20200800, 0x40801005,
- 0x90440202, 0x00082018, 0x24200800, 0x40901005,
- 0x90000602, 0x00082030, 0x28200000, 0x00801105,
- 0x90040602, 0x00082030, 0x2c200000, 0x00901105,
- 0x90400602, 0x00082038, 0x28200800, 0x40801105,
- 0x90440602, 0x00082038, 0x2c200800, 0x40901105,
-
- 0x00000000, 0x00000000, 0x00000000, 0x00000000,
- 0x00000000, 0x00000008, 0x00080000, 0x10000000,
- 0x02000000, 0x00000000, 0x00000080, 0x00001000,
- 0x02000000, 0x00000008, 0x00080080, 0x10001000,
- 0x00004000, 0x00000000, 0x00000040, 0x00040000,
- 0x00004000, 0x00000008, 0x00080040, 0x10040000,
- 0x02004000, 0x00000000, 0x000000c0, 0x00041000,
- 0x02004000, 0x00000008, 0x000800c0, 0x10041000,
- 0x00020000, 0x00008000, 0x08000000, 0x00200000,
- 0x00020000, 0x00008008, 0x08080000, 0x10200000,
- 0x02020000, 0x00008000, 0x08000080, 0x00201000,
- 0x02020000, 0x00008008, 0x08080080, 0x10201000,
- 0x00024000, 0x00008000, 0x08000040, 0x00240000,
- 0x00024000, 0x00008008, 0x08080040, 0x10240000,
- 0x02024000, 0x00008000, 0x080000c0, 0x00241000,
- 0x02024000, 0x00008008, 0x080800c0, 0x10241000,
- 0x00000000, 0x01000000, 0x00002000, 0x00000020,
- 0x00000000, 0x01000008, 0x00082000, 0x10000020,
- 0x02000000, 0x01000000, 0x00002080, 0x00001020,
- 0x02000000, 0x01000008, 0x00082080, 0x10001020,
- 0x00004000, 0x01000000, 0x00002040, 0x00040020,
- 0x00004000, 0x01000008, 0x00082040, 0x10040020,
- 0x02004000, 0x01000000, 0x000020c0, 0x00041020,
- 0x02004000, 0x01000008, 0x000820c0, 0x10041020,
- 0x00020000, 0x01008000, 0x08002000, 0x00200020,
- 0x00020000, 0x01008008, 0x08082000, 0x10200020,
- 0x02020000, 0x01008000, 0x08002080, 0x00201020,
- 0x02020000, 0x01008008, 0x08082080, 0x10201020,
- 0x00024000, 0x01008000, 0x08002040, 0x00240020,
- 0x00024000, 0x01008008, 0x08082040, 0x10240020,
- 0x02024000, 0x01008000, 0x080020c0, 0x00241020,
- 0x02024000, 0x01008008, 0x080820c0, 0x10241020,
- 0x00000400, 0x04000000, 0x00100000, 0x00000004,
- 0x00000400, 0x04000008, 0x00180000, 0x10000004,
- 0x02000400, 0x04000000, 0x00100080, 0x00001004,
- 0x02000400, 0x04000008, 0x00180080, 0x10001004,
- 0x00004400, 0x04000000, 0x00100040, 0x00040004,
- 0x00004400, 0x04000008, 0x00180040, 0x10040004,
- 0x02004400, 0x04000000, 0x001000c0, 0x00041004,
- 0x02004400, 0x04000008, 0x001800c0, 0x10041004,
- 0x00020400, 0x04008000, 0x08100000, 0x00200004,
- 0x00020400, 0x04008008, 0x08180000, 0x10200004,
- 0x02020400, 0x04008000, 0x08100080, 0x00201004,
- 0x02020400, 0x04008008, 0x08180080, 0x10201004,
- 0x00024400, 0x04008000, 0x08100040, 0x00240004,
- 0x00024400, 0x04008008, 0x08180040, 0x10240004,
- 0x02024400, 0x04008000, 0x081000c0, 0x00241004,
- 0x02024400, 0x04008008, 0x081800c0, 0x10241004,
- 0x00000400, 0x05000000, 0x00102000, 0x00000024,
- 0x00000400, 0x05000008, 0x00182000, 0x10000024,
- 0x02000400, 0x05000000, 0x00102080, 0x00001024,
- 0x02000400, 0x05000008, 0x00182080, 0x10001024,
- 0x00004400, 0x05000000, 0x00102040, 0x00040024,
- 0x00004400, 0x05000008, 0x00182040, 0x10040024,
- 0x02004400, 0x05000000, 0x001020c0, 0x00041024,
- 0x02004400, 0x05000008, 0x001820c0, 0x10041024,
- 0x00020400, 0x05008000, 0x08102000, 0x00200024,
- 0x00020400, 0x05008008, 0x08182000, 0x10200024,
- 0x02020400, 0x05008000, 0x08102080, 0x00201024,
- 0x02020400, 0x05008008, 0x08182080, 0x10201024,
- 0x00024400, 0x05008000, 0x08102040, 0x00240024,
- 0x00024400, 0x05008008, 0x08182040, 0x10240024,
- 0x02024400, 0x05008000, 0x081020c0, 0x00241024,
- 0x02024400, 0x05008008, 0x081820c0, 0x10241024,
- 0x00000800, 0x00010000, 0x20000000, 0x00000010,
- 0x00000800, 0x00010008, 0x20080000, 0x10000010,
- 0x02000800, 0x00010000, 0x20000080, 0x00001010,
- 0x02000800, 0x00010008, 0x20080080, 0x10001010,
- 0x00004800, 0x00010000, 0x20000040, 0x00040010,
- 0x00004800, 0x00010008, 0x20080040, 0x10040010,
- 0x02004800, 0x00010000, 0x200000c0, 0x00041010,
- 0x02004800, 0x00010008, 0x200800c0, 0x10041010,
- 0x00020800, 0x00018000, 0x28000000, 0x00200010,
- 0x00020800, 0x00018008, 0x28080000, 0x10200010,
- 0x02020800, 0x00018000, 0x28000080, 0x00201010,
- 0x02020800, 0x00018008, 0x28080080, 0x10201010,
- 0x00024800, 0x00018000, 0x28000040, 0x00240010,
- 0x00024800, 0x00018008, 0x28080040, 0x10240010,
- 0x02024800, 0x00018000, 0x280000c0, 0x00241010,
- 0x02024800, 0x00018008, 0x280800c0, 0x10241010,
- 0x00000800, 0x01010000, 0x20002000, 0x00000030,
- 0x00000800, 0x01010008, 0x20082000, 0x10000030,
- 0x02000800, 0x01010000, 0x20002080, 0x00001030,
- 0x02000800, 0x01010008, 0x20082080, 0x10001030,
- 0x00004800, 0x01010000, 0x20002040, 0x00040030,
- 0x00004800, 0x01010008, 0x20082040, 0x10040030,
- 0x02004800, 0x01010000, 0x200020c0, 0x00041030,
- 0x02004800, 0x01010008, 0x200820c0, 0x10041030,
- 0x00020800, 0x01018000, 0x28002000, 0x00200030,
- 0x00020800, 0x01018008, 0x28082000, 0x10200030,
- 0x02020800, 0x01018000, 0x28002080, 0x00201030,
- 0x02020800, 0x01018008, 0x28082080, 0x10201030,
- 0x00024800, 0x01018000, 0x28002040, 0x00240030,
- 0x00024800, 0x01018008, 0x28082040, 0x10240030,
- 0x02024800, 0x01018000, 0x280020c0, 0x00241030,
- 0x02024800, 0x01018008, 0x280820c0, 0x10241030,
- 0x00000c00, 0x04010000, 0x20100000, 0x00000014,
- 0x00000c00, 0x04010008, 0x20180000, 0x10000014,
- 0x02000c00, 0x04010000, 0x20100080, 0x00001014,
- 0x02000c00, 0x04010008, 0x20180080, 0x10001014,
- 0x00004c00, 0x04010000, 0x20100040, 0x00040014,
- 0x00004c00, 0x04010008, 0x20180040, 0x10040014,
- 0x02004c00, 0x04010000, 0x201000c0, 0x00041014,
- 0x02004c00, 0x04010008, 0x201800c0, 0x10041014,
- 0x00020c00, 0x04018000, 0x28100000, 0x00200014,
- 0x00020c00, 0x04018008, 0x28180000, 0x10200014,
- 0x02020c00, 0x04018000, 0x28100080, 0x00201014,
- 0x02020c00, 0x04018008, 0x28180080, 0x10201014,
- 0x00024c00, 0x04018000, 0x28100040, 0x00240014,
- 0x00024c00, 0x04018008, 0x28180040, 0x10240014,
- 0x02024c00, 0x04018000, 0x281000c0, 0x00241014,
- 0x02024c00, 0x04018008, 0x281800c0, 0x10241014,
- 0x00000c00, 0x05010000, 0x20102000, 0x00000034,
- 0x00000c00, 0x05010008, 0x20182000, 0x10000034,
- 0x02000c00, 0x05010000, 0x20102080, 0x00001034,
- 0x02000c00, 0x05010008, 0x20182080, 0x10001034,
- 0x00004c00, 0x05010000, 0x20102040, 0x00040034,
- 0x00004c00, 0x05010008, 0x20182040, 0x10040034,
- 0x02004c00, 0x05010000, 0x201020c0, 0x00041034,
- 0x02004c00, 0x05010008, 0x201820c0, 0x10041034,
- 0x00020c00, 0x05018000, 0x28102000, 0x00200034,
- 0x00020c00, 0x05018008, 0x28182000, 0x10200034,
- 0x02020c00, 0x05018000, 0x28102080, 0x00201034,
- 0x02020c00, 0x05018008, 0x28182080, 0x10201034,
- 0x00024c00, 0x05018000, 0x28102040, 0x00240034,
- 0x00024c00, 0x05018008, 0x28182040, 0x10240034,
- 0x02024c00, 0x05018000, 0x281020c0, 0x00241034,
- 0x02024c00, 0x05018008, 0x281820c0, 0x10241034
-};
-
-/* S-box lookup tables */
-
-static const u32 S1[64] = {
- 0x01010400, 0x00000000, 0x00010000, 0x01010404,
- 0x01010004, 0x00010404, 0x00000004, 0x00010000,
- 0x00000400, 0x01010400, 0x01010404, 0x00000400,
- 0x01000404, 0x01010004, 0x01000000, 0x00000004,
- 0x00000404, 0x01000400, 0x01000400, 0x00010400,
- 0x00010400, 0x01010000, 0x01010000, 0x01000404,
- 0x00010004, 0x01000004, 0x01000004, 0x00010004,
- 0x00000000, 0x00000404, 0x00010404, 0x01000000,
- 0x00010000, 0x01010404, 0x00000004, 0x01010000,
- 0x01010400, 0x01000000, 0x01000000, 0x00000400,
- 0x01010004, 0x00010000, 0x00010400, 0x01000004,
- 0x00000400, 0x00000004, 0x01000404, 0x00010404,
- 0x01010404, 0x00010004, 0x01010000, 0x01000404,
- 0x01000004, 0x00000404, 0x00010404, 0x01010400,
- 0x00000404, 0x01000400, 0x01000400, 0x00000000,
- 0x00010004, 0x00010400, 0x00000000, 0x01010004
-};
-
-static const u32 S2[64] = {
- 0x80108020, 0x80008000, 0x00008000, 0x00108020,
- 0x00100000, 0x00000020, 0x80100020, 0x80008020,
- 0x80000020, 0x80108020, 0x80108000, 0x80000000,
- 0x80008000, 0x00100000, 0x00000020, 0x80100020,
- 0x00108000, 0x00100020, 0x80008020, 0x00000000,
- 0x80000000, 0x00008000, 0x00108020, 0x80100000,
- 0x00100020, 0x80000020, 0x00000000, 0x00108000,
- 0x00008020, 0x80108000, 0x80100000, 0x00008020,
- 0x00000000, 0x00108020, 0x80100020, 0x00100000,
- 0x80008020, 0x80100000, 0x80108000, 0x00008000,
- 0x80100000, 0x80008000, 0x00000020, 0x80108020,
- 0x00108020, 0x00000020, 0x00008000, 0x80000000,
- 0x00008020, 0x80108000, 0x00100000, 0x80000020,
- 0x00100020, 0x80008020, 0x80000020, 0x00100020,
- 0x00108000, 0x00000000, 0x80008000, 0x00008020,
- 0x80000000, 0x80100020, 0x80108020, 0x00108000
-};
-
-static const u32 S3[64] = {
- 0x00000208, 0x08020200, 0x00000000, 0x08020008,
- 0x08000200, 0x00000000, 0x00020208, 0x08000200,
- 0x00020008, 0x08000008, 0x08000008, 0x00020000,
- 0x08020208, 0x00020008, 0x08020000, 0x00000208,
- 0x08000000, 0x00000008, 0x08020200, 0x00000200,
- 0x00020200, 0x08020000, 0x08020008, 0x00020208,
- 0x08000208, 0x00020200, 0x00020000, 0x08000208,
- 0x00000008, 0x08020208, 0x00000200, 0x08000000,
- 0x08020200, 0x08000000, 0x00020008, 0x00000208,
- 0x00020000, 0x08020200, 0x08000200, 0x00000000,
- 0x00000200, 0x00020008, 0x08020208, 0x08000200,
- 0x08000008, 0x00000200, 0x00000000, 0x08020008,
- 0x08000208, 0x00020000, 0x08000000, 0x08020208,
- 0x00000008, 0x00020208, 0x00020200, 0x08000008,
- 0x08020000, 0x08000208, 0x00000208, 0x08020000,
- 0x00020208, 0x00000008, 0x08020008, 0x00020200
-};
-
-static const u32 S4[64] = {
- 0x00802001, 0x00002081, 0x00002081, 0x00000080,
- 0x00802080, 0x00800081, 0x00800001, 0x00002001,
- 0x00000000, 0x00802000, 0x00802000, 0x00802081,
- 0x00000081, 0x00000000, 0x00800080, 0x00800001,
- 0x00000001, 0x00002000, 0x00800000, 0x00802001,
- 0x00000080, 0x00800000, 0x00002001, 0x00002080,
- 0x00800081, 0x00000001, 0x00002080, 0x00800080,
- 0x00002000, 0x00802080, 0x00802081, 0x00000081,
- 0x00800080, 0x00800001, 0x00802000, 0x00802081,
- 0x00000081, 0x00000000, 0x00000000, 0x00802000,
- 0x00002080, 0x00800080, 0x00800081, 0x00000001,
- 0x00802001, 0x00002081, 0x00002081, 0x00000080,
- 0x00802081, 0x00000081, 0x00000001, 0x00002000,
- 0x00800001, 0x00002001, 0x00802080, 0x00800081,
- 0x00002001, 0x00002080, 0x00800000, 0x00802001,
- 0x00000080, 0x00800000, 0x00002000, 0x00802080
-};
-
-static const u32 S5[64] = {
- 0x00000100, 0x02080100, 0x02080000, 0x42000100,
- 0x00080000, 0x00000100, 0x40000000, 0x02080000,
- 0x40080100, 0x00080000, 0x02000100, 0x40080100,
- 0x42000100, 0x42080000, 0x00080100, 0x40000000,
- 0x02000000, 0x40080000, 0x40080000, 0x00000000,
- 0x40000100, 0x42080100, 0x42080100, 0x02000100,
- 0x42080000, 0x40000100, 0x00000000, 0x42000000,
- 0x02080100, 0x02000000, 0x42000000, 0x00080100,
- 0x00080000, 0x42000100, 0x00000100, 0x02000000,
- 0x40000000, 0x02080000, 0x42000100, 0x40080100,
- 0x02000100, 0x40000000, 0x42080000, 0x02080100,
- 0x40080100, 0x00000100, 0x02000000, 0x42080000,
- 0x42080100, 0x00080100, 0x42000000, 0x42080100,
- 0x02080000, 0x00000000, 0x40080000, 0x42000000,
- 0x00080100, 0x02000100, 0x40000100, 0x00080000,
- 0x00000000, 0x40080000, 0x02080100, 0x40000100
-};
-
-static const u32 S6[64] = {
- 0x20000010, 0x20400000, 0x00004000, 0x20404010,
- 0x20400000, 0x00000010, 0x20404010, 0x00400000,
- 0x20004000, 0x00404010, 0x00400000, 0x20000010,
- 0x00400010, 0x20004000, 0x20000000, 0x00004010,
- 0x00000000, 0x00400010, 0x20004010, 0x00004000,
- 0x00404000, 0x20004010, 0x00000010, 0x20400010,
- 0x20400010, 0x00000000, 0x00404010, 0x20404000,
- 0x00004010, 0x00404000, 0x20404000, 0x20000000,
- 0x20004000, 0x00000010, 0x20400010, 0x00404000,
- 0x20404010, 0x00400000, 0x00004010, 0x20000010,
- 0x00400000, 0x20004000, 0x20000000, 0x00004010,
- 0x20000010, 0x20404010, 0x00404000, 0x20400000,
- 0x00404010, 0x20404000, 0x00000000, 0x20400010,
- 0x00000010, 0x00004000, 0x20400000, 0x00404010,
- 0x00004000, 0x00400010, 0x20004010, 0x00000000,
- 0x20404000, 0x20000000, 0x00400010, 0x20004010
-};
-
-static const u32 S7[64] = {
- 0x00200000, 0x04200002, 0x04000802, 0x00000000,
- 0x00000800, 0x04000802, 0x00200802, 0x04200800,
- 0x04200802, 0x00200000, 0x00000000, 0x04000002,
- 0x00000002, 0x04000000, 0x04200002, 0x00000802,
- 0x04000800, 0x00200802, 0x00200002, 0x04000800,
- 0x04000002, 0x04200000, 0x04200800, 0x00200002,
- 0x04200000, 0x00000800, 0x00000802, 0x04200802,
- 0x00200800, 0x00000002, 0x04000000, 0x00200800,
- 0x04000000, 0x00200800, 0x00200000, 0x04000802,
- 0x04000802, 0x04200002, 0x04200002, 0x00000002,
- 0x00200002, 0x04000000, 0x04000800, 0x00200000,
- 0x04200800, 0x00000802, 0x00200802, 0x04200800,
- 0x00000802, 0x04000002, 0x04200802, 0x04200000,
- 0x00200800, 0x00000000, 0x00000002, 0x04200802,
- 0x00000000, 0x00200802, 0x04200000, 0x00000800,
- 0x04000002, 0x04000800, 0x00000800, 0x00200002
-};
-
-static const u32 S8[64] = {
- 0x10001040, 0x00001000, 0x00040000, 0x10041040,
- 0x10000000, 0x10001040, 0x00000040, 0x10000000,
- 0x00040040, 0x10040000, 0x10041040, 0x00041000,
- 0x10041000, 0x00041040, 0x00001000, 0x00000040,
- 0x10040000, 0x10000040, 0x10001000, 0x00001040,
- 0x00041000, 0x00040040, 0x10040040, 0x10041000,
- 0x00001040, 0x00000000, 0x00000000, 0x10040040,
- 0x10000040, 0x10001000, 0x00041040, 0x00040000,
- 0x00041040, 0x00040000, 0x10041000, 0x00001000,
- 0x00000040, 0x10040040, 0x00001000, 0x00041040,
- 0x10001000, 0x00000040, 0x10000040, 0x10040000,
- 0x10040040, 0x10000000, 0x00040000, 0x10001040,
- 0x00000000, 0x10041040, 0x00040040, 0x10000040,
- 0x10040000, 0x10001000, 0x10001040, 0x00000000,
- 0x10041040, 0x00041000, 0x00041000, 0x00001040,
- 0x00001040, 0x00040040, 0x10000000, 0x10041000
-};
-
-/* Encryption components: IP, FP, and round function */
-
-#define IP(L, R, T) \
- ROL(R, 4); \
- T = L; \
- L ^= R; \
- L &= 0xf0f0f0f0; \
- R ^= L; \
- L ^= T; \
- ROL(R, 12); \
- T = L; \
- L ^= R; \
- L &= 0xffff0000; \
- R ^= L; \
- L ^= T; \
- ROR(R, 14); \
- T = L; \
- L ^= R; \
- L &= 0xcccccccc; \
- R ^= L; \
- L ^= T; \
- ROL(R, 6); \
- T = L; \
- L ^= R; \
- L &= 0xff00ff00; \
- R ^= L; \
- L ^= T; \
- ROR(R, 7); \
- T = L; \
- L ^= R; \
- L &= 0xaaaaaaaa; \
- R ^= L; \
- L ^= T; \
- ROL(L, 1);
-
-#define FP(L, R, T) \
- ROR(L, 1); \
- T = L; \
- L ^= R; \
- L &= 0xaaaaaaaa; \
- R ^= L; \
- L ^= T; \
- ROL(R, 7); \
- T = L; \
- L ^= R; \
- L &= 0xff00ff00; \
- R ^= L; \
- L ^= T; \
- ROR(R, 6); \
- T = L; \
- L ^= R; \
- L &= 0xcccccccc; \
- R ^= L; \
- L ^= T; \
- ROL(R, 14); \
- T = L; \
- L ^= R; \
- L &= 0xffff0000; \
- R ^= L; \
- L ^= T; \
- ROR(R, 12); \
- T = L; \
- L ^= R; \
- L &= 0xf0f0f0f0; \
- R ^= L; \
- L ^= T; \
- ROR(R, 4);
-
-#define ROUND(L, R, A, B, K, d) \
- B = K[0]; A = K[1]; K += d; \
- B ^= R; A ^= R; \
- B &= 0x3f3f3f3f; ROR(A, 4); \
- L ^= S8[0xff & B]; A &= 0x3f3f3f3f; \
- L ^= S6[0xff & (B >> 8)]; B >>= 16; \
- L ^= S7[0xff & A]; \
- L ^= S5[0xff & (A >> 8)]; A >>= 16; \
- L ^= S4[0xff & B]; \
- L ^= S2[0xff & (B >> 8)]; \
- L ^= S3[0xff & A]; \
- L ^= S1[0xff & (A >> 8)];
-
-/*
- * PC2 lookup tables are organized as 2 consecutive sets of 4 interleaved
- * tables of 128 elements. One set is for C_i and the other for D_i, while
- * the 4 interleaved tables correspond to four 7-bit subsets of C_i or D_i.
- *
- * After PC1 each of the variables a,b,c,d contains a 7 bit subset of C_i
- * or D_i in bits 7-1 (bit 0 being the least significant).
- */
-
-#define T1(x) pt[2 * (x) + 0]
-#define T2(x) pt[2 * (x) + 1]
-#define T3(x) pt[2 * (x) + 2]
-#define T4(x) pt[2 * (x) + 3]
-
-#define DES_PC2(a, b, c, d) (T4(d) | T3(c) | T2(b) | T1(a))
-
-/*
- * Encryption key expansion
- *
- * RFC2451: Weak key checks SHOULD be performed.
- *
- * FIPS 74:
- *
- * Keys having duals are keys which produce all zeros, all ones, or
- * alternating zero-one patterns in the C and D registers after Permuted
- * Choice 1 has operated on the key.
- *
- */
-unsigned long des_ekey(u32 *pe, const u8 *k)
-{
- /* K&R: long is at least 32 bits */
- unsigned long a, b, c, d, w;
- const u32 *pt = pc2;
-
- d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d];
- c = k[5]; c &= 0x0e; c <<= 4; c |= k[1] & 0x1e; c = pc1[c];
- b = k[6]; b &= 0x0e; b <<= 4; b |= k[2] & 0x1e; b = pc1[b];
- a = k[7]; a &= 0x0e; a <<= 4; a |= k[3] & 0x1e; a = pc1[a];
-
- pe[15 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d];
- pe[14 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[13 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[12 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[11 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[10 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 9 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 8 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c];
- pe[ 7 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 6 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 5 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 4 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 3 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 2 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 1 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b];
- pe[ 0 * 2 + 0] = DES_PC2(b, c, d, a);
-
- /* Check if first half is weak */
- w = (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]);
-
- /* Skip to next table set */
- pt += 512;
-
- d = k[0]; d &= 0xe0; d >>= 4; d |= k[4] & 0xf0; d = pc1[d + 1];
- c = k[1]; c &= 0xe0; c >>= 4; c |= k[5] & 0xf0; c = pc1[c + 1];
- b = k[2]; b &= 0xe0; b >>= 4; b |= k[6] & 0xf0; b = pc1[b + 1];
- a = k[3]; a &= 0xe0; a >>= 4; a |= k[7] & 0xf0; a = pc1[a + 1];
-
- /* Check if second half is weak */
- w |= (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]);
-
- pe[15 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d];
- pe[14 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[13 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[12 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[11 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[10 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 9 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 8 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c];
- pe[ 7 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 6 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 5 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 4 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 3 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 2 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 1 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b];
- pe[ 0 * 2 + 1] = DES_PC2(b, c, d, a);
-
- /* Fixup: 2413 5768 -> 1357 2468 */
- for (d = 0; d < 16; ++d) {
- a = pe[2 * d];
- b = pe[2 * d + 1];
- c = a ^ b;
- c &= 0xffff0000;
- a ^= c;
- b ^= c;
- ROL(b, 18);
- pe[2 * d] = a;
- pe[2 * d + 1] = b;
- }
-
- /* Zero if weak key */
- return w;
-}
-EXPORT_SYMBOL_GPL(des_ekey);
-
-/*
- * Decryption key expansion
- *
- * No weak key checking is performed, as this is only used by triple DES
- *
- */
-static void dkey(u32 *pe, const u8 *k)
-{
- /* K&R: long is at least 32 bits */
- unsigned long a, b, c, d;
- const u32 *pt = pc2;
-
- d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d];
- c = k[5]; c &= 0x0e; c <<= 4; c |= k[1] & 0x1e; c = pc1[c];
- b = k[6]; b &= 0x0e; b <<= 4; b |= k[2] & 0x1e; b = pc1[b];
- a = k[7]; a &= 0x0e; a <<= 4; a |= k[3] & 0x1e; a = pc1[a];
-
- pe[ 0 * 2] = DES_PC2(a, b, c, d); d = rs[d];
- pe[ 1 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 2 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 3 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 4 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 5 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 6 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 7 * 2] = DES_PC2(d, a, b, c); c = rs[c];
- pe[ 8 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 9 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[10 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[11 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[12 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[13 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[14 * 2] = DES_PC2(c, d, a, b); b = rs[b];
- pe[15 * 2] = DES_PC2(b, c, d, a);
-
- /* Skip to next table set */
- pt += 512;
-
- d = k[0]; d &= 0xe0; d >>= 4; d |= k[4] & 0xf0; d = pc1[d + 1];
- c = k[1]; c &= 0xe0; c >>= 4; c |= k[5] & 0xf0; c = pc1[c + 1];
- b = k[2]; b &= 0xe0; b >>= 4; b |= k[6] & 0xf0; b = pc1[b + 1];
- a = k[3]; a &= 0xe0; a >>= 4; a |= k[7] & 0xf0; a = pc1[a + 1];
-
- pe[ 0 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d];
- pe[ 1 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 2 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 3 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 4 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 5 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 6 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 7 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c];
- pe[ 8 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 9 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[10 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[11 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[12 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[13 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[14 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b];
- pe[15 * 2 + 1] = DES_PC2(b, c, d, a);
-
- /* Fixup: 2413 5768 -> 1357 2468 */
- for (d = 0; d < 16; ++d) {
- a = pe[2 * d];
- b = pe[2 * d + 1];
- c = a ^ b;
- c &= 0xffff0000;
- a ^= c;
- b ^= c;
- ROL(b, 18);
- pe[2 * d] = a;
- pe[2 * d + 1] = b;
- }
-}
+#include <crypto/internal/des.h>
static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
unsigned int keylen)
{
struct des_ctx *dctx = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
- u32 tmp[DES_EXPKEY_WORDS];
- int ret;
-
- /* Expand to tmp */
- ret = des_ekey(tmp, key);
-
- if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
- *flags |= CRYPTO_TFM_RES_WEAK_KEY;
- return -EINVAL;
- }
-
- /* Copy to output */
- memcpy(dctx->expkey, tmp, sizeof(dctx->expkey));
-
- return 0;
-}
-
-static void des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- struct des_ctx *ctx = crypto_tfm_ctx(tfm);
- const u32 *K = ctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
- u32 L, R, A, B;
- int i;
-
- L = le32_to_cpu(s[0]);
- R = le32_to_cpu(s[1]);
+ int err;
- IP(L, R, A);
- for (i = 0; i < 8; i++) {
- ROUND(L, R, A, B, K, 2);
- ROUND(R, L, A, B, K, 2);
+ err = des_expand_key(dctx, key, keylen);
+ if (err == -ENOKEY) {
+ if (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)
+ err = -EINVAL;
+ else
+ err = 0;
}
- FP(R, L, A);
-
- d[0] = cpu_to_le32(R);
- d[1] = cpu_to_le32(L);
+ if (err)
+ memset(dctx, 0, sizeof(*dctx));
+ return err;
}
-static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void crypto_des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
- struct des_ctx *ctx = crypto_tfm_ctx(tfm);
- const u32 *K = ctx->expkey + DES_EXPKEY_WORDS - 2;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
- u32 L, R, A, B;
- int i;
-
- L = le32_to_cpu(s[0]);
- R = le32_to_cpu(s[1]);
-
- IP(L, R, A);
- for (i = 0; i < 8; i++) {
- ROUND(L, R, A, B, K, -2);
- ROUND(R, L, A, B, K, -2);
- }
- FP(R, L, A);
+ const struct des_ctx *dctx = crypto_tfm_ctx(tfm);
- d[0] = cpu_to_le32(R);
- d[1] = cpu_to_le32(L);
+ des_encrypt(dctx, dst, src);
}
-/*
- * RFC2451:
- *
- * For DES-EDE3, there is no known need to reject weak or
- * complementation keys. Any weakness is obviated by the use of
- * multiple keys.
- *
- * However, if the first two or last two independent 64-bit keys are
- * equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
- * same as DES. Implementers MUST reject keys that exhibit this
- * property.
- *
- */
-int __des3_ede_setkey(u32 *expkey, u32 *flags, const u8 *key,
- unsigned int keylen)
+static void crypto_des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
- int err;
-
- err = __des3_verify_key(flags, key);
- if (unlikely(err))
- return err;
+ const struct des_ctx *dctx = crypto_tfm_ctx(tfm);
- des_ekey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
- dkey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
- des_ekey(expkey, key);
-
- return 0;
+ des_decrypt(dctx, dst, src);
}
-EXPORT_SYMBOL_GPL(__des3_ede_setkey);
static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
unsigned int keylen)
{
struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
- u32 *expkey = dctx->expkey;
+ int err;
- return __des3_ede_setkey(expkey, flags, key, keylen);
+ err = des3_ede_expand_key(dctx, key, keylen);
+ if (err == -ENOKEY) {
+ if (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)
+ err = -EINVAL;
+ else
+ err = 0;
+ }
+ if (err)
+ memset(dctx, 0, sizeof(*dctx));
+ return err;
}
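
The RFC2451 policy quoted in the removed comment now lives in the DES library behind des3_ede_expand_key(). For reference, a minimal stand-alone sketch of that check, assuming crypto_memneq() and the constants from <crypto/des.h> (the example_ name is invented):

#include <crypto/algapi.h>	/* crypto_memneq() */
#include <crypto/des.h>		/* DES_KEY_SIZE */

/* Sketch of the RFC2451 check for 3DES-EDE keys: K1 == K2 or
 * K2 == K3 collapses EDE into single DES, so such keys are flagged. */
static int example_des3_rfc2451_check(const u8 *key)
{
	if (!crypto_memneq(key, key + DES_KEY_SIZE, DES_KEY_SIZE) ||
	    !crypto_memneq(key + DES_KEY_SIZE, key + 2 * DES_KEY_SIZE,
			   DES_KEY_SIZE))
		return -ENOKEY;	/* caller maps this to 0 or -EINVAL */
	return 0;
}

As in des_setkey() above, -ENOKEY is deliberately distinct from -EINVAL so the caller can honour CRYPTO_TFM_REQ_FORBID_WEAK_KEYS.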
-static void des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void crypto_des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst,
+ const u8 *src)
{
- struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
- const u32 *K = dctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
- u32 L, R, A, B;
- int i;
-
- L = le32_to_cpu(s[0]);
- R = le32_to_cpu(s[1]);
-
- IP(L, R, A);
- for (i = 0; i < 8; i++) {
- ROUND(L, R, A, B, K, 2);
- ROUND(R, L, A, B, K, 2);
- }
- for (i = 0; i < 8; i++) {
- ROUND(R, L, A, B, K, 2);
- ROUND(L, R, A, B, K, 2);
- }
- for (i = 0; i < 8; i++) {
- ROUND(L, R, A, B, K, 2);
- ROUND(R, L, A, B, K, 2);
- }
- FP(R, L, A);
+ const struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
- d[0] = cpu_to_le32(R);
- d[1] = cpu_to_le32(L);
+ des3_ede_encrypt(dctx, dst, src);
}
-static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void crypto_des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst,
+ const u8 *src)
{
- struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
- const u32 *K = dctx->expkey + DES3_EDE_EXPKEY_WORDS - 2;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
- u32 L, R, A, B;
- int i;
-
- L = le32_to_cpu(s[0]);
- R = le32_to_cpu(s[1]);
-
- IP(L, R, A);
- for (i = 0; i < 8; i++) {
- ROUND(L, R, A, B, K, -2);
- ROUND(R, L, A, B, K, -2);
- }
- for (i = 0; i < 8; i++) {
- ROUND(R, L, A, B, K, -2);
- ROUND(L, R, A, B, K, -2);
- }
- for (i = 0; i < 8; i++) {
- ROUND(L, R, A, B, K, -2);
- ROUND(R, L, A, B, K, -2);
- }
- FP(R, L, A);
+ const struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
- d[0] = cpu_to_le32(R);
- d[1] = cpu_to_le32(L);
+ des3_ede_decrypt(dctx, dst, src);
}
static struct crypto_alg des_algs[2] = { {
@@ -951,13 +90,12 @@ static struct crypto_alg des_algs[2] = { {
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct des_ctx),
.cra_module = THIS_MODULE,
- .cra_alignmask = 3,
.cra_u = { .cipher = {
.cia_min_keysize = DES_KEY_SIZE,
.cia_max_keysize = DES_KEY_SIZE,
.cia_setkey = des_setkey,
- .cia_encrypt = des_encrypt,
- .cia_decrypt = des_decrypt } }
+ .cia_encrypt = crypto_des_encrypt,
+ .cia_decrypt = crypto_des_decrypt } }
}, {
.cra_name = "des3_ede",
.cra_driver_name = "des3_ede-generic",
@@ -966,13 +104,12 @@ static struct crypto_alg des_algs[2] = { {
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct des3_ede_ctx),
.cra_module = THIS_MODULE,
- .cra_alignmask = 3,
.cra_u = { .cipher = {
.cia_min_keysize = DES3_EDE_KEY_SIZE,
.cia_max_keysize = DES3_EDE_KEY_SIZE,
.cia_setkey = des3_ede_setkey,
- .cia_encrypt = des3_ede_encrypt,
- .cia_decrypt = des3_ede_decrypt } }
+ .cia_encrypt = crypto_des3_ede_encrypt,
+ .cia_decrypt = crypto_des3_ede_decrypt } }
} };
static int __init des_generic_mod_init(void)
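
Taken together, these changes make the generic driver a thin shim over the new DES library. A hedged sketch of using that library directly, assuming des_expand_key()/des_encrypt() keep the signatures this series introduces in <crypto/des.h> (the example_ name is invented):

#include <crypto/des.h>
#include <linux/string.h>	/* memzero_explicit() */

/* Encrypt a single 8-byte block with the library API. */
static int example_des_one_block(const u8 *key,
				 const u8 in[DES_BLOCK_SIZE],
				 u8 out[DES_BLOCK_SIZE])
{
	struct des_ctx ctx;
	int err;

	err = des_expand_key(&ctx, key, DES_KEY_SIZE);
	if (err == -ENOKEY)	/* weak key: policy is the caller's call */
		err = 0;
	if (err)
		return err;

	des_encrypt(&ctx, out, in);
	memzero_explicit(&ctx, sizeof(ctx));	/* scrub the round keys */
	return 0;
}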
diff --git a/crypto/ecb.c b/crypto/ecb.c
index 9d6981ca7d5d..69a687cbdf21 100644
--- a/crypto/ecb.c
+++ b/crypto/ecb.c
@@ -61,10 +61,9 @@ static int crypto_ecb_decrypt(struct skcipher_request *req)
static int crypto_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct skcipher_instance *inst;
- struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
@@ -76,7 +75,7 @@ static int crypto_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
err = skcipher_register_instance(tmpl, inst);
if (err)
inst->free(inst);
- crypto_mod_put(alg);
+
return err;
}
diff --git a/crypto/ecc.c b/crypto/ecc.c
index dfe114bc0c4a..02d35be7702b 100644
--- a/crypto/ecc.c
+++ b/crypto/ecc.c
@@ -336,7 +336,7 @@ static u64 vli_usub(u64 *result, const u64 *left, u64 right,
static uint128_t mul_64_64(u64 left, u64 right)
{
uint128_t result;
-#if defined(CONFIG_ARCH_SUPPORTS_INT128) && defined(__SIZEOF_INT128__)
+#if defined(CONFIG_ARCH_SUPPORTS_INT128)
unsigned __int128 m = (unsigned __int128)left * right;
result.m_low = m;
@@ -1284,10 +1284,11 @@ EXPORT_SYMBOL(ecc_point_mult_shamir);
static inline void ecc_swap_digits(const u64 *in, u64 *out,
unsigned int ndigits)
{
+ const __be64 *src = (__force __be64 *)in;
int i;
for (i = 0; i < ndigits; i++)
- out[i] = __swab64(in[ndigits - 1 - i]);
+ out[i] = be64_to_cpu(src[ndigits - 1 - i]);
}
static int __ecc_is_key_valid(const struct ecc_curve *curve,
diff --git a/crypto/echainiv.c b/crypto/echainiv.c
index a49cbf7b0929..4a2f02baba14 100644
--- a/crypto/echainiv.c
+++ b/crypto/echainiv.c
@@ -133,29 +133,17 @@ static int echainiv_aead_create(struct crypto_template *tmpl,
inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
inst->alg.base.cra_ctxsize += inst->alg.ivsize;
- inst->free = aead_geniv_free;
-
err = aead_register_instance(tmpl, inst);
- if (err)
- goto free_inst;
-
-out:
- return err;
-
+ if (err) {
free_inst:
- aead_geniv_free(inst);
- goto out;
-}
-
-static void echainiv_free(struct crypto_instance *inst)
-{
- aead_geniv_free(aead_instance(inst));
+ inst->free(inst);
+ }
+ return err;
}
static struct crypto_template echainiv_tmpl = {
.name = "echainiv",
.create = echainiv_aead_create,
- .free = echainiv_free,
.module = THIS_MODULE,
};
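
Note the unwind idiom introduced here, which recurs in gcm.c, hmac.c and keywrap.c below: earlier failure paths jump to a label placed inside the if (err) block, so every error funnels through the instance's own ->free() callback. Schematically (a sketch, not code from this patch):

static int example_register(struct crypto_template *tmpl,
			    struct aead_instance *inst, int setup_err)
{
	int err = setup_err;

	if (err)
		goto free_inst;		/* early failure */

	err = aead_register_instance(tmpl, inst);
	if (err) {
free_inst:				/* label sits inside the if */
		inst->free(inst);	/* single teardown path */
	}
	return err;
}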
diff --git a/crypto/essiv.c b/crypto/essiv.c
new file mode 100644
index 000000000000..465a89c9d1ef
--- /dev/null
+++ b/crypto/essiv.c
@@ -0,0 +1,642 @@
+// SPDX-License-Identifier: GPL-2.0
+/*
+ * ESSIV skcipher and aead template for block encryption
+ *
+ * This template encapsulates the ESSIV IV generation algorithm used by
+ * dm-crypt and fscrypt: it converts the initial vector for the skcipher
+ * used for block encryption by encrypting it, using the hash of the
+ * skcipher key as the encryption key. Usually, the input IV is a 64-bit
+ * sector number in LE representation, zero-padded to the size of the IV,
+ * but this is not assumed by this driver.
+ *
+ * The typical use of this template is to instantiate the skcipher
+ * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
+ * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
+ * also permits ESSIV to be used in combination with the authenc template,
+ * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
+ * we need to instantiate an aead that accepts the same special key format
+ * as the authenc template, and deals with the way the encrypted IV is
+ * embedded into the AAD area of the aead request. This means the AEAD
+ * flavor produced by this template is tightly coupled to the way dm-crypt
+ * happens to use it.
+ *
+ * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
+ *
+ * Heavily based on:
+ * adiantum length-preserving encryption mode
+ *
+ * Copyright 2018 Google LLC
+ */
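
Reduced to code, the derivation the comment describes looks roughly like the following (a conceptual sketch only, error handling omitted; the real logic is spread across essiv_skcipher_setkey() and essiv_skcipher_crypt() below):

/* Illustrative only: IV' = E(H(key), IV) */
static void example_essiv_iv(struct crypto_shash *hash,
			     struct crypto_cipher *cipher,
			     const u8 *key, unsigned int keylen,
			     u8 *iv)	/* in: sector IV, out: ESSIV IV */
{
	u8 salt[HASH_MAX_DIGESTSIZE];
	SHASH_DESC_ON_STACK(desc, hash);

	desc->tfm = hash;
	crypto_shash_digest(desc, key, keylen, salt);	/* salt = H(key) */
	crypto_cipher_setkey(cipher, salt,
			     crypto_shash_digestsize(hash));
	crypto_cipher_encrypt_one(cipher, iv, iv);	/* IV' = E(salt, IV) */
}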
+
+#include <crypto/authenc.h>
+#include <crypto/internal/aead.h>
+#include <crypto/internal/hash.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/scatterwalk.h>
+#include <linux/module.h>
+
+#include "internal.h"
+
+struct essiv_instance_ctx {
+ union {
+ struct crypto_skcipher_spawn skcipher_spawn;
+ struct crypto_aead_spawn aead_spawn;
+ } u;
+ char essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
+ char shash_driver_name[CRYPTO_MAX_ALG_NAME];
+};
+
+struct essiv_tfm_ctx {
+ union {
+ struct crypto_skcipher *skcipher;
+ struct crypto_aead *aead;
+ } u;
+ struct crypto_cipher *essiv_cipher;
+ struct crypto_shash *hash;
+ int ivoffset;
+};
+
+struct essiv_aead_request_ctx {
+ struct scatterlist sg[4];
+ u8 *assoc;
+ struct aead_request aead_req;
+};
+
+static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
+ const u8 *key, unsigned int keylen)
+{
+ struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
+ SHASH_DESC_ON_STACK(desc, tctx->hash);
+ u8 salt[HASH_MAX_DIGESTSIZE];
+ int err;
+
+ crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_set_flags(tctx->u.skcipher,
+ crypto_skcipher_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
+ if (err)
+ return err;
+
+ desc->tfm = tctx->hash;
+ err = crypto_shash_digest(desc, key, keylen, salt);
+ if (err)
+ return err;
+
+ crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(tctx->essiv_cipher,
+ crypto_skcipher_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ return crypto_cipher_setkey(tctx->essiv_cipher, salt,
+ crypto_shash_digestsize(tctx->hash));
+}
+
+static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
+ SHASH_DESC_ON_STACK(desc, tctx->hash);
+ struct crypto_authenc_keys keys;
+ u8 salt[HASH_MAX_DIGESTSIZE];
+ int err;
+
+ crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
+ crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_aead_setkey(tctx->u.aead, key, keylen);
+ if (err)
+ return err;
+
+ if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
+ return -EINVAL;
+
+ desc->tfm = tctx->hash;
+ err = crypto_shash_init(desc) ?:
+ crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
+ crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
+ if (err)
+ return err;
+
+ crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ return crypto_cipher_setkey(tctx->essiv_cipher, salt,
+ crypto_shash_digestsize(tctx->hash));
+}
+
+static int essiv_aead_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
+
+ return crypto_aead_setauthsize(tctx->u.aead, authsize);
+}
+
+static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
+{
+ struct skcipher_request *req = areq->data;
+
+ skcipher_request_complete(req, err);
+}
+
+static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
+{
+ struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+ const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
+ struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+ crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);
+
+ skcipher_request_set_tfm(subreq, tctx->u.skcipher);
+ skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
+ req->iv);
+ skcipher_request_set_callback(subreq, skcipher_request_flags(req),
+ essiv_skcipher_done, req);
+
+ return enc ? crypto_skcipher_encrypt(subreq) :
+ crypto_skcipher_decrypt(subreq);
+}
+
+static int essiv_skcipher_encrypt(struct skcipher_request *req)
+{
+ return essiv_skcipher_crypt(req, true);
+}
+
+static int essiv_skcipher_decrypt(struct skcipher_request *req)
+{
+ return essiv_skcipher_crypt(req, false);
+}
+
+static void essiv_aead_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+ struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
+
+ kfree(rctx->assoc);
+ aead_request_complete(req, err);
+}
+
+static int essiv_aead_crypt(struct aead_request *req, bool enc)
+{
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+ const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
+ struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
+ struct aead_request *subreq = &rctx->aead_req;
+ struct scatterlist *src = req->src;
+ int err;
+
+ crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);
+
+ /*
+ * dm-crypt embeds the sector number and the IV in the AAD region, so
+ * we have to copy the converted IV into the right scatterlist before
+ * we pass it on.
+ */
+ rctx->assoc = NULL;
+ if (req->src == req->dst || !enc) {
+ scatterwalk_map_and_copy(req->iv, req->dst,
+ req->assoclen - crypto_aead_ivsize(tfm),
+ crypto_aead_ivsize(tfm), 1);
+ } else {
+ u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
+ int ivsize = crypto_aead_ivsize(tfm);
+ int ssize = req->assoclen - ivsize;
+ struct scatterlist *sg;
+ int nents;
+
+ if (ssize < 0)
+ return -EINVAL;
+
+ nents = sg_nents_for_len(req->src, ssize);
+ if (nents < 0)
+ return -EINVAL;
+
+ memcpy(iv, req->iv, ivsize);
+ sg_init_table(rctx->sg, 4);
+
+ if (unlikely(nents > 1)) {
+ /*
+ * This is a case that rarely occurs in practice, but
+ * for correctness, we have to deal with it nonetheless.
+ */
+ rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
+ if (!rctx->assoc)
+ return -ENOMEM;
+
+ scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
+ ssize, 0);
+ sg_set_buf(rctx->sg, rctx->assoc, ssize);
+ } else {
+ sg_set_page(rctx->sg, sg_page(req->src), ssize,
+ req->src->offset);
+ }
+
+ sg_set_buf(rctx->sg + 1, iv, ivsize);
+ sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
+ if (sg != rctx->sg + 2)
+ sg_chain(rctx->sg, 3, sg);
+
+ src = rctx->sg;
+ }
+
+ aead_request_set_tfm(subreq, tctx->u.aead);
+ aead_request_set_ad(subreq, req->assoclen);
+ aead_request_set_callback(subreq, aead_request_flags(req),
+ essiv_aead_done, req);
+ aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);
+
+ err = enc ? crypto_aead_encrypt(subreq) :
+ crypto_aead_decrypt(subreq);
+
+ if (rctx->assoc && err != -EINPROGRESS)
+ kfree(rctx->assoc);
+ return err;
+}
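
To see why the encrypt, src != dst case needs the shadow scatterlist built above, it helps to picture the dm-crypt AEAD request layout (an illustration based on the code, not text from the patch):

 |<-------- assoclen -------->|<------ cryptlen ------>|
 | sector data  | ESSIV'd IV  |        payload         |
 |<--- ssize -->|<- ivsize -->|

 rctx->sg[0]: associated data before the IV (ssize bytes, from req->src)
 rctx->sg[1]: the newly encrypted IV, held in the request context
 rctx->sg[2]: req->src fast-forwarded past assoclen (chained if needed)

In the in-place and decrypt cases the converted IV can simply be written into req->dst, which is what the scatterwalk_map_and_copy() branch does.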
+
+static int essiv_aead_encrypt(struct aead_request *req)
+{
+ return essiv_aead_crypt(req, true);
+}
+
+static int essiv_aead_decrypt(struct aead_request *req)
+{
+ return essiv_aead_crypt(req, false);
+}
+
+static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
+ struct essiv_tfm_ctx *tctx)
+{
+ struct crypto_cipher *essiv_cipher;
+ struct crypto_shash *hash;
+ int err;
+
+ essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
+ if (IS_ERR(essiv_cipher))
+ return PTR_ERR(essiv_cipher);
+
+ hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
+ if (IS_ERR(hash)) {
+ err = PTR_ERR(hash);
+ goto err_free_essiv_cipher;
+ }
+
+ tctx->essiv_cipher = essiv_cipher;
+ tctx->hash = hash;
+
+ return 0;
+
+err_free_essiv_cipher:
+ crypto_free_cipher(essiv_cipher);
+ return err;
+}
+
+static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
+{
+ struct skcipher_instance *inst = skcipher_alg_instance(tfm);
+ struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
+ struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
+ struct crypto_skcipher *skcipher;
+ int err;
+
+ skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
+ if (IS_ERR(skcipher))
+ return PTR_ERR(skcipher);
+
+ crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+ crypto_skcipher_reqsize(skcipher));
+
+ err = essiv_init_tfm(ictx, tctx);
+ if (err) {
+ crypto_free_skcipher(skcipher);
+ return err;
+ }
+
+ tctx->u.skcipher = skcipher;
+ return 0;
+}
+
+static int essiv_aead_init_tfm(struct crypto_aead *tfm)
+{
+ struct aead_instance *inst = aead_alg_instance(tfm);
+ struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
+ struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
+ struct crypto_aead *aead;
+ unsigned int subreq_size;
+ int err;
+
+ BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
+ sizeof(struct essiv_aead_request_ctx));
+
+ aead = crypto_spawn_aead(&ictx->u.aead_spawn);
+ if (IS_ERR(aead))
+ return PTR_ERR(aead);
+
+ subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
+ crypto_aead_reqsize(aead);
+
+ tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
+ subreq_size;
+ crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));
+
+ err = essiv_init_tfm(ictx, tctx);
+ if (err) {
+ crypto_free_aead(aead);
+ return err;
+ }
+
+ tctx->u.aead = aead;
+ return 0;
+}
+
+static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
+{
+ struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
+
+ crypto_free_skcipher(tctx->u.skcipher);
+ crypto_free_cipher(tctx->essiv_cipher);
+ crypto_free_shash(tctx->hash);
+}
+
+static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
+{
+ struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
+
+ crypto_free_aead(tctx->u.aead);
+ crypto_free_cipher(tctx->essiv_cipher);
+ crypto_free_shash(tctx->hash);
+}
+
+static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
+{
+ struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
+
+ crypto_drop_skcipher(&ictx->u.skcipher_spawn);
+ kfree(inst);
+}
+
+static void essiv_aead_free_instance(struct aead_instance *inst)
+{
+ struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
+
+ crypto_drop_aead(&ictx->u.aead_spawn);
+ kfree(inst);
+}
+
+static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
+{
+ const char *p, *q;
+ int len;
+
+	/* find the last opening parenthesis */
+ p = strrchr(cra_name, '(');
+ if (!p++)
+ return false;
+
+	/* find the first closing parenthesis in the tail of the string */
+ q = strchr(p, ')');
+ if (!q)
+ return false;
+
+ len = q - p;
+ if (len >= CRYPTO_MAX_ALG_NAME)
+ return false;
+
+ memcpy(essiv_cipher_name, p, len);
+ essiv_cipher_name[len] = '\0';
+ return true;
+}
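
Two worked inputs make the rule concrete (illustrative values):

	"cbc(aes)"                         -> "aes"
	"authenc(hmac(sha256),cbc(aes))"   -> "aes"

That is, whatever sits between the last '(' and the following ')' is taken as the bare cipher that the ESSIV encryption step will use.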
+
+static bool essiv_supported_algorithms(const char *essiv_cipher_name,
+ struct shash_alg *hash_alg,
+ int ivsize)
+{
+ struct crypto_alg *alg;
+ bool ret = false;
+
+ alg = crypto_alg_mod_lookup(essiv_cipher_name,
+ CRYPTO_ALG_TYPE_CIPHER,
+ CRYPTO_ALG_TYPE_MASK);
+ if (IS_ERR(alg))
+ return false;
+
+ if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
+ hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
+ goto out;
+
+ if (ivsize != alg->cra_blocksize)
+ goto out;
+
+ if (crypto_shash_alg_needs_key(hash_alg))
+ goto out;
+
+ ret = true;
+
+out:
+ crypto_mod_put(alg);
+ return ret;
+}
+
+static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
+{
+ struct crypto_attr_type *algt;
+ const char *inner_cipher_name;
+ const char *shash_name;
+ struct skcipher_instance *skcipher_inst = NULL;
+ struct aead_instance *aead_inst = NULL;
+ struct crypto_instance *inst;
+ struct crypto_alg *base, *block_base;
+ struct essiv_instance_ctx *ictx;
+ struct skcipher_alg *skcipher_alg = NULL;
+ struct aead_alg *aead_alg = NULL;
+ struct crypto_alg *_hash_alg;
+ struct shash_alg *hash_alg;
+ int ivsize;
+ u32 type;
+ u32 mask;
+ int err;
+
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return PTR_ERR(algt);
+
+ inner_cipher_name = crypto_attr_alg_name(tb[1]);
+ if (IS_ERR(inner_cipher_name))
+ return PTR_ERR(inner_cipher_name);
+
+ shash_name = crypto_attr_alg_name(tb[2]);
+ if (IS_ERR(shash_name))
+ return PTR_ERR(shash_name);
+
+ type = algt->type & algt->mask;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
+ switch (type) {
+ case CRYPTO_ALG_TYPE_SKCIPHER:
+ skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
+ sizeof(*ictx), GFP_KERNEL);
+ if (!skcipher_inst)
+ return -ENOMEM;
+ inst = skcipher_crypto_instance(skcipher_inst);
+ base = &skcipher_inst->alg.base;
+ ictx = crypto_instance_ctx(inst);
+
+ /* Symmetric cipher, e.g., "cbc(aes)" */
+ err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
+ inner_cipher_name, 0, mask);
+ if (err)
+ goto out_free_inst;
+ skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
+ block_base = &skcipher_alg->base;
+ ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
+ break;
+
+ case CRYPTO_ALG_TYPE_AEAD:
+ aead_inst = kzalloc(sizeof(*aead_inst) +
+ sizeof(*ictx), GFP_KERNEL);
+ if (!aead_inst)
+ return -ENOMEM;
+ inst = aead_crypto_instance(aead_inst);
+ base = &aead_inst->alg.base;
+ ictx = crypto_instance_ctx(inst);
+
+ /* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
+ err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
+ inner_cipher_name, 0, mask);
+ if (err)
+ goto out_free_inst;
+ aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
+ block_base = &aead_alg->base;
+ if (!strstarts(block_base->cra_name, "authenc(")) {
+ pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
+ err = -EINVAL;
+ goto out_drop_skcipher;
+ }
+ ivsize = aead_alg->ivsize;
+ break;
+
+ default:
+ return -EINVAL;
+ }
+
+ if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
+ pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
+ err = -EINVAL;
+ goto out_drop_skcipher;
+ }
+
+ /* Synchronous hash, e.g., "sha256" */
+ _hash_alg = crypto_alg_mod_lookup(shash_name,
+ CRYPTO_ALG_TYPE_SHASH,
+ CRYPTO_ALG_TYPE_MASK);
+ if (IS_ERR(_hash_alg)) {
+ err = PTR_ERR(_hash_alg);
+ goto out_drop_skcipher;
+ }
+ hash_alg = __crypto_shash_alg(_hash_alg);
+
+ /* Check the set of algorithms */
+ if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
+ ivsize)) {
+ pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
+ block_base->cra_name, hash_alg->base.cra_name);
+ err = -EINVAL;
+ goto out_free_hash;
+ }
+
+ /* record the driver name so we can instantiate this exact algo later */
+ strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
+ CRYPTO_MAX_ALG_NAME);
+
+ /* Instance fields */
+
+ err = -ENAMETOOLONG;
+ if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
+ "essiv(%s,%s)", block_base->cra_name,
+ hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
+ goto out_free_hash;
+ if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
+ "essiv(%s,%s)", block_base->cra_driver_name,
+ hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+ goto out_free_hash;
+
+ base->cra_flags = block_base->cra_flags & CRYPTO_ALG_ASYNC;
+ base->cra_blocksize = block_base->cra_blocksize;
+ base->cra_ctxsize = sizeof(struct essiv_tfm_ctx);
+ base->cra_alignmask = block_base->cra_alignmask;
+ base->cra_priority = block_base->cra_priority;
+
+ if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
+ skcipher_inst->alg.setkey = essiv_skcipher_setkey;
+ skcipher_inst->alg.encrypt = essiv_skcipher_encrypt;
+ skcipher_inst->alg.decrypt = essiv_skcipher_decrypt;
+ skcipher_inst->alg.init = essiv_skcipher_init_tfm;
+ skcipher_inst->alg.exit = essiv_skcipher_exit_tfm;
+
+ skcipher_inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(skcipher_alg);
+ skcipher_inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(skcipher_alg);
+ skcipher_inst->alg.ivsize = ivsize;
+ skcipher_inst->alg.chunksize = crypto_skcipher_alg_chunksize(skcipher_alg);
+ skcipher_inst->alg.walksize = crypto_skcipher_alg_walksize(skcipher_alg);
+
+ skcipher_inst->free = essiv_skcipher_free_instance;
+
+ err = skcipher_register_instance(tmpl, skcipher_inst);
+ } else {
+ aead_inst->alg.setkey = essiv_aead_setkey;
+ aead_inst->alg.setauthsize = essiv_aead_setauthsize;
+ aead_inst->alg.encrypt = essiv_aead_encrypt;
+ aead_inst->alg.decrypt = essiv_aead_decrypt;
+ aead_inst->alg.init = essiv_aead_init_tfm;
+ aead_inst->alg.exit = essiv_aead_exit_tfm;
+
+ aead_inst->alg.ivsize = ivsize;
+ aead_inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(aead_alg);
+ aead_inst->alg.chunksize = crypto_aead_alg_chunksize(aead_alg);
+
+ aead_inst->free = essiv_aead_free_instance;
+
+ err = aead_register_instance(tmpl, aead_inst);
+ }
+
+ if (err)
+ goto out_free_hash;
+
+ crypto_mod_put(_hash_alg);
+ return 0;
+
+out_free_hash:
+ crypto_mod_put(_hash_alg);
+out_drop_skcipher:
+ if (type == CRYPTO_ALG_TYPE_SKCIPHER)
+ crypto_drop_skcipher(&ictx->u.skcipher_spawn);
+ else
+ crypto_drop_aead(&ictx->u.aead_spawn);
+out_free_inst:
+ kfree(skcipher_inst);
+ kfree(aead_inst);
+ return err;
+}
+
+/* essiv(cipher_name, shash_name) */
+static struct crypto_template essiv_tmpl = {
+ .name = "essiv",
+ .create = essiv_create,
+ .module = THIS_MODULE,
+};
+
+static int __init essiv_module_init(void)
+{
+ return crypto_register_template(&essiv_tmpl);
+}
+
+static void __exit essiv_module_exit(void)
+{
+ crypto_unregister_template(&essiv_tmpl);
+}
+
+subsys_initcall(essiv_module_init);
+module_exit(essiv_module_exit);
+
+MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
+MODULE_LICENSE("GPL v2");
+MODULE_ALIAS_CRYPTO("essiv");
diff --git a/crypto/fips.c b/crypto/fips.c
index c0b3a3c3452d..7b1d8caee669 100644
--- a/crypto/fips.c
+++ b/crypto/fips.c
@@ -11,10 +11,14 @@
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/sysctl.h>
+#include <linux/notifier.h>
int fips_enabled;
EXPORT_SYMBOL_GPL(fips_enabled);
+ATOMIC_NOTIFIER_HEAD(fips_fail_notif_chain);
+EXPORT_SYMBOL_GPL(fips_fail_notif_chain);
+
/* Process kernel command-line parameter at boot time. fips=0 or fips=1 */
static int fips_enable(char *str)
{
@@ -58,6 +62,13 @@ static void crypto_proc_fips_exit(void)
unregister_sysctl_table(crypto_sysctls);
}
+void fips_fail_notify(void)
+{
+ if (fips_enabled)
+ atomic_notifier_call_chain(&fips_fail_notif_chain, 0, NULL);
+}
+EXPORT_SYMBOL_GPL(fips_fail_notify);
+
static int __init fips_init(void)
{
crypto_proc_fips_init();
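
A consumer of the new chain registers in the usual atomic-notifier fashion; a minimal sketch (the example_ names are invented):

#include <linux/kernel.h>
#include <linux/notifier.h>

extern struct atomic_notifier_head fips_fail_notif_chain;

/* Hypothetical consumer: react to a reported FIPS self-test failure. */
static int example_fips_fail_cb(struct notifier_block *nb,
				unsigned long action, void *data)
{
	pr_emerg("FIPS self-test failure reported\n");
	return NOTIFY_OK;
}

static struct notifier_block example_fips_nb = {
	.notifier_call = example_fips_fail_cb,
};

/* during init:
 *	atomic_notifier_chain_register(&fips_fail_notif_chain,
 *				       &example_fips_nb);
 */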
diff --git a/crypto/gcm.c b/crypto/gcm.c
index f254e2d4c206..8e5c0ac65661 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -13,7 +13,6 @@
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>
#include <crypto/hash.h>
-#include "internal.h"
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
@@ -111,8 +110,6 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
CRYPTO_TFM_REQ_MASK);
err = crypto_skcipher_setkey(ctr, key, keylen);
- crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
- CRYPTO_TFM_RES_MASK);
if (err)
return err;
@@ -141,9 +138,6 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
CRYPTO_TFM_REQ_MASK);
err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
- crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
- CRYPTO_TFM_RES_MASK);
-
out:
kzfree(data);
return err;
@@ -152,20 +146,7 @@ out:
static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
unsigned int authsize)
{
- switch (authsize) {
- case 4:
- case 8:
- case 12:
- case 13:
- case 14:
- case 15:
- case 16:
- break;
- default:
- return -EINVAL;
- }
-
- return 0;
+ return crypto_gcm_check_authsize(authsize);
}
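
The removed switch is not gone; it moved into a shared inline helper. Judging by the call sites in this patch, crypto_gcm_check_authsize() in <crypto/gcm.h> carries the same body as the removed code (reproduced here as a sketch for reference):

static inline int crypto_gcm_check_authsize(unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}
	return 0;
}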
static void crypto_gcm_init_common(struct aead_request *req)
@@ -598,11 +579,11 @@ static int crypto_gcm_create_common(struct crypto_template *tmpl,
const char *ghash_name)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
+ struct gcm_instance_ctx *ctx;
struct skcipher_alg *ctr;
- struct crypto_alg *ghash_alg;
struct hash_alg_common *ghash;
- struct gcm_instance_ctx *ctx;
int err;
algt = crypto_get_attr_type(tb);
@@ -612,39 +593,28 @@ static int crypto_gcm_create_common(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
- ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
- CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_AHASH_MASK |
- crypto_requires_sync(algt->type,
- algt->mask));
- if (IS_ERR(ghash_alg))
- return PTR_ERR(ghash_alg);
+ mask = crypto_requires_sync(algt->type, algt->mask);
- ghash = __crypto_hash_alg_common(ghash_alg);
-
- err = -ENOMEM;
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
- goto out_put_ghash;
-
+ return -ENOMEM;
ctx = aead_instance_ctx(inst);
- err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
- aead_crypto_instance(inst));
+
+ err = crypto_grab_ahash(&ctx->ghash, aead_crypto_instance(inst),
+ ghash_name, 0, mask);
if (err)
goto err_free_inst;
+ ghash = crypto_spawn_ahash_alg(&ctx->ghash);
err = -EINVAL;
if (strcmp(ghash->base.cra_name, "ghash") != 0 ||
ghash->digestsize != 16)
- goto err_drop_ghash;
+ goto err_free_inst;
- crypto_set_skcipher_spawn(&ctx->ctr, aead_crypto_instance(inst));
- err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(&ctx->ctr, aead_crypto_instance(inst),
+ ctr_name, 0, mask);
if (err)
- goto err_drop_ghash;
-
+ goto err_free_inst;
ctr = crypto_spawn_skcipher_alg(&ctx->ctr);
/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
@@ -652,18 +622,18 @@ static int crypto_gcm_create_common(struct crypto_template *tmpl,
if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
crypto_skcipher_alg_ivsize(ctr) != 16 ||
ctr->base.cra_blocksize != 1)
- goto out_put_ctr;
+ goto err_free_inst;
err = -ENAMETOOLONG;
if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
"gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
- goto out_put_ctr;
+ goto err_free_inst;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"gcm_base(%s,%s)", ctr->base.cra_driver_name,
- ghash_alg->cra_driver_name) >=
+ ghash->base.cra_driver_name) >=
CRYPTO_MAX_ALG_NAME)
- goto out_put_ctr;
+ goto err_free_inst;
inst->alg.base.cra_flags = (ghash->base.cra_flags |
ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
@@ -686,20 +656,11 @@ static int crypto_gcm_create_common(struct crypto_template *tmpl,
inst->free = crypto_gcm_free;
err = aead_register_instance(tmpl, inst);
- if (err)
- goto out_put_ctr;
-
-out_put_ghash:
- crypto_mod_put(ghash_alg);
- return err;
-
-out_put_ctr:
- crypto_drop_skcipher(&ctx->ctr);
-err_drop_ghash:
- crypto_drop_ahash(&ctx->ghash);
+ if (err) {
err_free_inst:
- kfree(inst);
- goto out_put_ghash;
+ crypto_gcm_free(inst);
+ }
+ return err;
}
static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
@@ -740,7 +701,6 @@ static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
{
struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
struct crypto_aead *child = ctx->child;
- int err;
if (keylen < 4)
return -EINVAL;
@@ -751,26 +711,18 @@ static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_aead_setkey(child, key, keylen);
- crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
-
- return err;
+ return crypto_aead_setkey(child, key, keylen);
}
static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
unsigned int authsize)
{
struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
+ int err;
- switch (authsize) {
- case 8:
- case 12:
- case 16:
- break;
- default:
- return -EINVAL;
- }
+ err = crypto_rfc4106_check_authsize(authsize);
+ if (err)
+ return err;
return crypto_aead_setauthsize(ctx->child, authsize);
}
@@ -818,8 +770,11 @@ static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
static int crypto_rfc4106_encrypt(struct aead_request *req)
{
- if (req->assoclen != 16 && req->assoclen != 20)
- return -EINVAL;
+ int err;
+
+ err = crypto_ipsec_check_assoclen(req->assoclen);
+ if (err)
+ return err;
req = crypto_rfc4106_crypt(req);
@@ -828,8 +783,11 @@ static int crypto_rfc4106_encrypt(struct aead_request *req)
static int crypto_rfc4106_decrypt(struct aead_request *req)
{
- if (req->assoclen != 16 && req->assoclen != 20)
- return -EINVAL;
+ int err;
+
+ err = crypto_ipsec_check_assoclen(req->assoclen);
+ if (err)
+ return err;
req = crypto_rfc4106_crypt(req);
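
crypto_ipsec_check_assoclen() centralizes the same two legal values the removed tests allowed: 16 bytes of AAD (SPI + 32-bit sequence number + the 8-byte IV) or 20 when an extended sequence number is present. A sketch of the helper's presumed shape, mirroring the removed checks:

static inline int crypto_ipsec_check_assoclen(unsigned int assoclen)
{
	switch (assoclen) {
	case 16:	/* SPI + 32-bit seqno + 8-byte IV */
	case 20:	/* SPI + 64-bit ESN  + 8-byte IV */
		break;
	default:
		return -EINVAL;
	}
	return 0;
}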
@@ -878,6 +836,7 @@ static int crypto_rfc4106_create(struct crypto_template *tmpl,
struct rtattr **tb)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
struct crypto_aead_spawn *spawn;
struct aead_alg *alg;
@@ -891,6 +850,8 @@ static int crypto_rfc4106_create(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
ccm_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(ccm_name))
return PTR_ERR(ccm_name);
@@ -900,9 +861,8 @@ static int crypto_rfc4106_create(struct crypto_template *tmpl,
return -ENOMEM;
spawn = aead_instance_ctx(inst);
- crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
- err = crypto_grab_aead(spawn, ccm_name, 0,
- crypto_requires_sync(algt->type, algt->mask));
+ err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
+ ccm_name, 0, mask);
if (err)
goto out_free_inst;
@@ -967,7 +927,6 @@ static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
{
struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
struct crypto_aead *child = ctx->child;
- int err;
if (keylen < 4)
return -EINVAL;
@@ -978,11 +937,7 @@ static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_aead_setkey(child, key, keylen);
- crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
-
- return err;
+ return crypto_aead_setkey(child, key, keylen);
}
static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
@@ -1045,12 +1000,14 @@ static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
static int crypto_rfc4543_encrypt(struct aead_request *req)
{
- return crypto_rfc4543_crypt(req, true);
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
+ crypto_rfc4543_crypt(req, true);
}
static int crypto_rfc4543_decrypt(struct aead_request *req)
{
- return crypto_rfc4543_crypt(req, false);
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
+ crypto_rfc4543_crypt(req, false);
}
static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
@@ -1112,6 +1069,7 @@ static int crypto_rfc4543_create(struct crypto_template *tmpl,
struct rtattr **tb)
{
struct crypto_attr_type *algt;
+ u32 mask;
struct aead_instance *inst;
struct crypto_aead_spawn *spawn;
struct aead_alg *alg;
@@ -1126,6 +1084,8 @@ static int crypto_rfc4543_create(struct crypto_template *tmpl,
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return -EINVAL;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
ccm_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(ccm_name))
return PTR_ERR(ccm_name);
@@ -1136,9 +1096,8 @@ static int crypto_rfc4543_create(struct crypto_template *tmpl,
ctx = aead_instance_ctx(inst);
spawn = &ctx->aead;
- crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
- err = crypto_grab_aead(spawn, ccm_name, 0,
- crypto_requires_sync(algt->type, algt->mask));
+ err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
+ ccm_name, 0, mask);
if (err)
goto out_free_inst;
diff --git a/crypto/geniv.c b/crypto/geniv.c
new file mode 100644
index 000000000000..dbcc640274cd
--- /dev/null
+++ b/crypto/geniv.c
@@ -0,0 +1,177 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * geniv: Shared IV generator code
+ *
+ * This file provides common code to IV generators such as seqiv.
+ *
+ * Copyright (c) 2007-2019 Herbert Xu <herbert@gondor.apana.org.au>
+ */
+
+#include <crypto/internal/geniv.h>
+#include <crypto/internal/rng.h>
+#include <crypto/null.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/rtnetlink.h>
+#include <linux/slab.h>
+
+static int aead_geniv_setkey(struct crypto_aead *tfm,
+ const u8 *key, unsigned int keylen)
+{
+ struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
+
+ return crypto_aead_setkey(ctx->child, key, keylen);
+}
+
+static int aead_geniv_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
+
+ return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
+static void aead_geniv_free(struct aead_instance *inst)
+{
+ crypto_drop_aead(aead_instance_ctx(inst));
+ kfree(inst);
+}
+
+struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+ struct rtattr **tb, u32 type, u32 mask)
+{
+ const char *name;
+ struct crypto_aead_spawn *spawn;
+ struct crypto_attr_type *algt;
+ struct aead_instance *inst;
+ struct aead_alg *alg;
+ unsigned int ivsize;
+ unsigned int maxauthsize;
+ int err;
+
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return ERR_CAST(algt);
+
+ if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+ return ERR_PTR(-EINVAL);
+
+ name = crypto_attr_alg_name(tb[1]);
+ if (IS_ERR(name))
+ return ERR_CAST(name);
+
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return ERR_PTR(-ENOMEM);
+
+ spawn = aead_instance_ctx(inst);
+
+ /* Ignore async algorithms if necessary. */
+ mask |= crypto_requires_sync(algt->type, algt->mask);
+
+ err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
+ name, type, mask);
+ if (err)
+ goto err_free_inst;
+
+ alg = crypto_spawn_aead_alg(spawn);
+
+ ivsize = crypto_aead_alg_ivsize(alg);
+ maxauthsize = crypto_aead_alg_maxauthsize(alg);
+
+ err = -EINVAL;
+ if (ivsize < sizeof(u64))
+ goto err_drop_alg;
+
+ err = -ENAMETOOLONG;
+ if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
+ "%s(%s)", tmpl->name, alg->base.cra_name) >=
+ CRYPTO_MAX_ALG_NAME)
+ goto err_drop_alg;
+ if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+ "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
+ CRYPTO_MAX_ALG_NAME)
+ goto err_drop_alg;
+
+ inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_priority = alg->base.cra_priority;
+ inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
+ inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
+ inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
+
+ inst->alg.setkey = aead_geniv_setkey;
+ inst->alg.setauthsize = aead_geniv_setauthsize;
+
+ inst->alg.ivsize = ivsize;
+ inst->alg.maxauthsize = maxauthsize;
+
+ inst->free = aead_geniv_free;
+
+out:
+ return inst;
+
+err_drop_alg:
+ crypto_drop_aead(spawn);
+err_free_inst:
+ kfree(inst);
+ inst = ERR_PTR(err);
+ goto out;
+}
+EXPORT_SYMBOL_GPL(aead_geniv_alloc);
+
+int aead_init_geniv(struct crypto_aead *aead)
+{
+ struct aead_geniv_ctx *ctx = crypto_aead_ctx(aead);
+ struct aead_instance *inst = aead_alg_instance(aead);
+ struct crypto_aead *child;
+ int err;
+
+ spin_lock_init(&ctx->lock);
+
+ err = crypto_get_default_rng();
+ if (err)
+ goto out;
+
+ err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
+ crypto_aead_ivsize(aead));
+ crypto_put_default_rng();
+ if (err)
+ goto out;
+
+ ctx->sknull = crypto_get_default_null_skcipher();
+ err = PTR_ERR(ctx->sknull);
+ if (IS_ERR(ctx->sknull))
+ goto out;
+
+ child = crypto_spawn_aead(aead_instance_ctx(inst));
+ err = PTR_ERR(child);
+ if (IS_ERR(child))
+ goto drop_null;
+
+ ctx->child = child;
+ crypto_aead_set_reqsize(aead, crypto_aead_reqsize(child) +
+ sizeof(struct aead_request));
+
+ err = 0;
+
+out:
+ return err;
+
+drop_null:
+ crypto_put_default_null_skcipher();
+ goto out;
+}
+EXPORT_SYMBOL_GPL(aead_init_geniv);
+
+void aead_exit_geniv(struct crypto_aead *tfm)
+{
+ struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
+
+ crypto_free_aead(ctx->child);
+ crypto_put_default_null_skcipher();
+}
+EXPORT_SYMBOL_GPL(aead_exit_geniv);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Shared IV generator code");
diff --git a/crypto/ghash-generic.c b/crypto/ghash-generic.c
index dad9e1f91a78..c70d163c1ac9 100644
--- a/crypto/ghash-generic.c
+++ b/crypto/ghash-generic.c
@@ -1,12 +1,37 @@
// SPDX-License-Identifier: GPL-2.0-only
/*
- * GHASH: digest algorithm for GCM (Galois/Counter Mode).
+ * GHASH: hash function for GCM (Galois/Counter Mode).
*
* Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
* Copyright (c) 2009 Intel Corp.
* Author: Huang Ying <ying.huang@intel.com>
+ */
+
+/*
+ * GHASH is a keyed hash function used in GCM authentication tag generation.
+ *
+ * The original GCM paper [1] presents GHASH as a function GHASH(H, A, C) which
+ * takes a 16-byte hash key H, additional authenticated data A, and a ciphertext
+ * C. It formats A and C into a single byte string X, interprets X as a
+ * polynomial over GF(2^128), and evaluates this polynomial at the point H.
*
- * The algorithm implementation is copied from gcm.c.
+ * However, the NIST standard for GCM [2] presents GHASH as GHASH(H, X) where X
+ * is the already-formatted byte string containing both A and C.
+ *
+ * "ghash" in the Linux crypto API uses the 'X' (pre-formatted) convention,
+ * since the API supports only a single data stream per hash. Thus, the
+ * formatting of 'A' and 'C' is done in the "gcm" template, not in "ghash".
+ *
+ * The reason "ghash" is separate from "gcm" is to allow "gcm" to use an
+ * accelerated "ghash" when a standalone accelerated "gcm(aes)" is unavailable.
+ * It is generally inappropriate to use "ghash" for other purposes, since it is
+ * an "ε-almost-XOR-universal hash function", not a cryptographic hash function.
+ * It can only be used securely in crypto modes specially designed to use it.
+ *
+ * [1] The Galois/Counter Mode of Operation (GCM)
+ * (http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.694.695&rep=rep1&type=pdf)
+ * [2] Recommendation for Block Cipher Modes of Operation: Galois/Counter Mode (GCM) and GMAC
+ * (https://csrc.nist.gov/publications/detail/sp/800-38d/final)
*/
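
Written out, the polynomial evaluation described above is the standard GHASH definition (not text from this patch): split the formatted input X into m 16-byte blocks X_1 ... X_m and compute, in GF(2^128),

	\mathrm{GHASH}(H, X) = \bigoplus_{i=1}^{m} X_i \cdot H^{\,m-i+1}
	                     = X_1*H^m XOR X_2*H^{m-1} XOR ... XOR X_m*H

which is why a per-key precomputed multiplication table (the gf128mul 4k table freed below) pays off: every block costs one multiplication by the fixed point H.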
#include <crypto/algapi.h>
@@ -33,10 +58,8 @@ static int ghash_setkey(struct crypto_shash *tfm,
struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
be128 k;
- if (keylen != GHASH_BLOCK_SIZE) {
- crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != GHASH_BLOCK_SIZE)
return -EINVAL;
- }
if (ctx->gf128)
gf128mul_free_4k(ctx->gf128);
@@ -156,6 +179,6 @@ subsys_initcall(ghash_mod_init);
module_exit(ghash_mod_exit);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("GHASH Message Digest Algorithm");
+MODULE_DESCRIPTION("GHASH hash function");
MODULE_ALIAS_CRYPTO("ghash");
MODULE_ALIAS_CRYPTO("ghash-generic");
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 8b2a212eb0ad..e38bfb948278 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -138,12 +138,11 @@ static int hmac_finup(struct shash_desc *pdesc, const u8 *data,
crypto_shash_finup(desc, out, ds, out);
}
-static int hmac_init_tfm(struct crypto_tfm *tfm)
+static int hmac_init_tfm(struct crypto_shash *parent)
{
- struct crypto_shash *parent = __crypto_shash_cast(tfm);
struct crypto_shash *hash;
- struct crypto_instance *inst = (void *)tfm->__crt_alg;
- struct crypto_shash_spawn *spawn = crypto_instance_ctx(inst);
+ struct shash_instance *inst = shash_alg_instance(parent);
+ struct crypto_shash_spawn *spawn = shash_instance_ctx(inst);
struct hmac_ctx *ctx = hmac_ctx(parent);
hash = crypto_spawn_shash(spawn);
@@ -152,24 +151,21 @@ static int hmac_init_tfm(struct crypto_tfm *tfm)
parent->descsize = sizeof(struct shash_desc) +
crypto_shash_descsize(hash);
- if (WARN_ON(parent->descsize > HASH_MAX_DESCSIZE)) {
- crypto_free_shash(hash);
- return -EINVAL;
- }
ctx->hash = hash;
return 0;
}
-static void hmac_exit_tfm(struct crypto_tfm *tfm)
+static void hmac_exit_tfm(struct crypto_shash *parent)
{
- struct hmac_ctx *ctx = hmac_ctx(__crypto_shash_cast(tfm));
+ struct hmac_ctx *ctx = hmac_ctx(parent);
crypto_free_shash(ctx->hash);
}
static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct shash_instance *inst;
+ struct crypto_shash_spawn *spawn;
struct crypto_alg *alg;
struct shash_alg *salg;
int err;
@@ -180,31 +176,32 @@ static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
return err;
- salg = shash_attr_alg(tb[1], 0, 0);
- if (IS_ERR(salg))
- return PTR_ERR(salg);
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return -ENOMEM;
+ spawn = shash_instance_ctx(inst);
+
+ err = crypto_grab_shash(spawn, shash_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, 0);
+ if (err)
+ goto err_free_inst;
+ salg = crypto_spawn_shash_alg(spawn);
alg = &salg->base;
- /* The underlying hash algorithm must be unkeyed */
+ /* The underlying hash algorithm must not require a key */
err = -EINVAL;
- if (crypto_shash_alg_has_setkey(salg))
- goto out_put_alg;
+ if (crypto_shash_alg_needs_key(salg))
+ goto err_free_inst;
ds = salg->digestsize;
ss = salg->statesize;
if (ds > alg->cra_blocksize ||
ss < alg->cra_blocksize)
- goto out_put_alg;
+ goto err_free_inst;
- inst = shash_alloc_instance("hmac", alg);
- err = PTR_ERR(inst);
- if (IS_ERR(inst))
- goto out_put_alg;
-
- err = crypto_init_shash_spawn(shash_instance_ctx(inst), salg,
- shash_crypto_instance(inst));
+ err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
inst->alg.base.cra_priority = alg->cra_priority;
inst->alg.base.cra_blocksize = alg->cra_blocksize;
@@ -217,9 +214,6 @@ static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.base.cra_ctxsize = sizeof(struct hmac_ctx) +
ALIGN(ss * 2, crypto_tfm_ctx_alignment());
- inst->alg.base.cra_init = hmac_init_tfm;
- inst->alg.base.cra_exit = hmac_exit_tfm;
-
inst->alg.init = hmac_init;
inst->alg.update = hmac_update;
inst->alg.final = hmac_final;
@@ -227,22 +221,22 @@ static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.export = hmac_export;
inst->alg.import = hmac_import;
inst->alg.setkey = hmac_setkey;
+ inst->alg.init_tfm = hmac_init_tfm;
+ inst->alg.exit_tfm = hmac_exit_tfm;
+
+ inst->free = shash_free_singlespawn_instance;
err = shash_register_instance(tmpl, inst);
if (err) {
-out_free_inst:
- shash_free_instance(shash_crypto_instance(inst));
+err_free_inst:
+ shash_free_singlespawn_instance(inst);
}
-
-out_put_alg:
- crypto_mod_put(alg);
return err;
}
static struct crypto_template hmac_tmpl = {
.name = "hmac",
.create = hmac_create,
- .free = shash_free_instance,
.module = THIS_MODULE,
};
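
The digestsize/statesize constraints checked in hmac_create() fall straight out of the construction. As a reminder, the standard RFC 2104 definition (not patch text) is

	\mathrm{HMAC}(K, m) = H\bigl((K' \oplus \mathrm{opad}) \parallel H((K' \oplus \mathrm{ipad}) \parallel m)\bigr)

where K' is K hashed down if longer than the block size and zero-padded up to it otherwise, and ipad/opad are the block-size-wide constants 0x36.../0x5c.... The inner digest (ds bytes) is re-fed to the outer hash, and the two precomputed pad states (ss bytes each) are what the ALIGN(ss * 2, ...) term in cra_ctxsize reserves.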
diff --git a/crypto/internal.h b/crypto/internal.h
index 93df7bec844a..d5ebc60c5143 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -58,9 +58,6 @@ static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg)
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
-int crypto_init_cipher_ops(struct crypto_tfm *tfm);
-int crypto_init_compress_ops(struct crypto_tfm *tfm);
-
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask);
void crypto_larval_kill(struct crypto_alg *alg);
void crypto_alg_tested(const char *name, int err);
@@ -68,7 +65,6 @@ void crypto_alg_tested(const char *name, int err);
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
struct crypto_alg *nalg);
void crypto_remove_final(struct list_head *list);
-void crypto_shoot_alg(struct crypto_alg *alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
u32 mask);
void *crypto_create_tfm(struct crypto_alg *alg,
diff --git a/crypto/jitterentropy-kcapi.c b/crypto/jitterentropy-kcapi.c
index 701b8d86ab49..a5ce8f96790f 100644
--- a/crypto/jitterentropy-kcapi.c
+++ b/crypto/jitterentropy-kcapi.c
@@ -44,13 +44,7 @@
#include <linux/crypto.h>
#include <crypto/internal/rng.h>
-struct rand_data;
-int jent_read_entropy(struct rand_data *ec, unsigned char *data,
- unsigned int len);
-int jent_entropy_init(void);
-struct rand_data *jent_entropy_collector_alloc(unsigned int osr,
- unsigned int flags);
-void jent_entropy_collector_free(struct rand_data *entropy_collector);
+#include "jitterentropy.h"
/***************************************************************************
* Helper function
diff --git a/crypto/jitterentropy.c b/crypto/jitterentropy.c
index 77fa2120fe0c..042157f0d28b 100644
--- a/crypto/jitterentropy.c
+++ b/crypto/jitterentropy.c
@@ -103,12 +103,7 @@ struct rand_data {
* Helper functions
***************************************************************************/
-void jent_get_nstime(__u64 *out);
-void *jent_zalloc(unsigned int len);
-void jent_zfree(void *ptr);
-int jent_fips_enabled(void);
-void jent_panic(char *s);
-void jent_memcpy(void *dest, const void *src, unsigned int n);
+#include "jitterentropy.h"
/**
* Update of the loop count used for the next round of
@@ -172,7 +167,7 @@ static __u64 jent_loop_shuffle(struct rand_data *ec,
* implies that careful retesting must be done.
*
* Input:
- * @ec entropy collector struct -- may be NULL
+ * @ec entropy collector struct
* @time time stamp to be injected
* @loop_cnt if a value not equal to 0 is set, use the given value as number of
* loops to perform the folding
@@ -400,8 +395,8 @@ static void jent_gen_entropy(struct rand_data *ec)
* primes the test if needed.
*
* Return:
- * 0 if FIPS test passed
- * < 0 if FIPS test failed
+ * returns normally if FIPS test passed
+ * panics the kernel if FIPS test failed
*/
static void jent_fips_test(struct rand_data *ec)
{
diff --git a/crypto/jitterentropy.h b/crypto/jitterentropy.h
new file mode 100644
index 000000000000..c83fff32d130
--- /dev/null
+++ b/crypto/jitterentropy.h
@@ -0,0 +1,17 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+
+extern void *jent_zalloc(unsigned int len);
+extern void jent_zfree(void *ptr);
+extern int jent_fips_enabled(void);
+extern void jent_panic(char *s);
+extern void jent_memcpy(void *dest, const void *src, unsigned int n);
+extern void jent_get_nstime(__u64 *out);
+
+struct rand_data;
+extern int jent_entropy_init(void);
+extern int jent_read_entropy(struct rand_data *ec, unsigned char *data,
+ unsigned int len);
+
+extern struct rand_data *jent_entropy_collector_alloc(unsigned int osr,
+ unsigned int flags);
+extern void jent_entropy_collector_free(struct rand_data *entropy_collector);
diff --git a/crypto/keywrap.c b/crypto/keywrap.c
index a155c88105ea..0355cce21b1e 100644
--- a/crypto/keywrap.c
+++ b/crypto/keywrap.c
@@ -266,10 +266,12 @@ static int crypto_kw_create(struct crypto_template *tmpl, struct rtattr **tb)
struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
+ alg = skcipher_ialg_simple(inst);
+
err = -EINVAL;
/* Section 5.1 requirement for KW */
if (alg->cra_blocksize != sizeof(struct crypto_kw_block))
@@ -283,14 +285,11 @@ static int crypto_kw_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.decrypt = crypto_kw_decrypt;
err = skcipher_register_instance(tmpl, inst);
- if (err)
- goto out_free_inst;
- goto out_put_alg;
-
+ if (err) {
out_free_inst:
- inst->free(inst);
-out_put_alg:
- crypto_mod_put(alg);
+ inst->free(inst);
+ }
+
return err;
}
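
keywrap shows the new "simple instance" flow end to end: the helper no longer hands back a crypto_alg pointer, and there is no reference for the template to drop afterwards. A hedged sketch of the pattern for any single-cipher template, assuming the accessors used in this hunk (example_ names invented):

static int example_simple_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);	/* underlying cipher */

	err = -EINVAL;
	if (alg->cra_blocksize != 16)		/* example constraint */
		goto out_free_inst;

	/* assign the template's own inst->alg.encrypt/.decrypt here */

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}
	return err;
}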
diff --git a/crypto/lrw.c b/crypto/lrw.c
index be829f6afc8e..63c485c0d8a6 100644
--- a/crypto/lrw.c
+++ b/crypto/lrw.c
@@ -79,8 +79,6 @@ static int setkey(struct crypto_skcipher *parent, const u8 *key,
crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
err = crypto_skcipher_setkey(child, key, keylen - bsize);
- crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
if (err)
return err;
@@ -303,6 +301,7 @@ static int create(struct crypto_template *tmpl, struct rtattr **tb)
struct skcipher_alg *alg;
const char *cipher_name;
char ecb_name[CRYPTO_MAX_ALG_NAME];
+ u32 mask;
int err;
algt = crypto_get_attr_type(tb);
@@ -312,6 +311,8 @@ static int create(struct crypto_template *tmpl, struct rtattr **tb)
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return -EINVAL;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
cipher_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(cipher_name))
return PTR_ERR(cipher_name);
@@ -322,19 +323,17 @@ static int create(struct crypto_template *tmpl, struct rtattr **tb)
spawn = skcipher_instance_ctx(inst);
- crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
- err = crypto_grab_skcipher(spawn, cipher_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
+ cipher_name, 0, mask);
if (err == -ENOENT) {
err = -ENAMETOOLONG;
if (snprintf(ecb_name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
cipher_name) >= CRYPTO_MAX_ALG_NAME)
goto err_free_inst;
- err = crypto_grab_skcipher(spawn, ecb_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher(spawn,
+ skcipher_crypto_instance(inst),
+ ecb_name, 0, mask);
}
if (err)
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c
index 20e6220f46f6..63350c4ad461 100644
--- a/crypto/michael_mic.c
+++ b/crypto/michael_mic.c
@@ -137,10 +137,8 @@ static int michael_setkey(struct crypto_shash *tfm, const u8 *key,
const __le32 *data = (const __le32 *)key;
- if (keylen != 8) {
- crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != 8)
return -EINVAL;
- }
mctx->l = le32_to_cpu(data[0]);
mctx->r = le32_to_cpu(data[1]);
diff --git a/crypto/morus1280.c b/crypto/morus1280.c
deleted file mode 100644
index f8734c6576af..000000000000
--- a/crypto/morus1280.c
+++ /dev/null
@@ -1,542 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * The MORUS-1280 Authenticated-Encryption Algorithm
- *
- * Copyright (c) 2016-2018 Ondrej Mosnacek <omosnacek@gmail.com>
- * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
- */
-
-#include <asm/unaligned.h>
-#include <crypto/algapi.h>
-#include <crypto/internal/aead.h>
-#include <crypto/internal/skcipher.h>
-#include <crypto/morus_common.h>
-#include <crypto/scatterwalk.h>
-#include <linux/err.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/scatterlist.h>
-
-#define MORUS1280_WORD_SIZE 8
-#define MORUS1280_BLOCK_SIZE (MORUS_BLOCK_WORDS * MORUS1280_WORD_SIZE)
-#define MORUS1280_BLOCK_ALIGN (__alignof__(__le64))
-#define MORUS1280_ALIGNED(p) IS_ALIGNED((uintptr_t)p, MORUS1280_BLOCK_ALIGN)
-
-struct morus1280_block {
- u64 words[MORUS_BLOCK_WORDS];
-};
-
-union morus1280_block_in {
- __le64 words[MORUS_BLOCK_WORDS];
- u8 bytes[MORUS1280_BLOCK_SIZE];
-};
-
-struct morus1280_state {
- struct morus1280_block s[MORUS_STATE_BLOCKS];
-};
-
-struct morus1280_ctx {
- struct morus1280_block key;
-};
-
-struct morus1280_ops {
- int (*skcipher_walk_init)(struct skcipher_walk *walk,
- struct aead_request *req, bool atomic);
-
- void (*crypt_chunk)(struct morus1280_state *state,
- u8 *dst, const u8 *src, unsigned int size);
-};
-
-static const struct morus1280_block crypto_morus1280_const[1] = {
- { .words = {
- U64_C(0x0d08050302010100),
- U64_C(0x6279e99059372215),
- U64_C(0xf12fc26d55183ddb),
- U64_C(0xdd28b57342311120),
- } },
-};
-
-static void crypto_morus1280_round(struct morus1280_block *b0,
- struct morus1280_block *b1,
- struct morus1280_block *b2,
- struct morus1280_block *b3,
- struct morus1280_block *b4,
- const struct morus1280_block *m,
- unsigned int b, unsigned int w)
-{
- unsigned int i;
- struct morus1280_block tmp;
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- b0->words[i] ^= b1->words[i] & b2->words[i];
- b0->words[i] ^= b3->words[i];
- b0->words[i] ^= m->words[i];
- b0->words[i] = rol64(b0->words[i], b);
- }
-
- tmp = *b3;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- b3->words[(i + w) % MORUS_BLOCK_WORDS] = tmp.words[i];
-}
-
-static void crypto_morus1280_update(struct morus1280_state *state,
- const struct morus1280_block *m)
-{
- static const struct morus1280_block z = {};
-
- struct morus1280_block *s = state->s;
-
- crypto_morus1280_round(&s[0], &s[1], &s[2], &s[3], &s[4], &z, 13, 1);
- crypto_morus1280_round(&s[1], &s[2], &s[3], &s[4], &s[0], m, 46, 2);
- crypto_morus1280_round(&s[2], &s[3], &s[4], &s[0], &s[1], m, 38, 3);
- crypto_morus1280_round(&s[3], &s[4], &s[0], &s[1], &s[2], m, 7, 2);
- crypto_morus1280_round(&s[4], &s[0], &s[1], &s[2], &s[3], m, 4, 1);
-}
-
-static void crypto_morus1280_load_a(struct morus1280_block *dst, const u8 *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- dst->words[i] = le64_to_cpu(*(const __le64 *)src);
- src += MORUS1280_WORD_SIZE;
- }
-}
-
-static void crypto_morus1280_load_u(struct morus1280_block *dst, const u8 *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- dst->words[i] = get_unaligned_le64(src);
- src += MORUS1280_WORD_SIZE;
- }
-}
-
-static void crypto_morus1280_load(struct morus1280_block *dst, const u8 *src)
-{
- if (MORUS1280_ALIGNED(src))
- crypto_morus1280_load_a(dst, src);
- else
- crypto_morus1280_load_u(dst, src);
-}
-
-static void crypto_morus1280_store_a(u8 *dst, const struct morus1280_block *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- *(__le64 *)dst = cpu_to_le64(src->words[i]);
- dst += MORUS1280_WORD_SIZE;
- }
-}
-
-static void crypto_morus1280_store_u(u8 *dst, const struct morus1280_block *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- put_unaligned_le64(src->words[i], dst);
- dst += MORUS1280_WORD_SIZE;
- }
-}
-
-static void crypto_morus1280_store(u8 *dst, const struct morus1280_block *src)
-{
- if (MORUS1280_ALIGNED(dst))
- crypto_morus1280_store_a(dst, src);
- else
- crypto_morus1280_store_u(dst, src);
-}
-
-static void crypto_morus1280_ad(struct morus1280_state *state, const u8 *src,
- unsigned int size)
-{
- struct morus1280_block m;
-
- if (MORUS1280_ALIGNED(src)) {
- while (size >= MORUS1280_BLOCK_SIZE) {
- crypto_morus1280_load_a(&m, src);
- crypto_morus1280_update(state, &m);
-
- size -= MORUS1280_BLOCK_SIZE;
- src += MORUS1280_BLOCK_SIZE;
- }
- } else {
- while (size >= MORUS1280_BLOCK_SIZE) {
- crypto_morus1280_load_u(&m, src);
- crypto_morus1280_update(state, &m);
-
- size -= MORUS1280_BLOCK_SIZE;
- src += MORUS1280_BLOCK_SIZE;
- }
- }
-}
-
-static void crypto_morus1280_core(const struct morus1280_state *state,
- struct morus1280_block *blk)
-{
- unsigned int i;
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- blk->words[(i + 3) % MORUS_BLOCK_WORDS] ^= state->s[1].words[i];
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- blk->words[i] ^= state->s[0].words[i];
- blk->words[i] ^= state->s[2].words[i] & state->s[3].words[i];
- }
-}
-
-static void crypto_morus1280_encrypt_chunk(struct morus1280_state *state,
- u8 *dst, const u8 *src,
- unsigned int size)
-{
- struct morus1280_block c, m;
-
- if (MORUS1280_ALIGNED(src) && MORUS1280_ALIGNED(dst)) {
- while (size >= MORUS1280_BLOCK_SIZE) {
- crypto_morus1280_load_a(&m, src);
- c = m;
- crypto_morus1280_core(state, &c);
- crypto_morus1280_store_a(dst, &c);
- crypto_morus1280_update(state, &m);
-
- src += MORUS1280_BLOCK_SIZE;
- dst += MORUS1280_BLOCK_SIZE;
- size -= MORUS1280_BLOCK_SIZE;
- }
- } else {
- while (size >= MORUS1280_BLOCK_SIZE) {
- crypto_morus1280_load_u(&m, src);
- c = m;
- crypto_morus1280_core(state, &c);
- crypto_morus1280_store_u(dst, &c);
- crypto_morus1280_update(state, &m);
-
- src += MORUS1280_BLOCK_SIZE;
- dst += MORUS1280_BLOCK_SIZE;
- size -= MORUS1280_BLOCK_SIZE;
- }
- }
-
- if (size > 0) {
- union morus1280_block_in tail;
-
- memcpy(tail.bytes, src, size);
- memset(tail.bytes + size, 0, MORUS1280_BLOCK_SIZE - size);
-
- crypto_morus1280_load_a(&m, tail.bytes);
- c = m;
- crypto_morus1280_core(state, &c);
- crypto_morus1280_store_a(tail.bytes, &c);
- crypto_morus1280_update(state, &m);
-
- memcpy(dst, tail.bytes, size);
- }
-}
-
-static void crypto_morus1280_decrypt_chunk(struct morus1280_state *state,
- u8 *dst, const u8 *src,
- unsigned int size)
-{
- struct morus1280_block m;
-
- if (MORUS1280_ALIGNED(src) && MORUS1280_ALIGNED(dst)) {
- while (size >= MORUS1280_BLOCK_SIZE) {
- crypto_morus1280_load_a(&m, src);
- crypto_morus1280_core(state, &m);
- crypto_morus1280_store_a(dst, &m);
- crypto_morus1280_update(state, &m);
-
- src += MORUS1280_BLOCK_SIZE;
- dst += MORUS1280_BLOCK_SIZE;
- size -= MORUS1280_BLOCK_SIZE;
- }
- } else {
- while (size >= MORUS1280_BLOCK_SIZE) {
- crypto_morus1280_load_u(&m, src);
- crypto_morus1280_core(state, &m);
- crypto_morus1280_store_u(dst, &m);
- crypto_morus1280_update(state, &m);
-
- src += MORUS1280_BLOCK_SIZE;
- dst += MORUS1280_BLOCK_SIZE;
- size -= MORUS1280_BLOCK_SIZE;
- }
- }
-
- if (size > 0) {
- union morus1280_block_in tail;
-
- memcpy(tail.bytes, src, size);
- memset(tail.bytes + size, 0, MORUS1280_BLOCK_SIZE - size);
-
- crypto_morus1280_load_a(&m, tail.bytes);
- crypto_morus1280_core(state, &m);
- crypto_morus1280_store_a(tail.bytes, &m);
- memset(tail.bytes + size, 0, MORUS1280_BLOCK_SIZE - size);
- crypto_morus1280_load_a(&m, tail.bytes);
- crypto_morus1280_update(state, &m);
-
- memcpy(dst, tail.bytes, size);
- }
-}
-
-static void crypto_morus1280_init(struct morus1280_state *state,
- const struct morus1280_block *key,
- const u8 *iv)
-{
- static const struct morus1280_block z = {};
-
- union morus1280_block_in tmp;
- unsigned int i;
-
- memcpy(tmp.bytes, iv, MORUS_NONCE_SIZE);
- memset(tmp.bytes + MORUS_NONCE_SIZE, 0,
- MORUS1280_BLOCK_SIZE - MORUS_NONCE_SIZE);
-
- crypto_morus1280_load(&state->s[0], tmp.bytes);
- state->s[1] = *key;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- state->s[2].words[i] = U64_C(0xFFFFFFFFFFFFFFFF);
- state->s[3] = z;
- state->s[4] = crypto_morus1280_const[0];
-
- for (i = 0; i < 16; i++)
- crypto_morus1280_update(state, &z);
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- state->s[1].words[i] ^= key->words[i];
-}
-
-static void crypto_morus1280_process_ad(struct morus1280_state *state,
- struct scatterlist *sg_src,
- unsigned int assoclen)
-{
- struct scatter_walk walk;
- struct morus1280_block m;
- union morus1280_block_in buf;
- unsigned int pos = 0;
-
- scatterwalk_start(&walk, sg_src);
- while (assoclen != 0) {
- unsigned int size = scatterwalk_clamp(&walk, assoclen);
- unsigned int left = size;
- void *mapped = scatterwalk_map(&walk);
- const u8 *src = (const u8 *)mapped;
-
- if (pos + size >= MORUS1280_BLOCK_SIZE) {
- if (pos > 0) {
- unsigned int fill = MORUS1280_BLOCK_SIZE - pos;
- memcpy(buf.bytes + pos, src, fill);
-
- crypto_morus1280_load_a(&m, buf.bytes);
- crypto_morus1280_update(state, &m);
-
- pos = 0;
- left -= fill;
- src += fill;
- }
-
- crypto_morus1280_ad(state, src, left);
- src += left & ~(MORUS1280_BLOCK_SIZE - 1);
- left &= MORUS1280_BLOCK_SIZE - 1;
- }
-
- memcpy(buf.bytes + pos, src, left);
-
- pos += left;
- assoclen -= size;
- scatterwalk_unmap(mapped);
- scatterwalk_advance(&walk, size);
- scatterwalk_done(&walk, 0, assoclen);
- }
-
- if (pos > 0) {
- memset(buf.bytes + pos, 0, MORUS1280_BLOCK_SIZE - pos);
-
- crypto_morus1280_load_a(&m, buf.bytes);
- crypto_morus1280_update(state, &m);
- }
-}
-
-static void crypto_morus1280_process_crypt(struct morus1280_state *state,
- struct aead_request *req,
- const struct morus1280_ops *ops)
-{
- struct skcipher_walk walk;
-
- ops->skcipher_walk_init(&walk, req, false);
-
- while (walk.nbytes) {
- unsigned int nbytes = walk.nbytes;
-
- if (nbytes < walk.total)
- nbytes = round_down(nbytes, walk.stride);
-
- ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes);
-
- skcipher_walk_done(&walk, walk.nbytes - nbytes);
- }
-}
-
-static void crypto_morus1280_final(struct morus1280_state *state,
- struct morus1280_block *tag_xor,
- u64 assoclen, u64 cryptlen)
-{
- struct morus1280_block tmp;
- unsigned int i;
-
- tmp.words[0] = assoclen * 8;
- tmp.words[1] = cryptlen * 8;
- tmp.words[2] = 0;
- tmp.words[3] = 0;
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- state->s[4].words[i] ^= state->s[0].words[i];
-
- for (i = 0; i < 10; i++)
- crypto_morus1280_update(state, &tmp);
-
- crypto_morus1280_core(state, tag_xor);
-}
-
-static int crypto_morus1280_setkey(struct crypto_aead *aead, const u8 *key,
- unsigned int keylen)
-{
- struct morus1280_ctx *ctx = crypto_aead_ctx(aead);
- union morus1280_block_in tmp;
-
- if (keylen == MORUS1280_BLOCK_SIZE)
- crypto_morus1280_load(&ctx->key, key);
- else if (keylen == MORUS1280_BLOCK_SIZE / 2) {
- memcpy(tmp.bytes, key, keylen);
- memcpy(tmp.bytes + keylen, key, keylen);
-
- crypto_morus1280_load(&ctx->key, tmp.bytes);
- } else {
- crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
- return -EINVAL;
- }
-
- return 0;
-}
-
-static int crypto_morus1280_setauthsize(struct crypto_aead *tfm,
- unsigned int authsize)
-{
- return (authsize <= MORUS_MAX_AUTH_SIZE) ? 0 : -EINVAL;
-}
-
-static void crypto_morus1280_crypt(struct aead_request *req,
- struct morus1280_block *tag_xor,
- unsigned int cryptlen,
- const struct morus1280_ops *ops)
-{
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct morus1280_ctx *ctx = crypto_aead_ctx(tfm);
- struct morus1280_state state;
-
- crypto_morus1280_init(&state, &ctx->key, req->iv);
- crypto_morus1280_process_ad(&state, req->src, req->assoclen);
- crypto_morus1280_process_crypt(&state, req, ops);
- crypto_morus1280_final(&state, tag_xor, req->assoclen, cryptlen);
-}
-
-static int crypto_morus1280_encrypt(struct aead_request *req)
-{
- static const struct morus1280_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_encrypt,
- .crypt_chunk = crypto_morus1280_encrypt_chunk,
- };
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct morus1280_block tag = {};
- union morus1280_block_in tag_out;
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen;
-
- crypto_morus1280_crypt(req, &tag, cryptlen, &ops);
- crypto_morus1280_store(tag_out.bytes, &tag);
-
- scatterwalk_map_and_copy(tag_out.bytes, req->dst,
- req->assoclen + cryptlen, authsize, 1);
- return 0;
-}
-
-static int crypto_morus1280_decrypt(struct aead_request *req)
-{
- static const struct morus1280_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_decrypt,
- .crypt_chunk = crypto_morus1280_decrypt_chunk,
- };
- static const u8 zeros[MORUS1280_BLOCK_SIZE] = {};
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- union morus1280_block_in tag_in;
- struct morus1280_block tag;
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen - authsize;
-
- scatterwalk_map_and_copy(tag_in.bytes, req->src,
- req->assoclen + cryptlen, authsize, 0);
-
- crypto_morus1280_load(&tag, tag_in.bytes);
- crypto_morus1280_crypt(req, &tag, cryptlen, &ops);
- crypto_morus1280_store(tag_in.bytes, &tag);
-
- return crypto_memneq(tag_in.bytes, zeros, authsize) ? -EBADMSG : 0;
-}
-
-static int crypto_morus1280_init_tfm(struct crypto_aead *tfm)
-{
- return 0;
-}
-
-static void crypto_morus1280_exit_tfm(struct crypto_aead *tfm)
-{
-}
-
-static struct aead_alg crypto_morus1280_alg = {
- .setkey = crypto_morus1280_setkey,
- .setauthsize = crypto_morus1280_setauthsize,
- .encrypt = crypto_morus1280_encrypt,
- .decrypt = crypto_morus1280_decrypt,
- .init = crypto_morus1280_init_tfm,
- .exit = crypto_morus1280_exit_tfm,
-
- .ivsize = MORUS_NONCE_SIZE,
- .maxauthsize = MORUS_MAX_AUTH_SIZE,
- .chunksize = MORUS1280_BLOCK_SIZE,
-
- .base = {
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct morus1280_ctx),
- .cra_alignmask = 0,
-
- .cra_priority = 100,
-
- .cra_name = "morus1280",
- .cra_driver_name = "morus1280-generic",
-
- .cra_module = THIS_MODULE,
- }
-};
-
-
-static int __init crypto_morus1280_module_init(void)
-{
- return crypto_register_aead(&crypto_morus1280_alg);
-}
-
-static void __exit crypto_morus1280_module_exit(void)
-{
- crypto_unregister_aead(&crypto_morus1280_alg);
-}
-
-subsys_initcall(crypto_morus1280_module_init);
-module_exit(crypto_morus1280_module_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
-MODULE_DESCRIPTION("MORUS-1280 AEAD algorithm");
-MODULE_ALIAS_CRYPTO("morus1280");
-MODULE_ALIAS_CRYPTO("morus1280-generic");
diff --git a/crypto/morus640.c b/crypto/morus640.c
deleted file mode 100644
index ae5aa9482cb4..000000000000
--- a/crypto/morus640.c
+++ /dev/null
@@ -1,533 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
-/*
- * The MORUS-640 Authenticated-Encryption Algorithm
- *
- * Copyright (c) 2016-2018 Ondrej Mosnacek <omosnacek@gmail.com>
- * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
- */
-
-#include <asm/unaligned.h>
-#include <crypto/algapi.h>
-#include <crypto/internal/aead.h>
-#include <crypto/internal/skcipher.h>
-#include <crypto/morus_common.h>
-#include <crypto/scatterwalk.h>
-#include <linux/err.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/scatterlist.h>
-
-#define MORUS640_WORD_SIZE 4
-#define MORUS640_BLOCK_SIZE (MORUS_BLOCK_WORDS * MORUS640_WORD_SIZE)
-#define MORUS640_BLOCK_ALIGN (__alignof__(__le32))
-#define MORUS640_ALIGNED(p) IS_ALIGNED((uintptr_t)p, MORUS640_BLOCK_ALIGN)
-
-struct morus640_block {
- u32 words[MORUS_BLOCK_WORDS];
-};
-
-union morus640_block_in {
- __le32 words[MORUS_BLOCK_WORDS];
- u8 bytes[MORUS640_BLOCK_SIZE];
-};
-
-struct morus640_state {
- struct morus640_block s[MORUS_STATE_BLOCKS];
-};
-
-struct morus640_ctx {
- struct morus640_block key;
-};
-
-struct morus640_ops {
- int (*skcipher_walk_init)(struct skcipher_walk *walk,
- struct aead_request *req, bool atomic);
-
- void (*crypt_chunk)(struct morus640_state *state,
- u8 *dst, const u8 *src, unsigned int size);
-};
-
-static const struct morus640_block crypto_morus640_const[2] = {
- { .words = {
- U32_C(0x02010100),
- U32_C(0x0d080503),
- U32_C(0x59372215),
- U32_C(0x6279e990),
- } },
- { .words = {
- U32_C(0x55183ddb),
- U32_C(0xf12fc26d),
- U32_C(0x42311120),
- U32_C(0xdd28b573),
- } },
-};
-
-static void crypto_morus640_round(struct morus640_block *b0,
- struct morus640_block *b1,
- struct morus640_block *b2,
- struct morus640_block *b3,
- struct morus640_block *b4,
- const struct morus640_block *m,
- unsigned int b, unsigned int w)
-{
- unsigned int i;
- struct morus640_block tmp;
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- b0->words[i] ^= b1->words[i] & b2->words[i];
- b0->words[i] ^= b3->words[i];
- b0->words[i] ^= m->words[i];
- b0->words[i] = rol32(b0->words[i], b);
- }
-
- tmp = *b3;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- b3->words[(i + w) % MORUS_BLOCK_WORDS] = tmp.words[i];
-}
-
-static void crypto_morus640_update(struct morus640_state *state,
- const struct morus640_block *m)
-{
- static const struct morus640_block z = {};
-
- struct morus640_block *s = state->s;
-
- crypto_morus640_round(&s[0], &s[1], &s[2], &s[3], &s[4], &z, 5, 1);
- crypto_morus640_round(&s[1], &s[2], &s[3], &s[4], &s[0], m, 31, 2);
- crypto_morus640_round(&s[2], &s[3], &s[4], &s[0], &s[1], m, 7, 3);
- crypto_morus640_round(&s[3], &s[4], &s[0], &s[1], &s[2], m, 22, 2);
- crypto_morus640_round(&s[4], &s[0], &s[1], &s[2], &s[3], m, 13, 1);
-}
-
-static void crypto_morus640_load_a(struct morus640_block *dst, const u8 *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- dst->words[i] = le32_to_cpu(*(const __le32 *)src);
- src += MORUS640_WORD_SIZE;
- }
-}
-
-static void crypto_morus640_load_u(struct morus640_block *dst, const u8 *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- dst->words[i] = get_unaligned_le32(src);
- src += MORUS640_WORD_SIZE;
- }
-}
-
-static void crypto_morus640_load(struct morus640_block *dst, const u8 *src)
-{
- if (MORUS640_ALIGNED(src))
- crypto_morus640_load_a(dst, src);
- else
- crypto_morus640_load_u(dst, src);
-}
-
-static void crypto_morus640_store_a(u8 *dst, const struct morus640_block *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- *(__le32 *)dst = cpu_to_le32(src->words[i]);
- dst += MORUS640_WORD_SIZE;
- }
-}
-
-static void crypto_morus640_store_u(u8 *dst, const struct morus640_block *src)
-{
- unsigned int i;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- put_unaligned_le32(src->words[i], dst);
- dst += MORUS640_WORD_SIZE;
- }
-}
-
-static void crypto_morus640_store(u8 *dst, const struct morus640_block *src)
-{
- if (MORUS640_ALIGNED(dst))
- crypto_morus640_store_a(dst, src);
- else
- crypto_morus640_store_u(dst, src);
-}
-
-static void crypto_morus640_ad(struct morus640_state *state, const u8 *src,
- unsigned int size)
-{
- struct morus640_block m;
-
- if (MORUS640_ALIGNED(src)) {
- while (size >= MORUS640_BLOCK_SIZE) {
- crypto_morus640_load_a(&m, src);
- crypto_morus640_update(state, &m);
-
- size -= MORUS640_BLOCK_SIZE;
- src += MORUS640_BLOCK_SIZE;
- }
- } else {
- while (size >= MORUS640_BLOCK_SIZE) {
- crypto_morus640_load_u(&m, src);
- crypto_morus640_update(state, &m);
-
- size -= MORUS640_BLOCK_SIZE;
- src += MORUS640_BLOCK_SIZE;
- }
- }
-}
-
-static void crypto_morus640_core(const struct morus640_state *state,
- struct morus640_block *blk)
-{
- unsigned int i;
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- blk->words[(i + 3) % MORUS_BLOCK_WORDS] ^= state->s[1].words[i];
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++) {
- blk->words[i] ^= state->s[0].words[i];
- blk->words[i] ^= state->s[2].words[i] & state->s[3].words[i];
- }
-}
-
-static void crypto_morus640_encrypt_chunk(struct morus640_state *state, u8 *dst,
- const u8 *src, unsigned int size)
-{
- struct morus640_block c, m;
-
- if (MORUS640_ALIGNED(src) && MORUS640_ALIGNED(dst)) {
- while (size >= MORUS640_BLOCK_SIZE) {
- crypto_morus640_load_a(&m, src);
- c = m;
- crypto_morus640_core(state, &c);
- crypto_morus640_store_a(dst, &c);
- crypto_morus640_update(state, &m);
-
- src += MORUS640_BLOCK_SIZE;
- dst += MORUS640_BLOCK_SIZE;
- size -= MORUS640_BLOCK_SIZE;
- }
- } else {
- while (size >= MORUS640_BLOCK_SIZE) {
- crypto_morus640_load_u(&m, src);
- c = m;
- crypto_morus640_core(state, &c);
- crypto_morus640_store_u(dst, &c);
- crypto_morus640_update(state, &m);
-
- src += MORUS640_BLOCK_SIZE;
- dst += MORUS640_BLOCK_SIZE;
- size -= MORUS640_BLOCK_SIZE;
- }
- }
-
- if (size > 0) {
- union morus640_block_in tail;
-
- memcpy(tail.bytes, src, size);
- memset(tail.bytes + size, 0, MORUS640_BLOCK_SIZE - size);
-
- crypto_morus640_load_a(&m, tail.bytes);
- c = m;
- crypto_morus640_core(state, &c);
- crypto_morus640_store_a(tail.bytes, &c);
- crypto_morus640_update(state, &m);
-
- memcpy(dst, tail.bytes, size);
- }
-}
-
-static void crypto_morus640_decrypt_chunk(struct morus640_state *state, u8 *dst,
- const u8 *src, unsigned int size)
-{
- struct morus640_block m;
-
- if (MORUS640_ALIGNED(src) && MORUS640_ALIGNED(dst)) {
- while (size >= MORUS640_BLOCK_SIZE) {
- crypto_morus640_load_a(&m, src);
- crypto_morus640_core(state, &m);
- crypto_morus640_store_a(dst, &m);
- crypto_morus640_update(state, &m);
-
- src += MORUS640_BLOCK_SIZE;
- dst += MORUS640_BLOCK_SIZE;
- size -= MORUS640_BLOCK_SIZE;
- }
- } else {
- while (size >= MORUS640_BLOCK_SIZE) {
- crypto_morus640_load_u(&m, src);
- crypto_morus640_core(state, &m);
- crypto_morus640_store_u(dst, &m);
- crypto_morus640_update(state, &m);
-
- src += MORUS640_BLOCK_SIZE;
- dst += MORUS640_BLOCK_SIZE;
- size -= MORUS640_BLOCK_SIZE;
- }
- }
-
- if (size > 0) {
- union morus640_block_in tail;
-
- memcpy(tail.bytes, src, size);
- memset(tail.bytes + size, 0, MORUS640_BLOCK_SIZE - size);
-
- crypto_morus640_load_a(&m, tail.bytes);
- crypto_morus640_core(state, &m);
- crypto_morus640_store_a(tail.bytes, &m);
- memset(tail.bytes + size, 0, MORUS640_BLOCK_SIZE - size);
- crypto_morus640_load_a(&m, tail.bytes);
- crypto_morus640_update(state, &m);
-
- memcpy(dst, tail.bytes, size);
- }
-}
-
-static void crypto_morus640_init(struct morus640_state *state,
- const struct morus640_block *key,
- const u8 *iv)
-{
- static const struct morus640_block z = {};
-
- unsigned int i;
-
- crypto_morus640_load(&state->s[0], iv);
- state->s[1] = *key;
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- state->s[2].words[i] = U32_C(0xFFFFFFFF);
- state->s[3] = crypto_morus640_const[0];
- state->s[4] = crypto_morus640_const[1];
-
- for (i = 0; i < 16; i++)
- crypto_morus640_update(state, &z);
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- state->s[1].words[i] ^= key->words[i];
-}
-
-static void crypto_morus640_process_ad(struct morus640_state *state,
- struct scatterlist *sg_src,
- unsigned int assoclen)
-{
- struct scatter_walk walk;
- struct morus640_block m;
- union morus640_block_in buf;
- unsigned int pos = 0;
-
- scatterwalk_start(&walk, sg_src);
- while (assoclen != 0) {
- unsigned int size = scatterwalk_clamp(&walk, assoclen);
- unsigned int left = size;
- void *mapped = scatterwalk_map(&walk);
- const u8 *src = (const u8 *)mapped;
-
- if (pos + size >= MORUS640_BLOCK_SIZE) {
- if (pos > 0) {
- unsigned int fill = MORUS640_BLOCK_SIZE - pos;
- memcpy(buf.bytes + pos, src, fill);
-
- crypto_morus640_load_a(&m, buf.bytes);
- crypto_morus640_update(state, &m);
-
- pos = 0;
- left -= fill;
- src += fill;
- }
-
- crypto_morus640_ad(state, src, left);
- src += left & ~(MORUS640_BLOCK_SIZE - 1);
- left &= MORUS640_BLOCK_SIZE - 1;
- }
-
- memcpy(buf.bytes + pos, src, left);
-
- pos += left;
- assoclen -= size;
- scatterwalk_unmap(mapped);
- scatterwalk_advance(&walk, size);
- scatterwalk_done(&walk, 0, assoclen);
- }
-
- if (pos > 0) {
- memset(buf.bytes + pos, 0, MORUS640_BLOCK_SIZE - pos);
-
- crypto_morus640_load_a(&m, buf.bytes);
- crypto_morus640_update(state, &m);
- }
-}
-
-static void crypto_morus640_process_crypt(struct morus640_state *state,
- struct aead_request *req,
- const struct morus640_ops *ops)
-{
- struct skcipher_walk walk;
-
- ops->skcipher_walk_init(&walk, req, false);
-
- while (walk.nbytes) {
- unsigned int nbytes = walk.nbytes;
-
- if (nbytes < walk.total)
- nbytes = round_down(nbytes, walk.stride);
-
- ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes);
-
- skcipher_walk_done(&walk, walk.nbytes - nbytes);
- }
-}
-
-static void crypto_morus640_final(struct morus640_state *state,
- struct morus640_block *tag_xor,
- u64 assoclen, u64 cryptlen)
-{
- struct morus640_block tmp;
- unsigned int i;
-
- tmp.words[0] = lower_32_bits(assoclen * 8);
- tmp.words[1] = upper_32_bits(assoclen * 8);
- tmp.words[2] = lower_32_bits(cryptlen * 8);
- tmp.words[3] = upper_32_bits(cryptlen * 8);
-
- for (i = 0; i < MORUS_BLOCK_WORDS; i++)
- state->s[4].words[i] ^= state->s[0].words[i];
-
- for (i = 0; i < 10; i++)
- crypto_morus640_update(state, &tmp);
-
- crypto_morus640_core(state, tag_xor);
-}
-
-static int crypto_morus640_setkey(struct crypto_aead *aead, const u8 *key,
- unsigned int keylen)
-{
- struct morus640_ctx *ctx = crypto_aead_ctx(aead);
-
- if (keylen != MORUS640_BLOCK_SIZE) {
- crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
- return -EINVAL;
- }
-
- crypto_morus640_load(&ctx->key, key);
- return 0;
-}
-
-static int crypto_morus640_setauthsize(struct crypto_aead *tfm,
- unsigned int authsize)
-{
- return (authsize <= MORUS_MAX_AUTH_SIZE) ? 0 : -EINVAL;
-}
-
-static void crypto_morus640_crypt(struct aead_request *req,
- struct morus640_block *tag_xor,
- unsigned int cryptlen,
- const struct morus640_ops *ops)
-{
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct morus640_ctx *ctx = crypto_aead_ctx(tfm);
- struct morus640_state state;
-
- crypto_morus640_init(&state, &ctx->key, req->iv);
- crypto_morus640_process_ad(&state, req->src, req->assoclen);
- crypto_morus640_process_crypt(&state, req, ops);
- crypto_morus640_final(&state, tag_xor, req->assoclen, cryptlen);
-}
-
-static int crypto_morus640_encrypt(struct aead_request *req)
-{
- static const struct morus640_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_encrypt,
- .crypt_chunk = crypto_morus640_encrypt_chunk,
- };
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct morus640_block tag = {};
- union morus640_block_in tag_out;
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen;
-
- crypto_morus640_crypt(req, &tag, cryptlen, &ops);
- crypto_morus640_store(tag_out.bytes, &tag);
-
- scatterwalk_map_and_copy(tag_out.bytes, req->dst,
- req->assoclen + cryptlen, authsize, 1);
- return 0;
-}
-
-static int crypto_morus640_decrypt(struct aead_request *req)
-{
- static const struct morus640_ops ops = {
- .skcipher_walk_init = skcipher_walk_aead_decrypt,
- .crypt_chunk = crypto_morus640_decrypt_chunk,
- };
- static const u8 zeros[MORUS640_BLOCK_SIZE] = {};
-
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- union morus640_block_in tag_in;
- struct morus640_block tag;
- unsigned int authsize = crypto_aead_authsize(tfm);
- unsigned int cryptlen = req->cryptlen - authsize;
-
- scatterwalk_map_and_copy(tag_in.bytes, req->src,
- req->assoclen + cryptlen, authsize, 0);
-
- crypto_morus640_load(&tag, tag_in.bytes);
- crypto_morus640_crypt(req, &tag, cryptlen, &ops);
- crypto_morus640_store(tag_in.bytes, &tag);
-
- return crypto_memneq(tag_in.bytes, zeros, authsize) ? -EBADMSG : 0;
-}
-
-static int crypto_morus640_init_tfm(struct crypto_aead *tfm)
-{
- return 0;
-}
-
-static void crypto_morus640_exit_tfm(struct crypto_aead *tfm)
-{
-}
-
-static struct aead_alg crypto_morus640_alg = {
- .setkey = crypto_morus640_setkey,
- .setauthsize = crypto_morus640_setauthsize,
- .encrypt = crypto_morus640_encrypt,
- .decrypt = crypto_morus640_decrypt,
- .init = crypto_morus640_init_tfm,
- .exit = crypto_morus640_exit_tfm,
-
- .ivsize = MORUS_NONCE_SIZE,
- .maxauthsize = MORUS_MAX_AUTH_SIZE,
- .chunksize = MORUS640_BLOCK_SIZE,
-
- .base = {
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct morus640_ctx),
- .cra_alignmask = 0,
-
- .cra_priority = 100,
-
- .cra_name = "morus640",
- .cra_driver_name = "morus640-generic",
-
- .cra_module = THIS_MODULE,
- }
-};
-
-static int __init crypto_morus640_module_init(void)
-{
- return crypto_register_aead(&crypto_morus640_alg);
-}
-
-static void __exit crypto_morus640_module_exit(void)
-{
- crypto_unregister_aead(&crypto_morus640_alg);
-}
-
-subsys_initcall(crypto_morus640_module_init);
-module_exit(crypto_morus640_module_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
-MODULE_DESCRIPTION("MORUS-640 AEAD algorithm");
-MODULE_ALIAS_CRYPTO("morus640");
-MODULE_ALIAS_CRYPTO("morus640-generic");
diff --git a/crypto/nhpoly1305.c b/crypto/nhpoly1305.c
index 9ab4e07cde4d..8a3006c3b51b 100644
--- a/crypto/nhpoly1305.c
+++ b/crypto/nhpoly1305.c
@@ -33,6 +33,7 @@
#include <asm/unaligned.h>
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
+#include <crypto/internal/poly1305.h>
#include <crypto/nhpoly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
@@ -78,7 +79,7 @@ static void process_nh_hash_value(struct nhpoly1305_state *state,
BUILD_BUG_ON(NH_HASH_BYTES % POLY1305_BLOCK_SIZE != 0);
poly1305_core_blocks(&state->poly_state, &key->poly_key, state->nh_hash,
- NH_HASH_BYTES / POLY1305_BLOCK_SIZE);
+ NH_HASH_BYTES / POLY1305_BLOCK_SIZE, 1);
}
/*
@@ -209,7 +210,7 @@ int crypto_nhpoly1305_final_helper(struct shash_desc *desc, u8 *dst, nh_t nh_fn)
if (state->nh_remaining)
process_nh_hash_value(state, key);
- poly1305_core_emit(&state->poly_state, dst);
+ poly1305_core_emit(&state->poly_state, NULL, dst);
return 0;
}
EXPORT_SYMBOL(crypto_nhpoly1305_final_helper);
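
These two hunks track an interface change in the shared Poly1305 core, now declared in <crypto/internal/poly1305.h>: poly1305_core_blocks() takes an explicit high-bit flag (1 here, since every NH hash value is a full block), and poly1305_core_emit() takes an optional 128-bit nonce to fold into the result (NULL here, since nhpoly1305 applies no final offset). A hedged sketch of a direct caller, assuming the signatures used above; the key struct name comes from this era's headers and may differ in other kernels:

#include <crypto/internal/poly1305.h>

static void poly1305_core_demo(const struct poly1305_core_key *key,
			       const u8 block[POLY1305_BLOCK_SIZE],
			       u8 digest[POLY1305_DIGEST_SIZE])
{
	struct poly1305_state state;

	poly1305_core_init(&state);
	/* hibit=1: a full block, padded with the usual 2^128 marker */
	poly1305_core_blocks(&state, key, block, 1, 1);
	/* NULL nonce: emit the raw accumulator, no final (h + s) offset */
	poly1305_core_emit(&state, NULL, digest);
}
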
diff --git a/crypto/ofb.c b/crypto/ofb.c
index 133ff4c7f2c6..2ec68e3f2c55 100644
--- a/crypto/ofb.c
+++ b/crypto/ofb.c
@@ -55,10 +55,12 @@ static int crypto_ofb_create(struct crypto_template *tmpl, struct rtattr **tb)
struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
+ alg = skcipher_ialg_simple(inst);
+
/* OFB mode is a stream cipher. */
inst->alg.base.cra_blocksize = 1;
@@ -75,7 +77,6 @@ static int crypto_ofb_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
inst->free(inst);
- crypto_mod_put(alg);
return err;
}
diff --git a/crypto/pcbc.c b/crypto/pcbc.c
index 862cdb8d8b6c..ae921fb74dc9 100644
--- a/crypto/pcbc.c
+++ b/crypto/pcbc.c
@@ -153,10 +153,9 @@ static int crypto_pcbc_decrypt(struct skcipher_request *req)
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct skcipher_instance *inst;
- struct crypto_alg *alg;
int err;
- inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
+ inst = skcipher_alloc_instance_simple(tmpl, tb);
if (IS_ERR(inst))
return PTR_ERR(inst);
@@ -166,7 +165,7 @@ static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
err = skcipher_register_instance(tmpl, inst);
if (err)
inst->free(inst);
- crypto_mod_put(alg);
+
return err;
}
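
The ofb and pcbc conversions above follow the same API change: skcipher_alloc_instance_simple() no longer hands back the underlying cipher algorithm through an output parameter and no longer requires the caller to crypto_mod_put() it. The instance now owns that reference, and templates that need the algorithm fetch a borrowed pointer with skcipher_ialg_simple(). The resulting ->create() shape, sketched with a placeholder mode name:

static int crypto_foo_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);	/* borrowed reference */

	/* ... mode-specific fixups using alg ... */

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);	/* drops the alg reference too */
	return err;
}
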
diff --git a/crypto/pcrypt.c b/crypto/pcrypt.c
index 0edf5b54fc77..1b632139a8c1 100644
--- a/crypto/pcrypt.c
+++ b/crypto/pcrypt.c
@@ -13,43 +13,18 @@
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
-#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>
-struct padata_pcrypt {
- struct padata_instance *pinst;
- struct workqueue_struct *wq;
-
- /*
-	 * Cpumask for callback CPUs. It should be
-	 * equal to the serial cpumask of the corresponding padata
-	 * instance, so it is updated when padata notifies us about a
-	 * serial cpumask change.
-	 *
-	 * cb_cpumask is protected by RCU. This prevents us from using
-	 * cpumask_var_t directly, because the actual type of
-	 * cpumask_var_t depends on the kernel configuration (in
-	 * particular on the CONFIG_CPUMASK_OFFSTACK macro): depending
-	 * on the configuration, cpumask_var_t may be either a pointer
-	 * to struct cpumask or a variable allocated on the stack.
-	 * Thus we cannot safely use cpumask_var_t with RCU operations
-	 * such as rcu_assign_pointer or rcu_dereference. So
-	 * cpumask_var_t is wrapped in struct pcrypt_cpumask, which
-	 * makes it possible to use it with RCU.
- */
- struct pcrypt_cpumask {
- cpumask_var_t mask;
- } *cb_cpumask;
- struct notifier_block nblock;
-};
-
-static struct padata_pcrypt pencrypt;
-static struct padata_pcrypt pdecrypt;
+static struct padata_instance *pencrypt;
+static struct padata_instance *pdecrypt;
static struct kset *pcrypt_kset;
struct pcrypt_instance_ctx {
struct crypto_aead_spawn spawn;
+ struct padata_shell *psenc;
+ struct padata_shell *psdec;
atomic_t tfm_count;
};
@@ -58,33 +33,10 @@ struct pcrypt_aead_ctx {
unsigned int cb_cpu;
};
-static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
- struct padata_pcrypt *pcrypt)
+static inline struct pcrypt_instance_ctx *pcrypt_tfm_ictx(
+ struct crypto_aead *tfm)
{
- unsigned int cpu_index, cpu, i;
- struct pcrypt_cpumask *cpumask;
-
- cpu = *cb_cpu;
-
- rcu_read_lock_bh();
- cpumask = rcu_dereference_bh(pcrypt->cb_cpumask);
- if (cpumask_test_cpu(cpu, cpumask->mask))
- goto out;
-
- if (!cpumask_weight(cpumask->mask))
- goto out;
-
- cpu_index = cpu % cpumask_weight(cpumask->mask);
-
- cpu = cpumask_first(cpumask->mask);
- for (i = 0; i < cpu_index; i++)
- cpu = cpumask_next(cpu, cpumask->mask);
-
- *cb_cpu = cpu;
-
-out:
- rcu_read_unlock_bh();
- return padata_do_parallel(pcrypt->pinst, padata, cpu);
+ return aead_instance_ctx(aead_alg_instance(tfm));
}
static int pcrypt_aead_setkey(struct crypto_aead *parent,
@@ -118,7 +70,6 @@ static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
struct padata_priv *padata = pcrypt_request_padata(preq);
padata->info = err;
- req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
padata_do_serial(padata);
}
@@ -145,6 +96,9 @@ static int pcrypt_aead_encrypt(struct aead_request *req)
struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
u32 flags = aead_request_flags(req);
+ struct pcrypt_instance_ctx *ictx;
+
+ ictx = pcrypt_tfm_ictx(aead);
memset(padata, 0, sizeof(struct padata_priv));
@@ -158,7 +112,7 @@ static int pcrypt_aead_encrypt(struct aead_request *req)
req->cryptlen, req->iv);
aead_request_set_ad(creq, req->assoclen);
- err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
+ err = padata_do_parallel(ictx->psenc, padata, &ctx->cb_cpu);
if (!err)
return -EINPROGRESS;
@@ -187,6 +141,9 @@ static int pcrypt_aead_decrypt(struct aead_request *req)
struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
u32 flags = aead_request_flags(req);
+ struct pcrypt_instance_ctx *ictx;
+
+ ictx = pcrypt_tfm_ictx(aead);
memset(padata, 0, sizeof(struct padata_priv));
@@ -200,7 +157,7 @@ static int pcrypt_aead_decrypt(struct aead_request *req)
req->cryptlen, req->iv);
aead_request_set_ad(creq, req->assoclen);
- err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
+ err = padata_do_parallel(ictx->psdec, padata, &ctx->cb_cpu);
if (!err)
return -EINPROGRESS;
@@ -247,6 +204,8 @@ static void pcrypt_free(struct aead_instance *inst)
struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);
crypto_drop_aead(&ctx->spawn);
+ padata_free_shell(ctx->psdec);
+ padata_free_shell(ctx->psenc);
kfree(inst);
}
@@ -288,12 +247,21 @@ static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
if (!inst)
return -ENOMEM;
+ err = -ENOMEM;
+
ctx = aead_instance_ctx(inst);
- crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));
+ ctx->psenc = padata_alloc_shell(pencrypt);
+ if (!ctx->psenc)
+ goto out_free_inst;
+
+ ctx->psdec = padata_alloc_shell(pdecrypt);
+ if (!ctx->psdec)
+ goto out_free_psenc;
- err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
+ err = crypto_grab_aead(&ctx->spawn, aead_crypto_instance(inst),
+ name, 0, 0);
if (err)
- goto out_free_inst;
+ goto out_free_psdec;
alg = crypto_spawn_aead_alg(&ctx->spawn);
err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
@@ -326,6 +294,10 @@ out:
out_drop_aead:
crypto_drop_aead(&ctx->spawn);
+out_free_psdec:
+ padata_free_shell(ctx->psdec);
+out_free_psenc:
+ padata_free_shell(ctx->psenc);
out_free_inst:
kfree(inst);
goto out;
@@ -347,36 +319,6 @@ static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
return -EINVAL;
}
-static int pcrypt_cpumask_change_notify(struct notifier_block *self,
- unsigned long val, void *data)
-{
- struct padata_pcrypt *pcrypt;
- struct pcrypt_cpumask *new_mask, *old_mask;
- struct padata_cpumask *cpumask = (struct padata_cpumask *)data;
-
- if (!(val & PADATA_CPU_SERIAL))
- return 0;
-
- pcrypt = container_of(self, struct padata_pcrypt, nblock);
- new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
- if (!new_mask)
- return -ENOMEM;
- if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
- kfree(new_mask);
- return -ENOMEM;
- }
-
- old_mask = pcrypt->cb_cpumask;
-
- cpumask_copy(new_mask->mask, cpumask->cbcpu);
- rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
- synchronize_rcu();
-
- free_cpumask_var(old_mask->mask);
- kfree(old_mask);
- return 0;
-}
-
static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
int ret;
@@ -389,71 +331,25 @@ static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
return ret;
}
-static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
- const char *name)
+static int pcrypt_init_padata(struct padata_instance **pinst, const char *name)
{
int ret = -ENOMEM;
- struct pcrypt_cpumask *mask;
-
- get_online_cpus();
-
- pcrypt->wq = alloc_workqueue("%s", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
- 1, name);
- if (!pcrypt->wq)
- goto err;
-
- pcrypt->pinst = padata_alloc_possible(pcrypt->wq);
- if (!pcrypt->pinst)
- goto err_destroy_workqueue;
-
- mask = kmalloc(sizeof(*mask), GFP_KERNEL);
- if (!mask)
- goto err_free_padata;
- if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
- kfree(mask);
- goto err_free_padata;
- }
- cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
- rcu_assign_pointer(pcrypt->cb_cpumask, mask);
+ *pinst = padata_alloc_possible(name);
+ if (!*pinst)
+ return ret;
- pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
- ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
+ ret = pcrypt_sysfs_add(*pinst, name);
if (ret)
- goto err_free_cpumask;
-
- ret = pcrypt_sysfs_add(pcrypt->pinst, name);
- if (ret)
- goto err_unregister_notifier;
-
- put_online_cpus();
-
- return ret;
-
-err_unregister_notifier:
- padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
-err_free_cpumask:
- free_cpumask_var(mask->mask);
- kfree(mask);
-err_free_padata:
- padata_free(pcrypt->pinst);
-err_destroy_workqueue:
- destroy_workqueue(pcrypt->wq);
-err:
- put_online_cpus();
+ padata_free(*pinst);
return ret;
}
-static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
+static void pcrypt_fini_padata(struct padata_instance *pinst)
{
- free_cpumask_var(pcrypt->cb_cpumask->mask);
- kfree(pcrypt->cb_cpumask);
-
- padata_stop(pcrypt->pinst);
- padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
- destroy_workqueue(pcrypt->wq);
- padata_free(pcrypt->pinst);
+ padata_stop(pinst);
+ padata_free(pinst);
}
static struct crypto_template pcrypt_tmpl = {
@@ -478,13 +374,13 @@ static int __init pcrypt_init(void)
if (err)
goto err_deinit_pencrypt;
- padata_start(pencrypt.pinst);
- padata_start(pdecrypt.pinst);
+ padata_start(pencrypt);
+ padata_start(pdecrypt);
return crypto_register_template(&pcrypt_tmpl);
err_deinit_pencrypt:
- pcrypt_fini_padata(&pencrypt);
+ pcrypt_fini_padata(pencrypt);
err_unreg_kset:
kset_unregister(pcrypt_kset);
err:
@@ -493,11 +389,12 @@ err:
static void __exit pcrypt_exit(void)
{
- pcrypt_fini_padata(&pencrypt);
- pcrypt_fini_padata(&pdecrypt);
+ crypto_unregister_template(&pcrypt_tmpl);
+
+ pcrypt_fini_padata(pencrypt);
+ pcrypt_fini_padata(pdecrypt);
kset_unregister(pcrypt_kset);
- crypto_unregister_template(&pcrypt_tmpl);
}
subsys_initcall(pcrypt_init);
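
The rewrite above replaces pcrypt's notifier-maintained callback-CPU mask with padata's own bookkeeping: the module keeps two bare padata_instance pointers, each template instance allocates a padata_shell per direction, and padata_do_parallel() now selects the callback CPU itself. A condensed sketch of the lifecycle, using only calls visible in this patch:

/* template create time (per instance, per direction): */
ctx->psenc = padata_alloc_shell(pencrypt);

/* request time: returns 0 once queued, so the tfm reports -EINPROGRESS */
err = padata_do_parallel(ctx->psenc, padata, &ctx->cb_cpu);

/* completion callback: */
padata_do_serial(padata);

/* template free time: */
padata_free_shell(ctx->psenc);
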
diff --git a/crypto/poly1305_generic.c b/crypto/poly1305_generic.c
index adc40298c749..94af47eb6fa6 100644
--- a/crypto/poly1305_generic.c
+++ b/crypto/poly1305_generic.c
@@ -13,65 +13,33 @@
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
-#include <crypto/poly1305.h>
+#include <crypto/internal/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/unaligned.h>
-static inline u64 mlt(u64 a, u64 b)
-{
- return a * b;
-}
-
-static inline u32 sr(u64 v, u_char n)
-{
- return v >> n;
-}
-
-static inline u32 and(u32 v, u32 mask)
-{
- return v & mask;
-}
-
-int crypto_poly1305_init(struct shash_desc *desc)
+static int crypto_poly1305_init(struct shash_desc *desc)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
poly1305_core_init(&dctx->h);
dctx->buflen = 0;
- dctx->rset = false;
+ dctx->rset = 0;
dctx->sset = false;
return 0;
}
-EXPORT_SYMBOL_GPL(crypto_poly1305_init);
-void poly1305_core_setkey(struct poly1305_key *key, const u8 *raw_key)
-{
-	/* r &= 0x0ffffffc0ffffffc0ffffffc0fffffff */
- key->r[0] = (get_unaligned_le32(raw_key + 0) >> 0) & 0x3ffffff;
- key->r[1] = (get_unaligned_le32(raw_key + 3) >> 2) & 0x3ffff03;
- key->r[2] = (get_unaligned_le32(raw_key + 6) >> 4) & 0x3ffc0ff;
- key->r[3] = (get_unaligned_le32(raw_key + 9) >> 6) & 0x3f03fff;
- key->r[4] = (get_unaligned_le32(raw_key + 12) >> 8) & 0x00fffff;
-}
-EXPORT_SYMBOL_GPL(poly1305_core_setkey);
-
-/*
- * Poly1305 requires a unique key for each tag, which implies that we can't set
- * it on the tfm that gets accessed by multiple users simultaneously. Instead we
- * expect the key as the first 32 bytes in the update() call.
- */
-unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
- const u8 *src, unsigned int srclen)
+static unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
+ const u8 *src, unsigned int srclen)
{
if (!dctx->sset) {
if (!dctx->rset && srclen >= POLY1305_BLOCK_SIZE) {
- poly1305_core_setkey(&dctx->r, src);
+ poly1305_core_setkey(&dctx->core_r, src);
src += POLY1305_BLOCK_SIZE;
srclen -= POLY1305_BLOCK_SIZE;
- dctx->rset = true;
+ dctx->rset = 2;
}
if (srclen >= POLY1305_BLOCK_SIZE) {
dctx->s[0] = get_unaligned_le32(src + 0);
@@ -85,86 +53,9 @@ unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
}
return srclen;
}
-EXPORT_SYMBOL_GPL(crypto_poly1305_setdesckey);
-
-static void poly1305_blocks_internal(struct poly1305_state *state,
- const struct poly1305_key *key,
- const void *src, unsigned int nblocks,
- u32 hibit)
-{
- u32 r0, r1, r2, r3, r4;
- u32 s1, s2, s3, s4;
- u32 h0, h1, h2, h3, h4;
- u64 d0, d1, d2, d3, d4;
-
- if (!nblocks)
- return;
-
- r0 = key->r[0];
- r1 = key->r[1];
- r2 = key->r[2];
- r3 = key->r[3];
- r4 = key->r[4];
-
- s1 = r1 * 5;
- s2 = r2 * 5;
- s3 = r3 * 5;
- s4 = r4 * 5;
-
- h0 = state->h[0];
- h1 = state->h[1];
- h2 = state->h[2];
- h3 = state->h[3];
- h4 = state->h[4];
-
- do {
- /* h += m[i] */
- h0 += (get_unaligned_le32(src + 0) >> 0) & 0x3ffffff;
- h1 += (get_unaligned_le32(src + 3) >> 2) & 0x3ffffff;
- h2 += (get_unaligned_le32(src + 6) >> 4) & 0x3ffffff;
- h3 += (get_unaligned_le32(src + 9) >> 6) & 0x3ffffff;
- h4 += (get_unaligned_le32(src + 12) >> 8) | hibit;
-
- /* h *= r */
- d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) +
- mlt(h3, s2) + mlt(h4, s1);
- d1 = mlt(h0, r1) + mlt(h1, r0) + mlt(h2, s4) +
- mlt(h3, s3) + mlt(h4, s2);
- d2 = mlt(h0, r2) + mlt(h1, r1) + mlt(h2, r0) +
- mlt(h3, s4) + mlt(h4, s3);
- d3 = mlt(h0, r3) + mlt(h1, r2) + mlt(h2, r1) +
- mlt(h3, r0) + mlt(h4, s4);
- d4 = mlt(h0, r4) + mlt(h1, r3) + mlt(h2, r2) +
- mlt(h3, r1) + mlt(h4, r0);
-
- /* (partial) h %= p */
- d1 += sr(d0, 26); h0 = and(d0, 0x3ffffff);
- d2 += sr(d1, 26); h1 = and(d1, 0x3ffffff);
- d3 += sr(d2, 26); h2 = and(d2, 0x3ffffff);
- d4 += sr(d3, 26); h3 = and(d3, 0x3ffffff);
- h0 += sr(d4, 26) * 5; h4 = and(d4, 0x3ffffff);
- h1 += h0 >> 26; h0 = h0 & 0x3ffffff;
-
- src += POLY1305_BLOCK_SIZE;
- } while (--nblocks);
-
- state->h[0] = h0;
- state->h[1] = h1;
- state->h[2] = h2;
- state->h[3] = h3;
- state->h[4] = h4;
-}
-void poly1305_core_blocks(struct poly1305_state *state,
- const struct poly1305_key *key,
- const void *src, unsigned int nblocks)
-{
- poly1305_blocks_internal(state, key, src, nblocks, 1 << 24);
-}
-EXPORT_SYMBOL_GPL(poly1305_core_blocks);
-
-static void poly1305_blocks(struct poly1305_desc_ctx *dctx,
- const u8 *src, unsigned int srclen, u32 hibit)
+static void poly1305_blocks(struct poly1305_desc_ctx *dctx, const u8 *src,
+ unsigned int srclen)
{
unsigned int datalen;
@@ -174,12 +65,12 @@ static void poly1305_blocks(struct poly1305_desc_ctx *dctx,
srclen = datalen;
}
- poly1305_blocks_internal(&dctx->h, &dctx->r,
- src, srclen / POLY1305_BLOCK_SIZE, hibit);
+ poly1305_core_blocks(&dctx->h, &dctx->core_r, src,
+ srclen / POLY1305_BLOCK_SIZE, 1);
}
-int crypto_poly1305_update(struct shash_desc *desc,
- const u8 *src, unsigned int srclen)
+static int crypto_poly1305_update(struct shash_desc *desc,
+ const u8 *src, unsigned int srclen)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
unsigned int bytes;
@@ -193,13 +84,13 @@ int crypto_poly1305_update(struct shash_desc *desc,
if (dctx->buflen == POLY1305_BLOCK_SIZE) {
poly1305_blocks(dctx, dctx->buf,
- POLY1305_BLOCK_SIZE, 1 << 24);
+ POLY1305_BLOCK_SIZE);
dctx->buflen = 0;
}
}
if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
- poly1305_blocks(dctx, src, srclen, 1 << 24);
+ poly1305_blocks(dctx, src, srclen);
src += srclen - (srclen % POLY1305_BLOCK_SIZE);
srclen %= POLY1305_BLOCK_SIZE;
}
@@ -211,87 +102,17 @@ int crypto_poly1305_update(struct shash_desc *desc,
return 0;
}
-EXPORT_SYMBOL_GPL(crypto_poly1305_update);
-void poly1305_core_emit(const struct poly1305_state *state, void *dst)
-{
- u32 h0, h1, h2, h3, h4;
- u32 g0, g1, g2, g3, g4;
- u32 mask;
-
- /* fully carry h */
- h0 = state->h[0];
- h1 = state->h[1];
- h2 = state->h[2];
- h3 = state->h[3];
- h4 = state->h[4];
-
- h2 += (h1 >> 26); h1 = h1 & 0x3ffffff;
- h3 += (h2 >> 26); h2 = h2 & 0x3ffffff;
- h4 += (h3 >> 26); h3 = h3 & 0x3ffffff;
- h0 += (h4 >> 26) * 5; h4 = h4 & 0x3ffffff;
- h1 += (h0 >> 26); h0 = h0 & 0x3ffffff;
-
- /* compute h + -p */
- g0 = h0 + 5;
- g1 = h1 + (g0 >> 26); g0 &= 0x3ffffff;
- g2 = h2 + (g1 >> 26); g1 &= 0x3ffffff;
- g3 = h3 + (g2 >> 26); g2 &= 0x3ffffff;
- g4 = h4 + (g3 >> 26) - (1 << 26); g3 &= 0x3ffffff;
-
- /* select h if h < p, or h + -p if h >= p */
- mask = (g4 >> ((sizeof(u32) * 8) - 1)) - 1;
- g0 &= mask;
- g1 &= mask;
- g2 &= mask;
- g3 &= mask;
- g4 &= mask;
- mask = ~mask;
- h0 = (h0 & mask) | g0;
- h1 = (h1 & mask) | g1;
- h2 = (h2 & mask) | g2;
- h3 = (h3 & mask) | g3;
- h4 = (h4 & mask) | g4;
-
- /* h = h % (2^128) */
- put_unaligned_le32((h0 >> 0) | (h1 << 26), dst + 0);
- put_unaligned_le32((h1 >> 6) | (h2 << 20), dst + 4);
- put_unaligned_le32((h2 >> 12) | (h3 << 14), dst + 8);
- put_unaligned_le32((h3 >> 18) | (h4 << 8), dst + 12);
-}
-EXPORT_SYMBOL_GPL(poly1305_core_emit);
-
-int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
+static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
- __le32 digest[4];
- u64 f = 0;
if (unlikely(!dctx->sset))
return -ENOKEY;
- if (unlikely(dctx->buflen)) {
- dctx->buf[dctx->buflen++] = 1;
- memset(dctx->buf + dctx->buflen, 0,
- POLY1305_BLOCK_SIZE - dctx->buflen);
- poly1305_blocks(dctx, dctx->buf, POLY1305_BLOCK_SIZE, 0);
- }
-
- poly1305_core_emit(&dctx->h, digest);
-
- /* mac = (h + s) % (2^128) */
- f = (f >> 32) + le32_to_cpu(digest[0]) + dctx->s[0];
- put_unaligned_le32(f, dst + 0);
- f = (f >> 32) + le32_to_cpu(digest[1]) + dctx->s[1];
- put_unaligned_le32(f, dst + 4);
- f = (f >> 32) + le32_to_cpu(digest[2]) + dctx->s[2];
- put_unaligned_le32(f, dst + 8);
- f = (f >> 32) + le32_to_cpu(digest[3]) + dctx->s[3];
- put_unaligned_le32(f, dst + 12);
-
+ poly1305_final_generic(dctx, dst);
return 0;
}
-EXPORT_SYMBOL_GPL(crypto_poly1305_final);
static struct shash_alg poly1305_alg = {
.digestsize = POLY1305_DIGEST_SIZE,
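
The deleted poly1305_core_setkey() above (now living in lib/crypto) loads r into five 26-bit limbs while applying the standard Poly1305 clamp r &= 0x0ffffffc0ffffffc0ffffffc0fffffff; the five masks (0x3ffffff, 0x3ffff03, 0x3ffc0ff, 0x3f03fff, 0x00fffff) are that same clamp re-expressed after the radix-2^26 split. The clamp written byte-wise, as a standalone reference sketch:

#include <stdint.h>

/* Clear the top four bits of r[3], r[7], r[11], r[15] and the low two
 * bits of r[4], r[8], r[12] -- the Poly1305 key clamp from RFC 8439. */
static void poly1305_clamp(uint8_t r[16])
{
	r[3] &= 0x0f;  r[7] &= 0x0f;  r[11] &= 0x0f;  r[15] &= 0x0f;
	r[4] &= 0xfc;  r[8] &= 0xfc;  r[12] &= 0xfc;
}
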
diff --git a/crypto/rsa-pkcs1pad.c b/crypto/rsa-pkcs1pad.c
index 0aa489711ec4..176b63afec8d 100644
--- a/crypto/rsa-pkcs1pad.c
+++ b/crypto/rsa-pkcs1pad.c
@@ -598,6 +598,7 @@ static int pkcs1pad_create(struct crypto_template *tmpl, struct rtattr **tb)
{
const struct rsa_asn1_template *digest_info;
struct crypto_attr_type *algt;
+ u32 mask;
struct akcipher_instance *inst;
struct pkcs1pad_inst_ctx *ctx;
struct crypto_akcipher_spawn *spawn;
@@ -613,6 +614,8 @@ static int pkcs1pad_create(struct crypto_template *tmpl, struct rtattr **tb)
if ((algt->type ^ CRYPTO_ALG_TYPE_AKCIPHER) & algt->mask)
return -EINVAL;
+ mask = crypto_requires_sync(algt->type, algt->mask);
+
rsa_alg_name = crypto_attr_alg_name(tb[1]);
if (IS_ERR(rsa_alg_name))
return PTR_ERR(rsa_alg_name);
@@ -636,9 +639,8 @@ static int pkcs1pad_create(struct crypto_template *tmpl, struct rtattr **tb)
spawn = &ctx->spawn;
ctx->digest_info = digest_info;
- crypto_set_spawn(&spawn->base, akcipher_crypto_instance(inst));
- err = crypto_grab_akcipher(spawn, rsa_alg_name, 0,
- crypto_requires_sync(algt->type, algt->mask));
+ err = crypto_grab_akcipher(spawn, akcipher_crypto_instance(inst),
+ rsa_alg_name, 0, mask);
if (err)
goto out_free_inst;
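
The spawn-grabbing change above recurs throughout this series (crypto_grab_aead() in pcrypt, and the new crypto_grab_shash() below): the crypto_grab_*() helpers now take the owning instance as an explicit argument, replacing the separate crypto_set_spawn()/crypto_set_*_spawn() step. Side by side, as fragments from this very hunk:

/* before: bind the spawn, then grab */
crypto_set_spawn(&spawn->base, akcipher_crypto_instance(inst));
err = crypto_grab_akcipher(spawn, rsa_alg_name, 0, mask);

/* after: one call, instance passed explicitly */
err = crypto_grab_akcipher(spawn, akcipher_crypto_instance(inst),
			   rsa_alg_name, 0, mask);
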
diff --git a/crypto/scompress.c b/crypto/scompress.c
index 4d50750d01c6..738f4f8f0f41 100644
--- a/crypto/scompress.c
+++ b/crypto/scompress.c
@@ -266,9 +266,9 @@ int crypto_register_scomp(struct scomp_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_register_scomp);
-int crypto_unregister_scomp(struct scomp_alg *alg)
+void crypto_unregister_scomp(struct scomp_alg *alg)
{
- return crypto_unregister_alg(&alg->base);
+ crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_scomp);
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
index 96d222c32acc..f124b9b54e15 100644
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -18,8 +18,6 @@
#include <linux/slab.h>
#include <linux/string.h>
-static void seqiv_free(struct crypto_instance *inst);
-
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
struct aead_request *subreq = aead_request_ctx(req);
@@ -159,15 +157,11 @@ static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.base.cra_ctxsize += inst->alg.ivsize;
err = aead_register_instance(tmpl, inst);
- if (err)
- goto free_inst;
-
-out:
- return err;
-
+ if (err) {
free_inst:
- aead_geniv_free(inst);
- goto out;
+ inst->free(inst);
+ }
+ return err;
}
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
@@ -184,15 +178,9 @@ static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
return seqiv_aead_create(tmpl, tb);
}
-static void seqiv_free(struct crypto_instance *inst)
-{
- aead_geniv_free(aead_instance(inst));
-}
-
static struct crypto_template seqiv_tmpl = {
.name = "seqiv",
.create = seqiv_create,
- .free = seqiv_free,
.module = THIS_MODULE,
};
diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c
index 56fa665a4f01..492c1d0bfe06 100644
--- a/crypto/serpent_generic.c
+++ b/crypto/serpent_generic.c
@@ -449,8 +449,9 @@ int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
}
EXPORT_SYMBOL_GPL(serpent_setkey);
-void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+void __serpent_encrypt(const void *c, u8 *dst, const u8 *src)
{
+ const struct serpent_ctx *ctx = c;
const u32 *k = ctx->expkey;
const __le32 *s = (const __le32 *)src;
__le32 *d = (__le32 *)dst;
@@ -514,8 +515,9 @@ static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
__serpent_encrypt(ctx, dst, src);
}
-void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+void __serpent_decrypt(const void *c, u8 *dst, const u8 *src)
{
+ const struct serpent_ctx *ctx = c;
const u32 *k = ctx->expkey;
const __le32 *s = (const __le32 *)src;
__le32 *d = (__le32 *)dst;
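
Dropping the concrete context type from __serpent_encrypt()/__serpent_decrypt() lets other code store them behind a generic function pointer without casts; the body recovers the typed pointer on entry, as shown above. The callback shape this enables, sketched with hypothetical names:

typedef void (*crypt_one_t)(const void *ctx, u8 *dst, const u8 *src);

/* Any cipher with this shape can be driven by a shared helper: */
static void crypt_blocks(crypt_one_t fn, const void *ctx, u8 *dst,
			 const u8 *src, unsigned int nblocks)
{
	while (nblocks--) {
		fn(ctx, dst, src);
		dst += SERPENT_BLOCK_SIZE;	/* 16 bytes */
		src += SERPENT_BLOCK_SIZE;
	}
}

/* e.g.: crypt_blocks(__serpent_encrypt, ctx, dst, src, n); */
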
diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c
index b7502a96a0d4..f2d7095d4f2d 100644
--- a/crypto/sha256_generic.c
+++ b/crypto/sha256_generic.c
@@ -1,11 +1,6 @@
// SPDX-License-Identifier: GPL-2.0-or-later
/*
- * Cryptographic API.
- *
- * SHA-256, as specified in
- * http://csrc.nist.gov/groups/STM/cavp/documents/shs/sha256-384-512.pdf
- *
- * SHA-256 code by Jean-Luc Cooke <jlcooke@certainkey.com>.
+ * Crypto API wrapper for the generic SHA256 code from lib/crypto/sha256.c
*
* Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
* Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
@@ -38,229 +33,44 @@ const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
};
EXPORT_SYMBOL_GPL(sha256_zero_message_hash);
-static inline u32 Ch(u32 x, u32 y, u32 z)
-{
- return z ^ (x & (y ^ z));
-}
-
-static inline u32 Maj(u32 x, u32 y, u32 z)
+static int crypto_sha256_init(struct shash_desc *desc)
{
- return (x & y) | (z & (x | y));
+ return sha256_init(shash_desc_ctx(desc));
}
-#define e0(x) (ror32(x, 2) ^ ror32(x,13) ^ ror32(x,22))
-#define e1(x) (ror32(x, 6) ^ ror32(x,11) ^ ror32(x,25))
-#define s0(x) (ror32(x, 7) ^ ror32(x,18) ^ (x >> 3))
-#define s1(x) (ror32(x,17) ^ ror32(x,19) ^ (x >> 10))
-
-static inline void LOAD_OP(int I, u32 *W, const u8 *input)
+static int crypto_sha224_init(struct shash_desc *desc)
{
- W[I] = get_unaligned_be32((__u32 *)input + I);
-}
-
-static inline void BLEND_OP(int I, u32 *W)
-{
- W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16];
-}
-
-static void sha256_transform(u32 *state, const u8 *input)
-{
- u32 a, b, c, d, e, f, g, h, t1, t2;
- u32 W[64];
- int i;
-
- /* load the input */
- for (i = 0; i < 16; i++)
- LOAD_OP(i, W, input);
-
- /* now blend */
- for (i = 16; i < 64; i++)
- BLEND_OP(i, W);
-
- /* load the state into our registers */
- a=state[0]; b=state[1]; c=state[2]; d=state[3];
- e=state[4]; f=state[5]; g=state[6]; h=state[7];
-
- /* now iterate */
- t1 = h + e1(e) + Ch(e,f,g) + 0x428a2f98 + W[ 0];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x71374491 + W[ 1];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0xb5c0fbcf + W[ 2];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0xe9b5dba5 + W[ 3];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x3956c25b + W[ 4];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x59f111f1 + W[ 5];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x923f82a4 + W[ 6];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0xab1c5ed5 + W[ 7];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0xd807aa98 + W[ 8];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x12835b01 + W[ 9];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x243185be + W[10];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x550c7dc3 + W[11];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x72be5d74 + W[12];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x80deb1fe + W[13];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x9bdc06a7 + W[14];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0xc19bf174 + W[15];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0xe49b69c1 + W[16];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0xefbe4786 + W[17];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x0fc19dc6 + W[18];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x240ca1cc + W[19];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x2de92c6f + W[20];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x4a7484aa + W[21];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x5cb0a9dc + W[22];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x76f988da + W[23];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x983e5152 + W[24];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0xa831c66d + W[25];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0xb00327c8 + W[26];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0xbf597fc7 + W[27];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0xc6e00bf3 + W[28];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0xd5a79147 + W[29];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x06ca6351 + W[30];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x14292967 + W[31];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x27b70a85 + W[32];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x2e1b2138 + W[33];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x4d2c6dfc + W[34];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x53380d13 + W[35];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x650a7354 + W[36];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x766a0abb + W[37];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x81c2c92e + W[38];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x92722c85 + W[39];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0xa2bfe8a1 + W[40];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0xa81a664b + W[41];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0xc24b8b70 + W[42];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0xc76c51a3 + W[43];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0xd192e819 + W[44];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0xd6990624 + W[45];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0xf40e3585 + W[46];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x106aa070 + W[47];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x19a4c116 + W[48];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x1e376c08 + W[49];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x2748774c + W[50];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x34b0bcb5 + W[51];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x391c0cb3 + W[52];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x4ed8aa4a + W[53];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x5b9cca4f + W[54];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x682e6ff3 + W[55];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x748f82ee + W[56];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x78a5636f + W[57];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x84c87814 + W[58];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x8cc70208 + W[59];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x90befffa + W[60];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0xa4506ceb + W[61];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0xbef9a3f7 + W[62];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0xc67178f2 + W[63];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- state[0] += a; state[1] += b; state[2] += c; state[3] += d;
- state[4] += e; state[5] += f; state[6] += g; state[7] += h;
-
- /* clear any sensitive info... */
- a = b = c = d = e = f = g = h = t1 = t2 = 0;
- memzero_explicit(W, 64 * sizeof(u32));
-}
-
-static void sha256_generic_block_fn(struct sha256_state *sst, u8 const *src,
- int blocks)
-{
- while (blocks--) {
- sha256_transform(sst->state, src);
- src += SHA256_BLOCK_SIZE;
- }
+ return sha224_init(shash_desc_ctx(desc));
}
int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
- return sha256_base_do_update(desc, data, len, sha256_generic_block_fn);
+ return sha256_update(shash_desc_ctx(desc), data, len);
}
EXPORT_SYMBOL(crypto_sha256_update);
-static int sha256_final(struct shash_desc *desc, u8 *out)
+static int crypto_sha256_final(struct shash_desc *desc, u8 *out)
{
- sha256_base_do_finalize(desc, sha256_generic_block_fn);
- return sha256_base_finish(desc, out);
+ if (crypto_shash_digestsize(desc->tfm) == SHA224_DIGEST_SIZE)
+ return sha224_final(shash_desc_ctx(desc), out);
+ else
+ return sha256_final(shash_desc_ctx(desc), out);
}
int crypto_sha256_finup(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *hash)
{
- sha256_base_do_update(desc, data, len, sha256_generic_block_fn);
- return sha256_final(desc, hash);
+ sha256_update(shash_desc_ctx(desc), data, len);
+ return crypto_sha256_final(desc, hash);
}
EXPORT_SYMBOL(crypto_sha256_finup);
static struct shash_alg sha256_algs[2] = { {
.digestsize = SHA256_DIGEST_SIZE,
- .init = sha256_base_init,
+ .init = crypto_sha256_init,
.update = crypto_sha256_update,
- .final = sha256_final,
+ .final = crypto_sha256_final,
.finup = crypto_sha256_finup,
.descsize = sizeof(struct sha256_state),
.base = {
@@ -272,9 +82,9 @@ static struct shash_alg sha256_algs[2] = { {
}
}, {
.digestsize = SHA224_DIGEST_SIZE,
- .init = sha224_base_init,
+ .init = crypto_sha224_init,
.update = crypto_sha256_update,
- .final = sha256_final,
+ .final = crypto_sha256_final,
.finup = crypto_sha256_finup,
.descsize = sizeof(struct sha256_state),
.base = {
diff --git a/crypto/shash.c b/crypto/shash.c
index e83c5124f6eb..c075b26c2a1d 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -50,8 +50,7 @@ static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
- if (crypto_shash_alg_has_setkey(alg) &&
- !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
+ if (crypto_shash_alg_needs_key(alg))
crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}
@@ -386,18 +385,51 @@ int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
return 0;
}
+static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_shash *hash = __crypto_shash_cast(tfm);
+ struct shash_alg *alg = crypto_shash_alg(hash);
+
+ alg->exit_tfm(hash);
+}
+
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_shash *hash = __crypto_shash_cast(tfm);
struct shash_alg *alg = crypto_shash_alg(hash);
+ int err;
hash->descsize = alg->descsize;
shash_set_needkey(hash, alg);
+ if (alg->exit_tfm)
+ tfm->exit = crypto_shash_exit_tfm;
+
+ if (!alg->init_tfm)
+ return 0;
+
+ err = alg->init_tfm(hash);
+ if (err)
+ return err;
+
+ /* ->init_tfm() may have increased the descsize. */
+ if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
+ if (alg->exit_tfm)
+ alg->exit_tfm(hash);
+ return -EINVAL;
+ }
+
return 0;
}
+static void crypto_shash_free_instance(struct crypto_instance *inst)
+{
+ struct shash_instance *shash = shash_instance(inst);
+
+ shash->free(shash);
+}
+
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
@@ -434,6 +466,7 @@ static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
static const struct crypto_type crypto_shash_type = {
.extsize = crypto_alg_extsize,
.init_tfm = crypto_shash_init_tfm,
+ .free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
.show = crypto_shash_show,
#endif
@@ -444,6 +477,15 @@ static const struct crypto_type crypto_shash_type = {
.tfmsize = offsetof(struct crypto_shash, base),
};
+int crypto_grab_shash(struct crypto_shash_spawn *spawn,
+ struct crypto_instance *inst,
+ const char *name, u32 type, u32 mask)
+{
+ spawn->base.frontend = &crypto_shash_type;
+ return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_grab_shash);
+
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
u32 mask)
{
@@ -495,9 +537,9 @@ int crypto_register_shash(struct shash_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
-int crypto_unregister_shash(struct shash_alg *alg)
+void crypto_unregister_shash(struct shash_alg *alg)
{
- return crypto_unregister_alg(&alg->base);
+ crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
@@ -521,19 +563,12 @@ err:
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);
-int crypto_unregister_shashes(struct shash_alg *algs, int count)
+void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
- int i, ret;
+ int i;
- for (i = count - 1; i >= 0; --i) {
- ret = crypto_unregister_shash(&algs[i]);
- if (ret)
- pr_err("Failed to unregister %s %s: %d\n",
- algs[i].base.cra_driver_name,
- algs[i].base.cra_name, ret);
- }
-
- return 0;
+ for (i = count - 1; i >= 0; --i)
+ crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
@@ -542,6 +577,9 @@ int shash_register_instance(struct crypto_template *tmpl,
{
int err;
+ if (WARN_ON(!inst->free))
+ return -EINVAL;
+
err = shash_prepare_alg(&inst->alg);
if (err)
return err;
@@ -550,31 +588,12 @@ int shash_register_instance(struct crypto_template *tmpl,
}
EXPORT_SYMBOL_GPL(shash_register_instance);
-void shash_free_instance(struct crypto_instance *inst)
-{
- crypto_drop_spawn(crypto_instance_ctx(inst));
- kfree(shash_instance(inst));
-}
-EXPORT_SYMBOL_GPL(shash_free_instance);
-
-int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
- struct shash_alg *alg,
- struct crypto_instance *inst)
+void shash_free_singlespawn_instance(struct shash_instance *inst)
{
- return crypto_init_spawn2(&spawn->base, &alg->base, inst,
- &crypto_shash_type);
-}
-EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
-
-struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
-{
- struct crypto_alg *alg;
-
- alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
- return IS_ERR(alg) ? ERR_CAST(alg) :
- container_of(alg, struct shash_alg, base);
+ crypto_drop_spawn(shash_instance_ctx(inst));
+ kfree(inst);
}
-EXPORT_SYMBOL_GPL(shash_attr_alg);
+EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
diff --git a/crypto/simd.c b/crypto/simd.c
index 48876266cf2d..56885af49c24 100644
--- a/crypto/simd.c
+++ b/crypto/simd.c
@@ -52,15 +52,11 @@ static int simd_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
{
struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
struct crypto_skcipher *child = &ctx->cryptd_tfm->base;
- int err;
crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(tfm) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_skcipher_setkey(child, key, key_len);
- crypto_skcipher_set_flags(tfm, crypto_skcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_skcipher_setkey(child, key, key_len);
}
static int simd_skcipher_encrypt(struct skcipher_request *req)
@@ -295,15 +291,11 @@ static int simd_aead_setkey(struct crypto_aead *tfm, const u8 *key,
{
struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
struct crypto_aead *child = &ctx->cryptd_tfm->base;
- int err;
crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_aead_set_flags(child, crypto_aead_get_flags(tfm) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_aead_setkey(child, key, key_len);
- crypto_aead_set_flags(tfm, crypto_aead_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_aead_setkey(child, key, key_len);
}
static int simd_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
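
With the CRYPTO_TFM_RES_* bits gone from this path, a child setkey failure is reported purely through the return value, which is why these wrappers can now tail-call the child. The caller-side pattern reduces to:

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)	/* e.g. -EINVAL for a bad key length; no flags to read back */
		return err;
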
diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index 5d836fc3df3e..7221def7b9a7 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -90,7 +90,7 @@ static inline u8 *skcipher_get_spot(u8 *start, unsigned int len)
return max(start, end_page);
}
-static void skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
+static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
{
u8 *addr;
@@ -98,19 +98,21 @@ static void skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
addr = skcipher_get_spot(addr, bsize);
scatterwalk_copychunks(addr, &walk->out, bsize,
(walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1);
+ return 0;
}
int skcipher_walk_done(struct skcipher_walk *walk, int err)
{
- unsigned int n; /* bytes processed */
- bool more;
+ unsigned int n = walk->nbytes;
+ unsigned int nbytes = 0;
- if (unlikely(err < 0))
+ if (!n)
goto finish;
- n = walk->nbytes - err;
- walk->total -= n;
- more = (walk->total != 0);
+ if (likely(err >= 0)) {
+ n -= err;
+ nbytes = walk->total - n;
+ }
if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS |
SKCIPHER_WALK_SLOW |
@@ -126,7 +128,7 @@ unmap_src:
memcpy(walk->dst.virt.addr, walk->page, n);
skcipher_unmap_dst(walk);
} else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) {
- if (err) {
+ if (err > 0) {
/*
* Didn't process all bytes. Either the algorithm is
* broken, or this was the last step and it turned out
@@ -134,27 +136,29 @@ unmap_src:
* the algorithm requires it.
*/
err = -EINVAL;
- goto finish;
- }
- skcipher_done_slow(walk, n);
- goto already_advanced;
+ nbytes = 0;
+ } else
+ n = skcipher_done_slow(walk, n);
}
+ if (err > 0)
+ err = 0;
+
+ walk->total = nbytes;
+ walk->nbytes = 0;
+
scatterwalk_advance(&walk->in, n);
scatterwalk_advance(&walk->out, n);
-already_advanced:
- scatterwalk_done(&walk->in, 0, more);
- scatterwalk_done(&walk->out, 1, more);
+ scatterwalk_done(&walk->in, 0, nbytes);
+ scatterwalk_done(&walk->out, 1, nbytes);
- if (more) {
+ if (nbytes) {
crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ?
CRYPTO_TFM_REQ_MAY_SLEEP : 0);
return skcipher_walk_next(walk);
}
- err = 0;
-finish:
- walk->nbytes = 0;
+finish:
/* Short-circuit for the common/fast path. */
if (!((unsigned long)walk->buffer | (unsigned long)walk->page))
goto out;
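
The rework above keeps the long-standing driver contract for skcipher_walk_done(): pass the number of bytes left unprocessed (or a negative error), and the walker advances, finishes, or propagates the failure. A minimal sketch of a typical walk loop, with bsize and the per-step cipher work as placeholders:

	err = skcipher_walk_virt(&walk, req, false);
	while (walk.nbytes) {
		unsigned int n = walk.nbytes;

		if (n < walk.total)
			n = round_down(n, bsize);	/* only full blocks mid-stream */
		/* ... process n bytes from walk.src.virt.addr to walk.dst.virt.addr ... */
		err = skcipher_walk_done(&walk, walk.nbytes - n);
	}
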
@@ -545,15 +549,6 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk,
return err;
}
-int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
- bool atomic)
-{
- walk->total = req->cryptlen;
-
- return skcipher_walk_aead_common(walk, req, atomic);
-}
-EXPORT_SYMBOL_GPL(skcipher_walk_aead);
-
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
struct aead_request *req, bool atomic)
{
@@ -574,222 +569,12 @@ int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
}
EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt);
-static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
-{
- if (alg->cra_type == &crypto_blkcipher_type)
- return sizeof(struct crypto_blkcipher *);
-
- if (alg->cra_type == &crypto_ablkcipher_type)
- return sizeof(struct crypto_ablkcipher *);
-
- return crypto_alg_extsize(alg);
-}
-
static void skcipher_set_needkey(struct crypto_skcipher *tfm)
{
- if (tfm->keysize)
+ if (crypto_skcipher_max_keysize(tfm) != 0)
crypto_skcipher_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}
-static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
- const u8 *key, unsigned int keylen)
-{
- struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
- struct crypto_blkcipher *blkcipher = *ctx;
- int err;
-
- crypto_blkcipher_clear_flags(blkcipher, ~0);
- crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) &
- CRYPTO_TFM_REQ_MASK);
- err = crypto_blkcipher_setkey(blkcipher, key, keylen);
- crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) &
- CRYPTO_TFM_RES_MASK);
- if (unlikely(err)) {
- skcipher_set_needkey(tfm);
- return err;
- }
-
- crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
- return 0;
-}
-
-static int skcipher_crypt_blkcipher(struct skcipher_request *req,
- int (*crypt)(struct blkcipher_desc *,
- struct scatterlist *,
- struct scatterlist *,
- unsigned int))
-{
- struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
- struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
- struct blkcipher_desc desc = {
- .tfm = *ctx,
- .info = req->iv,
- .flags = req->base.flags,
- };
-
-
- return crypt(&desc, req->dst, req->src, req->cryptlen);
-}
-
-static int skcipher_encrypt_blkcipher(struct skcipher_request *req)
-{
- struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
- struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
-
- return skcipher_crypt_blkcipher(req, alg->encrypt);
-}
-
-static int skcipher_decrypt_blkcipher(struct skcipher_request *req)
-{
- struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
- struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
- struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
-
- return skcipher_crypt_blkcipher(req, alg->decrypt);
-}
-
-static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
-{
- struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
-
- crypto_free_blkcipher(*ctx);
-}
-
-static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
-{
- struct crypto_alg *calg = tfm->__crt_alg;
- struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
- struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
- struct crypto_blkcipher *blkcipher;
- struct crypto_tfm *btfm;
-
- if (!crypto_mod_get(calg))
- return -EAGAIN;
-
- btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER,
- CRYPTO_ALG_TYPE_MASK);
- if (IS_ERR(btfm)) {
- crypto_mod_put(calg);
- return PTR_ERR(btfm);
- }
-
- blkcipher = __crypto_blkcipher_cast(btfm);
- *ctx = blkcipher;
- tfm->exit = crypto_exit_skcipher_ops_blkcipher;
-
- skcipher->setkey = skcipher_setkey_blkcipher;
- skcipher->encrypt = skcipher_encrypt_blkcipher;
- skcipher->decrypt = skcipher_decrypt_blkcipher;
-
- skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher);
- skcipher->keysize = calg->cra_blkcipher.max_keysize;
-
- skcipher_set_needkey(skcipher);
-
- return 0;
-}
-
-static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm,
- const u8 *key, unsigned int keylen)
-{
- struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
- struct crypto_ablkcipher *ablkcipher = *ctx;
- int err;
-
- crypto_ablkcipher_clear_flags(ablkcipher, ~0);
- crypto_ablkcipher_set_flags(ablkcipher,
- crypto_skcipher_get_flags(tfm) &
- CRYPTO_TFM_REQ_MASK);
- err = crypto_ablkcipher_setkey(ablkcipher, key, keylen);
- crypto_skcipher_set_flags(tfm,
- crypto_ablkcipher_get_flags(ablkcipher) &
- CRYPTO_TFM_RES_MASK);
- if (unlikely(err)) {
- skcipher_set_needkey(tfm);
- return err;
- }
-
- crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
- return 0;
-}
-
-static int skcipher_crypt_ablkcipher(struct skcipher_request *req,
- int (*crypt)(struct ablkcipher_request *))
-{
- struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
- struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
- struct ablkcipher_request *subreq = skcipher_request_ctx(req);
-
- ablkcipher_request_set_tfm(subreq, *ctx);
- ablkcipher_request_set_callback(subreq, skcipher_request_flags(req),
- req->base.complete, req->base.data);
- ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
- req->iv);
-
- return crypt(subreq);
-}
-
-static int skcipher_encrypt_ablkcipher(struct skcipher_request *req)
-{
- struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
- struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
- struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
-
- return skcipher_crypt_ablkcipher(req, alg->encrypt);
-}
-
-static int skcipher_decrypt_ablkcipher(struct skcipher_request *req)
-{
- struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
- struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
- struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
-
- return skcipher_crypt_ablkcipher(req, alg->decrypt);
-}
-
-static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
-{
- struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
-
- crypto_free_ablkcipher(*ctx);
-}
-
-static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
-{
- struct crypto_alg *calg = tfm->__crt_alg;
- struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
- struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
- struct crypto_ablkcipher *ablkcipher;
- struct crypto_tfm *abtfm;
-
- if (!crypto_mod_get(calg))
- return -EAGAIN;
-
- abtfm = __crypto_alloc_tfm(calg, 0, 0);
- if (IS_ERR(abtfm)) {
- crypto_mod_put(calg);
- return PTR_ERR(abtfm);
- }
-
- ablkcipher = __crypto_ablkcipher_cast(abtfm);
- *ctx = ablkcipher;
- tfm->exit = crypto_exit_skcipher_ops_ablkcipher;
-
- skcipher->setkey = skcipher_setkey_ablkcipher;
- skcipher->encrypt = skcipher_encrypt_ablkcipher;
- skcipher->decrypt = skcipher_decrypt_ablkcipher;
-
- skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher);
- skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) +
- sizeof(struct ablkcipher_request);
- skcipher->keysize = calg->cra_ablkcipher.max_keysize;
-
- skcipher_set_needkey(skcipher);
-
- return 0;
-}
-
static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
const u8 *key, unsigned int keylen)
{
@@ -811,17 +596,15 @@ static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
return ret;
}
-static int skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
+int crypto_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
unsigned int keylen)
{
struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
unsigned long alignmask = crypto_skcipher_alignmask(tfm);
int err;
- if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
- crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen < cipher->min_keysize || keylen > cipher->max_keysize)
return -EINVAL;
- }
if ((unsigned long)key & alignmask)
err = skcipher_setkey_unaligned(tfm, key, keylen);
@@ -836,6 +619,7 @@ static int skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
return 0;
}
+EXPORT_SYMBOL_GPL(crypto_skcipher_setkey);
int crypto_skcipher_encrypt(struct skcipher_request *req)
{
@@ -848,7 +632,7 @@ int crypto_skcipher_encrypt(struct skcipher_request *req)
if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
ret = -ENOKEY;
else
- ret = tfm->encrypt(req);
+ ret = crypto_skcipher_alg(tfm)->encrypt(req);
crypto_stats_skcipher_encrypt(cryptlen, ret, alg);
return ret;
}
@@ -865,7 +649,7 @@ int crypto_skcipher_decrypt(struct skcipher_request *req)
if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
ret = -ENOKEY;
else
- ret = tfm->decrypt(req);
+ ret = crypto_skcipher_alg(tfm)->decrypt(req);
crypto_stats_skcipher_decrypt(cryptlen, ret, alg);
return ret;
}
@@ -884,18 +668,6 @@ static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
- if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type)
- return crypto_init_skcipher_ops_blkcipher(tfm);
-
- if (tfm->__crt_alg->cra_type == &crypto_ablkcipher_type)
- return crypto_init_skcipher_ops_ablkcipher(tfm);
-
- skcipher->setkey = skcipher_setkey;
- skcipher->encrypt = alg->encrypt;
- skcipher->decrypt = alg->decrypt;
- skcipher->ivsize = alg->ivsize;
- skcipher->keysize = alg->max_keysize;
-
skcipher_set_needkey(skcipher);
if (alg->exit)
@@ -960,8 +732,8 @@ static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
}
#endif
-static const struct crypto_type crypto_skcipher_type2 = {
- .extsize = crypto_skcipher_extsize,
+static const struct crypto_type crypto_skcipher_type = {
+ .extsize = crypto_alg_extsize,
.init_tfm = crypto_skcipher_init_tfm,
.free = crypto_skcipher_free_instance,
#ifdef CONFIG_PROC_FS
@@ -969,23 +741,24 @@ static const struct crypto_type crypto_skcipher_type2 = {
#endif
.report = crypto_skcipher_report,
.maskclear = ~CRYPTO_ALG_TYPE_MASK,
- .maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK,
+ .maskset = CRYPTO_ALG_TYPE_MASK,
.type = CRYPTO_ALG_TYPE_SKCIPHER,
.tfmsize = offsetof(struct crypto_skcipher, base),
};
int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
- const char *name, u32 type, u32 mask)
+ struct crypto_instance *inst,
+ const char *name, u32 type, u32 mask)
{
- spawn->base.frontend = &crypto_skcipher_type2;
- return crypto_grab_spawn(&spawn->base, name, type, mask);
+ spawn->base.frontend = &crypto_skcipher_type;
+ return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
u32 type, u32 mask)
{
- return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
+ return crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
@@ -997,7 +770,7 @@ struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
/* Only sync algorithms allowed. */
mask |= CRYPTO_ALG_ASYNC;
- tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
+ tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
/*
* Make sure we do not allocate something that might get used with
@@ -1013,12 +786,11 @@ struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
}
EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);
-int crypto_has_skcipher2(const char *alg_name, u32 type, u32 mask)
+int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask)
{
- return crypto_type_has_alg(alg_name, &crypto_skcipher_type2,
- type, mask);
+ return crypto_type_has_alg(alg_name, &crypto_skcipher_type, type, mask);
}
-EXPORT_SYMBOL_GPL(crypto_has_skcipher2);
+EXPORT_SYMBOL_GPL(crypto_has_skcipher);
static int skcipher_prepare_alg(struct skcipher_alg *alg)
{
@@ -1033,7 +805,7 @@ static int skcipher_prepare_alg(struct skcipher_alg *alg)
if (!alg->walksize)
alg->walksize = alg->chunksize;
- base->cra_type = &crypto_skcipher_type2;
+ base->cra_type = &crypto_skcipher_type;
base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
@@ -1093,6 +865,9 @@ int skcipher_register_instance(struct crypto_template *tmpl,
{
int err;
+ if (WARN_ON(!inst->free))
+ return -EINVAL;
+
err = skcipher_prepare_alg(&inst->alg);
if (err)
return err;
@@ -1105,21 +880,17 @@ static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
unsigned int keylen)
{
struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
- int err;
crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_cipher_setkey(cipher, key, keylen);
- crypto_skcipher_set_flags(tfm, crypto_cipher_get_flags(cipher) &
- CRYPTO_TFM_RES_MASK);
- return err;
+ return crypto_cipher_setkey(cipher, key, keylen);
}
static int skcipher_init_tfm_simple(struct crypto_skcipher *tfm)
{
struct skcipher_instance *inst = skcipher_alg_instance(tfm);
- struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
+ struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
struct crypto_cipher *cipher;
@@ -1140,7 +911,7 @@ static void skcipher_exit_tfm_simple(struct crypto_skcipher *tfm)
static void skcipher_free_instance_simple(struct skcipher_instance *inst)
{
- crypto_drop_spawn(skcipher_instance_ctx(inst));
+ crypto_drop_cipher(skcipher_instance_ctx(inst));
kfree(inst);
}
@@ -1156,21 +927,18 @@ static void skcipher_free_instance_simple(struct skcipher_instance *inst)
*
* @tmpl: the template being instantiated
* @tb: the template parameters
- * @cipher_alg_ret: on success, a pointer to the underlying cipher algorithm is
- * returned here. It must be dropped with crypto_mod_put().
*
* Return: a pointer to the new instance, or an ERR_PTR(). The caller still
* needs to register the instance.
*/
-struct skcipher_instance *
-skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
- struct crypto_alg **cipher_alg_ret)
+struct skcipher_instance *skcipher_alloc_instance_simple(
+ struct crypto_template *tmpl, struct rtattr **tb)
{
struct crypto_attr_type *algt;
- struct crypto_alg *cipher_alg;
- struct skcipher_instance *inst;
- struct crypto_spawn *spawn;
u32 mask;
+ struct skcipher_instance *inst;
+ struct crypto_cipher_spawn *spawn;
+ struct crypto_alg *cipher_alg;
int err;
algt = crypto_get_attr_type(tb);
@@ -1180,31 +948,25 @@ skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
return ERR_PTR(-EINVAL);
- mask = CRYPTO_ALG_TYPE_MASK |
- crypto_requires_off(algt->type, algt->mask,
- CRYPTO_ALG_NEED_FALLBACK);
-
- cipher_alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
- if (IS_ERR(cipher_alg))
- return ERR_CAST(cipher_alg);
+ mask = crypto_requires_off(algt->type, algt->mask,
+ CRYPTO_ALG_NEED_FALLBACK);
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
- if (!inst) {
- err = -ENOMEM;
- goto err_put_cipher_alg;
- }
+ if (!inst)
+ return ERR_PTR(-ENOMEM);
spawn = skcipher_instance_ctx(inst);
- err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
- cipher_alg);
+ err = crypto_grab_cipher(spawn, skcipher_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, mask);
if (err)
goto err_free_inst;
+ cipher_alg = crypto_spawn_cipher_alg(spawn);
- err = crypto_init_spawn(spawn, cipher_alg,
- skcipher_crypto_instance(inst),
- CRYPTO_ALG_TYPE_MASK);
+ err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
+ cipher_alg);
if (err)
goto err_free_inst;
+
inst->free = skcipher_free_instance_simple;
/* Default algorithm properties, can be overridden */
@@ -1221,13 +983,10 @@ skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
inst->alg.init = skcipher_init_tfm_simple;
inst->alg.exit = skcipher_exit_tfm_simple;
- *cipher_alg_ret = cipher_alg;
return inst;
err_free_inst:
- kfree(inst);
-err_put_cipher_alg:
- crypto_mod_put(cipher_alg);
+ skcipher_free_instance_simple(inst);
return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);
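
With the cipher_alg_ret parameter gone, a single-spawn template's ->create() becomes self-contained, modeled on how the mode templates in this series use the helper (example_encrypt/example_decrypt are hypothetical):

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.encrypt = example_encrypt;
	inst->alg.decrypt = example_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);
	return err;
}
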
diff --git a/crypto/sm4_generic.c b/crypto/sm4_generic.c
index 71ffb343709a..016dbc595705 100644
--- a/crypto/sm4_generic.c
+++ b/crypto/sm4_generic.c
@@ -143,29 +143,23 @@ int crypto_sm4_expand_key(struct crypto_sm4_ctx *ctx, const u8 *in_key,
EXPORT_SYMBOL_GPL(crypto_sm4_expand_key);
/**
- * crypto_sm4_set_key - Set the AES key.
+ * crypto_sm4_set_key - Set the SM4 key.
* @tfm: The %crypto_tfm that is used in the context.
* @in_key: The input key.
* @key_len: The size of the key.
*
- * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm
- * is set. The function uses crypto_sm4_expand_key() to expand the key.
+ * This function uses crypto_sm4_expand_key() to expand the key.
* &crypto_sm4_ctx _must_ be the private data embedded in @tfm which is
* retrieved with crypto_tfm_ctx().
+ *
+ * Return: 0 on success; -EINVAL on failure (only happens for bad key lengths)
*/
int crypto_sm4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
- int ret;
-
- ret = crypto_sm4_expand_key(ctx, in_key, key_len);
- if (!ret)
- return 0;
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
- return -EINVAL;
+ return crypto_sm4_expand_key(ctx, in_key, key_len);
}
EXPORT_SYMBOL_GPL(crypto_sm4_set_key);
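
The same convention applies here as in the simd changes above: crypto_sm4_set_key() now forwards crypto_sm4_expand_key()'s -EINVAL directly, so callers learn about a bad key length from the return value alone, with no tfm flag to set or clear.
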
diff --git a/crypto/streebog_generic.c b/crypto/streebog_generic.c
index 63663c3bab7e..dc625ffc54ad 100644
--- a/crypto/streebog_generic.c
+++ b/crypto/streebog_generic.c
@@ -148,52 +148,6 @@ static const struct streebog_uint512 C[12] = {
} }
};
-static const u8 Tau[64] = {
- 0, 8, 16, 24, 32, 40, 48, 56,
- 1, 9, 17, 25, 33, 41, 49, 57,
- 2, 10, 18, 26, 34, 42, 50, 58,
- 3, 11, 19, 27, 35, 43, 51, 59,
- 4, 12, 20, 28, 36, 44, 52, 60,
- 5, 13, 21, 29, 37, 45, 53, 61,
- 6, 14, 22, 30, 38, 46, 54, 62,
- 7, 15, 23, 31, 39, 47, 55, 63
-};
-
-static const u8 Pi[256] = {
- 252, 238, 221, 17, 207, 110, 49, 22,
- 251, 196, 250, 218, 35, 197, 4, 77,
- 233, 119, 240, 219, 147, 46, 153, 186,
- 23, 54, 241, 187, 20, 205, 95, 193,
- 249, 24, 101, 90, 226, 92, 239, 33,
- 129, 28, 60, 66, 139, 1, 142, 79,
- 5, 132, 2, 174, 227, 106, 143, 160,
- 6, 11, 237, 152, 127, 212, 211, 31,
- 235, 52, 44, 81, 234, 200, 72, 171,
- 242, 42, 104, 162, 253, 58, 206, 204,
- 181, 112, 14, 86, 8, 12, 118, 18,
- 191, 114, 19, 71, 156, 183, 93, 135,
- 21, 161, 150, 41, 16, 123, 154, 199,
- 243, 145, 120, 111, 157, 158, 178, 177,
- 50, 117, 25, 61, 255, 53, 138, 126,
- 109, 84, 198, 128, 195, 189, 13, 87,
- 223, 245, 36, 169, 62, 168, 67, 201,
- 215, 121, 214, 246, 124, 34, 185, 3,
- 224, 15, 236, 222, 122, 148, 176, 188,
- 220, 232, 40, 80, 78, 51, 10, 74,
- 167, 151, 96, 115, 30, 0, 98, 68,
- 26, 184, 56, 130, 100, 159, 38, 65,
- 173, 69, 70, 146, 39, 94, 85, 47,
- 140, 163, 165, 125, 105, 213, 149, 59,
- 7, 88, 179, 64, 134, 172, 29, 247,
- 48, 55, 107, 228, 136, 217, 231, 137,
- 225, 27, 131, 73, 76, 63, 248, 254,
- 141, 83, 170, 144, 202, 216, 133, 97,
- 32, 113, 103, 164, 45, 43, 9, 91,
- 203, 155, 37, 208, 190, 229, 108, 82,
- 89, 166, 116, 210, 230, 244, 180, 192,
- 209, 102, 175, 194, 57, 75, 99, 182
-};
-
static const unsigned long long Ax[8][256] = {
{
0xd01f715b5c7ef8e6ULL, 0x16fa240980778325ULL, 0xa8a42e857ee049c8ULL,
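
Tau and Pi can be dropped because this implementation never applies them at runtime: the byte transposition and S-box are already folded into the precomputed Ax[8][256] lookup tables above, so the standalone tables were dead data.
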
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index ad78ab5b93cb..f42f486e90e8 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -65,7 +65,7 @@ static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];
-static char *check[] = {
+static const char *check[] = {
"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
@@ -1634,7 +1634,7 @@ static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
static void test_available(void)
{
- char **name = check;
+ const char **name = check;
while (*name) {
printk("alg %s ", *name);
@@ -2327,6 +2327,22 @@ static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
0, speed_template_32);
break;
+ case 220:
+ test_acipher_speed("essiv(cbc(aes),sha256)",
+ ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("essiv(cbc(aes),sha256)",
+ DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ break;
+
+ case 221:
+ test_aead_speed("aegis128", ENCRYPT, sec,
+ NULL, 0, 16, 8, speed_template_16);
+ test_aead_speed("aegis128", DECRYPT, sec,
+ NULL, 0, 16, 8, speed_template_16);
+ break;
+
case 300:
if (alg) {
test_hash_speed(alg, sec, generic_hash_speed_template);
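
The new modes follow the usual tcrypt convention, e.g. modprobe tcrypt mode=220 sec=1 to time essiv(cbc(aes),sha256) encryption and decryption, or mode=221 for the aegis128 AEAD speed test.
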
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index d0b5b33806a6..ccb3d60729fc 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -82,6 +82,19 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
struct aead_test_suite {
const struct aead_testvec *vecs;
unsigned int count;
+
+ /*
+ * Set if trying to decrypt an inauthentic ciphertext with this
+ * algorithm might result in EINVAL rather than EBADMSG, due to other
+ * validation the algorithm does on the inputs such as length checks.
+ */
+ unsigned int einval_allowed : 1;
+
+ /*
+ * Set if the algorithm intentionally ignores the last 8 bytes of the
+ * AAD buffer during decryption.
+ */
+ unsigned int esp_aad : 1;
};
struct cipher_test_suite {
@@ -259,6 +272,9 @@ struct test_sg_division {
* where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
* @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
* the @iv_offset
+ * @key_offset: misalignment of the key, where 0 is default alignment
+ * @key_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
+ * the @key_offset
* @finalization_type: what finalization function to use for hashes
* @nosimd: execute with SIMD disabled? Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
*/
@@ -269,7 +285,9 @@ struct testvec_config {
struct test_sg_division src_divs[XBUFSIZE];
struct test_sg_division dst_divs[XBUFSIZE];
unsigned int iv_offset;
+ unsigned int key_offset;
bool iv_offset_relative_to_alignmask;
+ bool key_offset_relative_to_alignmask;
enum finalization_type finalization_type;
bool nosimd;
};
@@ -297,6 +315,7 @@ static const struct testvec_config default_cipher_testvec_configs[] = {
.name = "unaligned buffer, offset=1",
.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
.iv_offset = 1,
+ .key_offset = 1,
}, {
.name = "buffer aligned only to alignmask",
.src_divs = {
@@ -308,6 +327,8 @@ static const struct testvec_config default_cipher_testvec_configs[] = {
},
.iv_offset = 1,
.iv_offset_relative_to_alignmask = true,
+ .key_offset = 1,
+ .key_offset_relative_to_alignmask = true,
}, {
.name = "two even aligned splits",
.src_divs = {
@@ -323,6 +344,7 @@ static const struct testvec_config default_cipher_testvec_configs[] = {
{ .proportion_of_total = 4800, .offset = 18 },
},
.iv_offset = 3,
+ .key_offset = 3,
}, {
.name = "misaligned splits crossing pages, inplace",
.inplace = true,
@@ -355,6 +377,7 @@ static const struct testvec_config default_hash_testvec_configs[] = {
.name = "init+update+final misaligned buffer",
.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
.finalization_type = FINALIZATION_TYPE_FINAL,
+ .key_offset = 1,
}, {
.name = "digest buffer aligned only to alignmask",
.src_divs = {
@@ -365,6 +388,8 @@ static const struct testvec_config default_hash_testvec_configs[] = {
},
},
.finalization_type = FINALIZATION_TYPE_DIGEST,
+ .key_offset = 1,
+ .key_offset_relative_to_alignmask = true,
}, {
.name = "init+update+update+final two even splits",
.src_divs = {
@@ -740,6 +765,49 @@ static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
alignmask, dst_total_len, NULL, NULL);
}
+/*
+ * Support for testing setkey() with a misaligned key:
+ *
+ * If cfg->key_offset is set, copy the key into a new buffer at that offset,
+ * optionally adding alignmask. Else, just use the key directly.
+ */
+static int prepare_keybuf(const u8 *key, unsigned int ksize,
+ const struct testvec_config *cfg,
+ unsigned int alignmask,
+ const u8 **keybuf_ret, const u8 **keyptr_ret)
+{
+ unsigned int key_offset = cfg->key_offset;
+ u8 *keybuf = NULL, *keyptr = (u8 *)key;
+
+ if (key_offset != 0) {
+ if (cfg->key_offset_relative_to_alignmask)
+ key_offset += alignmask;
+ keybuf = kmalloc(key_offset + ksize, GFP_KERNEL);
+ if (!keybuf)
+ return -ENOMEM;
+ keyptr = keybuf + key_offset;
+ memcpy(keyptr, key, ksize);
+ }
+ *keybuf_ret = keybuf;
+ *keyptr_ret = keyptr;
+ return 0;
+}
+
+/* Like setkey_f(tfm, key, ksize), but sometimes misalign the key */
+#define do_setkey(setkey_f, tfm, key, ksize, cfg, alignmask) \
+({ \
+ const u8 *keybuf, *keyptr; \
+ int err; \
+ \
+ err = prepare_keybuf((key), (ksize), (cfg), (alignmask), \
+ &keybuf, &keyptr); \
+ if (err == 0) { \
+ err = setkey_f((tfm), keyptr, (ksize)); \
+ kfree(keybuf); \
+ } \
+ err; \
+})
+
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/* Generate a random length in range [0, max_len], but prefer smaller values */
@@ -759,27 +827,39 @@ static unsigned int generate_random_length(unsigned int max_len)
}
}
-/* Sometimes make some random changes to the given data buffer */
-static void mutate_buffer(u8 *buf, size_t count)
+/* Flip a random bit in the given nonempty data buffer */
+static void flip_random_bit(u8 *buf, size_t size)
+{
+ size_t bitpos;
+
+ bitpos = prandom_u32() % (size * 8);
+ buf[bitpos / 8] ^= 1 << (bitpos % 8);
+}
+
+/* Flip a random byte in the given nonempty data buffer */
+static void flip_random_byte(u8 *buf, size_t size)
+{
+ buf[prandom_u32() % size] ^= 0xff;
+}
+
+/* Sometimes make some random changes to the given nonempty data buffer */
+static void mutate_buffer(u8 *buf, size_t size)
{
size_t num_flips;
size_t i;
- size_t pos;
/* Sometimes flip some bits */
if (prandom_u32() % 4 == 0) {
- num_flips = min_t(size_t, 1 << (prandom_u32() % 8), count * 8);
- for (i = 0; i < num_flips; i++) {
- pos = prandom_u32() % (count * 8);
- buf[pos / 8] ^= 1 << (pos % 8);
- }
+ num_flips = min_t(size_t, 1 << (prandom_u32() % 8), size * 8);
+ for (i = 0; i < num_flips; i++)
+ flip_random_bit(buf, size);
}
/* Sometimes flip some bytes */
if (prandom_u32() % 4 == 0) {
- num_flips = min_t(size_t, 1 << (prandom_u32() % 8), count);
+ num_flips = min_t(size_t, 1 << (prandom_u32() % 8), size);
for (i = 0; i < num_flips; i++)
- buf[prandom_u32() % count] ^= 0xff;
+ flip_random_byte(buf, size);
}
}
@@ -966,6 +1046,11 @@ static void generate_random_testvec_config(struct testvec_config *cfg,
p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
}
+ if (prandom_u32() % 2 == 0) {
+ cfg->key_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
+ p += scnprintf(p, end - p, " key_offset=%u", cfg->key_offset);
+ }
+
WARN_ON_ONCE(!valid_testvec_config(cfg));
}
@@ -1103,7 +1188,8 @@ static int test_shash_vec_cfg(const char *driver,
/* Set the key, if specified */
if (vec->ksize) {
- err = crypto_shash_setkey(tfm, vec->key, vec->ksize);
+ err = do_setkey(crypto_shash_setkey, tfm, vec->key, vec->ksize,
+ cfg, alignmask);
if (err) {
if (err == vec->setkey_error)
return 0;
@@ -1290,7 +1376,8 @@ static int test_ahash_vec_cfg(const char *driver,
/* Set the key, if specified */
if (vec->ksize) {
- err = crypto_ahash_setkey(tfm, vec->key, vec->ksize);
+ err = do_setkey(crypto_ahash_setkey, tfm, vec->key, vec->ksize,
+ cfg, alignmask);
if (err) {
if (err == vec->setkey_error)
return 0;
@@ -1853,7 +1940,6 @@ static int test_aead_vec_cfg(const char *driver, int enc,
cfg->iv_offset +
(cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
struct kvec input[2];
- int expected_error;
int err;
/* Set the key */
@@ -1861,7 +1947,9 @@ static int test_aead_vec_cfg(const char *driver, int enc,
crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
else
crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
- err = crypto_aead_setkey(tfm, vec->key, vec->klen);
+
+ err = do_setkey(crypto_aead_setkey, tfm, vec->key, vec->klen,
+ cfg, alignmask);
if (err && err != vec->setkey_error) {
pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
driver, vec_name, vec->setkey_error, err,
@@ -1972,20 +2060,31 @@ static int test_aead_vec_cfg(const char *driver, int enc,
return -EINVAL;
}
- /* Check for success or failure */
- expected_error = vec->novrfy ? -EBADMSG : vec->crypt_error;
- if (err) {
- if (err == expected_error)
- return 0;
- pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
- driver, op, vec_name, expected_error, err, cfg->name);
- return err;
- }
- if (expected_error) {
- pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
+ /* Check for unexpected success or failure, or wrong error code */
+ if ((err == 0 && vec->novrfy) ||
+ (err != vec->crypt_error && !(err == -EBADMSG && vec->novrfy))) {
+ char expected_error[32];
+
+ if (vec->novrfy &&
+ vec->crypt_error != 0 && vec->crypt_error != -EBADMSG)
+ sprintf(expected_error, "-EBADMSG or %d",
+ vec->crypt_error);
+ else if (vec->novrfy)
+ sprintf(expected_error, "-EBADMSG");
+ else
+ sprintf(expected_error, "%d", vec->crypt_error);
+ if (err) {
+ pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%s, actual_error=%d, cfg=\"%s\"\n",
+ driver, op, vec_name, expected_error, err,
+ cfg->name);
+ return err;
+ }
+ pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%s, cfg=\"%s\"\n",
driver, op, vec_name, expected_error, cfg->name);
return -EINVAL;
}
+ if (err) /* Expectedly failed. */
+ return 0;
/* Check for the correct output (ciphertext or plaintext) */
err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
@@ -2047,25 +2146,129 @@ static int test_aead_vec(const char *driver, int enc,
}
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
+
+struct aead_extra_tests_ctx {
+ struct aead_request *req;
+ struct crypto_aead *tfm;
+ const char *driver;
+ const struct alg_test_desc *test_desc;
+ struct cipher_test_sglists *tsgls;
+ unsigned int maxdatasize;
+ unsigned int maxkeysize;
+
+ struct aead_testvec vec;
+ char vec_name[64];
+ char cfgname[TESTVEC_CONFIG_NAMELEN];
+ struct testvec_config cfg;
+};
+
/*
- * Generate an AEAD test vector from the given implementation.
- * Assumes the buffers in 'vec' were already allocated.
+ * Make at least one random change to a (ciphertext, AAD) pair. "Ciphertext"
+ * here means the full ciphertext including the authentication tag. The
+ * authentication tag (and hence also the ciphertext) is assumed to be nonempty.
+ */
+static void mutate_aead_message(struct aead_testvec *vec, bool esp_aad)
+{
+ const unsigned int aad_tail_size = esp_aad ? 8 : 0;
+ const unsigned int authsize = vec->clen - vec->plen;
+
+ if (prandom_u32() % 2 == 0 && vec->alen > aad_tail_size) {
+ /* Mutate the AAD */
+ flip_random_bit((u8 *)vec->assoc, vec->alen - aad_tail_size);
+ if (prandom_u32() % 2 == 0)
+ return;
+ }
+ if (prandom_u32() % 2 == 0) {
+ /* Mutate auth tag (assuming it's at the end of ciphertext) */
+ flip_random_bit((u8 *)vec->ctext + vec->plen, authsize);
+ } else {
+ /* Mutate any part of the ciphertext */
+ flip_random_bit((u8 *)vec->ctext, vec->clen);
+ }
+}
+
+/*
+ * Minimum authentication tag size in bytes at which we assume that we can
+ * reliably generate inauthentic messages, i.e. not generate an authentic
+ * message by chance.
+ */
+#define MIN_COLLISION_FREE_AUTHSIZE 8
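
The threshold follows from simple counting: a uniformly random ciphertext passes an n-byte tag check with probability about 2^-8n, so an 8-byte tag puts an accidental authentic message at roughly 2^-64, negligible over a test run.
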
+
+static void generate_aead_message(struct aead_request *req,
+ const struct aead_test_suite *suite,
+ struct aead_testvec *vec,
+ bool prefer_inauthentic)
+{
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+ const unsigned int ivsize = crypto_aead_ivsize(tfm);
+ const unsigned int authsize = vec->clen - vec->plen;
+ const bool inauthentic = (authsize >= MIN_COLLISION_FREE_AUTHSIZE) &&
+ (prefer_inauthentic || prandom_u32() % 4 == 0);
+
+ /* Generate the AAD. */
+ generate_random_bytes((u8 *)vec->assoc, vec->alen);
+
+ if (inauthentic && prandom_u32() % 2 == 0) {
+ /* Generate a random ciphertext. */
+ generate_random_bytes((u8 *)vec->ctext, vec->clen);
+ } else {
+ int i = 0;
+ struct scatterlist src[2], dst;
+ u8 iv[MAX_IVLEN];
+ DECLARE_CRYPTO_WAIT(wait);
+
+ /* Generate a random plaintext and encrypt it. */
+ sg_init_table(src, 2);
+ if (vec->alen)
+ sg_set_buf(&src[i++], vec->assoc, vec->alen);
+ if (vec->plen) {
+ generate_random_bytes((u8 *)vec->ptext, vec->plen);
+ sg_set_buf(&src[i++], vec->ptext, vec->plen);
+ }
+ sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
+ memcpy(iv, vec->iv, ivsize);
+ aead_request_set_callback(req, 0, crypto_req_done, &wait);
+ aead_request_set_crypt(req, src, &dst, vec->plen, iv);
+ aead_request_set_ad(req, vec->alen);
+ vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req),
+ &wait);
+ /* If encryption failed, we're done. */
+ if (vec->crypt_error != 0)
+ return;
+ memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
+ if (!inauthentic)
+ return;
+ /*
+ * Mutate the authentic (ciphertext, AAD) pair to get an
+ * inauthentic one.
+ */
+ mutate_aead_message(vec, suite->esp_aad);
+ }
+ vec->novrfy = 1;
+ if (suite->einval_allowed)
+ vec->crypt_error = -EINVAL;
+}
+
+/*
+ * Generate an AEAD test vector 'vec' using the implementation specified by
+ * 'req'. The buffers in 'vec' must already be allocated.
+ *
+ * If 'prefer_inauthentic' is true, then this function will generate inauthentic
+ * test vectors (i.e. vectors with 'vec->novrfy=1') more often.
*/
static void generate_random_aead_testvec(struct aead_request *req,
struct aead_testvec *vec,
+ const struct aead_test_suite *suite,
unsigned int maxkeysize,
unsigned int maxdatasize,
- char *name, size_t max_namelen)
+ char *name, size_t max_namelen,
+ bool prefer_inauthentic)
{
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
const unsigned int ivsize = crypto_aead_ivsize(tfm);
- unsigned int maxauthsize = crypto_aead_alg(tfm)->maxauthsize;
+ const unsigned int maxauthsize = crypto_aead_maxauthsize(tfm);
unsigned int authsize;
unsigned int total_len;
- int i;
- struct scatterlist src[2], dst;
- u8 iv[MAX_IVLEN];
- DECLARE_CRYPTO_WAIT(wait);
/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
vec->klen = maxkeysize;
@@ -2081,81 +2284,101 @@ static void generate_random_aead_testvec(struct aead_request *req,
authsize = maxauthsize;
if (prandom_u32() % 4 == 0)
authsize = prandom_u32() % (maxauthsize + 1);
+ if (prefer_inauthentic && authsize < MIN_COLLISION_FREE_AUTHSIZE)
+ authsize = MIN_COLLISION_FREE_AUTHSIZE;
if (WARN_ON(authsize > maxdatasize))
authsize = maxdatasize;
maxdatasize -= authsize;
vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);
- /* Plaintext and associated data */
+ /* AAD, plaintext, and ciphertext lengths */
total_len = generate_random_length(maxdatasize);
if (prandom_u32() % 4 == 0)
vec->alen = 0;
else
vec->alen = generate_random_length(total_len);
vec->plen = total_len - vec->alen;
- generate_random_bytes((u8 *)vec->assoc, vec->alen);
- generate_random_bytes((u8 *)vec->ptext, vec->plen);
-
vec->clen = vec->plen + authsize;
/*
- * If the key or authentication tag size couldn't be set, no need to
- * continue to encrypt.
+ * Generate the AAD, plaintext, and ciphertext. Not applicable if the
+ * key or the authentication tag size couldn't be set.
*/
- if (vec->setkey_error || vec->setauthsize_error)
- goto done;
-
- /* Ciphertext */
- sg_init_table(src, 2);
- i = 0;
- if (vec->alen)
- sg_set_buf(&src[i++], vec->assoc, vec->alen);
- if (vec->plen)
- sg_set_buf(&src[i++], vec->ptext, vec->plen);
- sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
- memcpy(iv, vec->iv, ivsize);
- aead_request_set_callback(req, 0, crypto_req_done, &wait);
- aead_request_set_crypt(req, src, &dst, vec->plen, iv);
- aead_request_set_ad(req, vec->alen);
- vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req), &wait);
- if (vec->crypt_error == 0)
- memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
-done:
+ vec->novrfy = 0;
+ vec->crypt_error = 0;
+ if (vec->setkey_error == 0 && vec->setauthsize_error == 0)
+ generate_aead_message(req, suite, vec, prefer_inauthentic);
snprintf(name, max_namelen,
- "\"random: alen=%u plen=%u authsize=%u klen=%u\"",
- vec->alen, vec->plen, authsize, vec->klen);
+ "\"random: alen=%u plen=%u authsize=%u klen=%u novrfy=%d\"",
+ vec->alen, vec->plen, authsize, vec->klen, vec->novrfy);
+}
+
+static void try_to_generate_inauthentic_testvec(
+ struct aead_extra_tests_ctx *ctx)
+{
+ int i;
+
+ for (i = 0; i < 10; i++) {
+ generate_random_aead_testvec(ctx->req, &ctx->vec,
+ &ctx->test_desc->suite.aead,
+ ctx->maxkeysize, ctx->maxdatasize,
+ ctx->vec_name,
+ sizeof(ctx->vec_name), true);
+ if (ctx->vec.novrfy)
+ return;
+ }
}
/*
- * Test the AEAD algorithm represented by @req against the corresponding generic
- * implementation, if one is available.
+ * Generate inauthentic test vectors (i.e. ciphertext, AAD pairs that aren't the
+ * result of an encryption with the key) and verify that decryption fails.
*/
-static int test_aead_vs_generic_impl(const char *driver,
- const struct alg_test_desc *test_desc,
- struct aead_request *req,
- struct cipher_test_sglists *tsgls)
+static int test_aead_inauthentic_inputs(struct aead_extra_tests_ctx *ctx)
{
- struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- const unsigned int ivsize = crypto_aead_ivsize(tfm);
- const unsigned int maxauthsize = crypto_aead_alg(tfm)->maxauthsize;
- const unsigned int blocksize = crypto_aead_blocksize(tfm);
- const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
+ unsigned int i;
+ int err;
+
+ for (i = 0; i < fuzz_iterations * 8; i++) {
+ /*
+ * Since this part of the tests isn't comparing the
+ * implementation to another, there's no point in testing any
+ * test vectors other than inauthentic ones (vec.novrfy=1) here.
+ *
+ * If we're having trouble generating such a test vector, e.g.
+ * if the algorithm keeps rejecting the generated keys, don't
+ * retry forever; just continue on.
+ */
+ try_to_generate_inauthentic_testvec(ctx);
+ if (ctx->vec.novrfy) {
+ generate_random_testvec_config(&ctx->cfg, ctx->cfgname,
+ sizeof(ctx->cfgname));
+ err = test_aead_vec_cfg(ctx->driver, DECRYPT, &ctx->vec,
+ ctx->vec_name, &ctx->cfg,
+ ctx->req, ctx->tsgls);
+ if (err)
+ return err;
+ }
+ cond_resched();
+ }
+ return 0;
+}
+
+/*
+ * Test the AEAD algorithm against the corresponding generic implementation, if
+ * one is available.
+ */
+static int test_aead_vs_generic_impl(struct aead_extra_tests_ctx *ctx)
+{
+ struct crypto_aead *tfm = ctx->tfm;
const char *algname = crypto_aead_alg(tfm)->base.cra_name;
- const char *generic_driver = test_desc->generic_driver;
+ const char *driver = ctx->driver;
+ const char *generic_driver = ctx->test_desc->generic_driver;
char _generic_driver[CRYPTO_MAX_ALG_NAME];
struct crypto_aead *generic_tfm = NULL;
struct aead_request *generic_req = NULL;
- unsigned int maxkeysize;
unsigned int i;
- struct aead_testvec vec = { 0 };
- char vec_name[64];
- struct testvec_config *cfg;
- char cfgname[TESTVEC_CONFIG_NAMELEN];
int err;
- if (noextratests)
- return 0;
-
if (!generic_driver) { /* Use default naming convention? */
err = build_generic_driver_name(algname, _generic_driver);
if (err)
@@ -2179,12 +2402,6 @@ static int test_aead_vs_generic_impl(const char *driver,
return err;
}
- cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
- if (!cfg) {
- err = -ENOMEM;
- goto out;
- }
-
generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
if (!generic_req) {
err = -ENOMEM;
@@ -2193,24 +2410,27 @@ static int test_aead_vs_generic_impl(const char *driver,
/* Check the algorithm properties for consistency. */
- if (maxauthsize != crypto_aead_alg(generic_tfm)->maxauthsize) {
+ if (crypto_aead_maxauthsize(tfm) !=
+ crypto_aead_maxauthsize(generic_tfm)) {
pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
- driver, maxauthsize,
- crypto_aead_alg(generic_tfm)->maxauthsize);
+ driver, crypto_aead_maxauthsize(tfm),
+ crypto_aead_maxauthsize(generic_tfm));
err = -EINVAL;
goto out;
}
- if (ivsize != crypto_aead_ivsize(generic_tfm)) {
+ if (crypto_aead_ivsize(tfm) != crypto_aead_ivsize(generic_tfm)) {
pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
- driver, ivsize, crypto_aead_ivsize(generic_tfm));
+ driver, crypto_aead_ivsize(tfm),
+ crypto_aead_ivsize(generic_tfm));
err = -EINVAL;
goto out;
}
- if (blocksize != crypto_aead_blocksize(generic_tfm)) {
+ if (crypto_aead_blocksize(tfm) != crypto_aead_blocksize(generic_tfm)) {
pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
- driver, blocksize, crypto_aead_blocksize(generic_tfm));
+ driver, crypto_aead_blocksize(tfm),
+ crypto_aead_blocksize(generic_tfm));
err = -EINVAL;
goto out;
}
@@ -2219,55 +2439,93 @@ static int test_aead_vs_generic_impl(const char *driver,
* Now generate test vectors using the generic implementation, and test
* the other implementation against them.
*/
+ for (i = 0; i < fuzz_iterations * 8; i++) {
+ generate_random_aead_testvec(generic_req, &ctx->vec,
+ &ctx->test_desc->suite.aead,
+ ctx->maxkeysize, ctx->maxdatasize,
+ ctx->vec_name,
+ sizeof(ctx->vec_name), false);
+ generate_random_testvec_config(&ctx->cfg, ctx->cfgname,
+ sizeof(ctx->cfgname));
+ if (!ctx->vec.novrfy) {
+ err = test_aead_vec_cfg(driver, ENCRYPT, &ctx->vec,
+ ctx->vec_name, &ctx->cfg,
+ ctx->req, ctx->tsgls);
+ if (err)
+ goto out;
+ }
+ if (ctx->vec.crypt_error == 0 || ctx->vec.novrfy) {
+ err = test_aead_vec_cfg(driver, DECRYPT, &ctx->vec,
+ ctx->vec_name, &ctx->cfg,
+ ctx->req, ctx->tsgls);
+ if (err)
+ goto out;
+ }
+ cond_resched();
+ }
+ err = 0;
+out:
+ crypto_free_aead(generic_tfm);
+ aead_request_free(generic_req);
+ return err;
+}
- maxkeysize = 0;
- for (i = 0; i < test_desc->suite.aead.count; i++)
- maxkeysize = max_t(unsigned int, maxkeysize,
- test_desc->suite.aead.vecs[i].klen);
+static int test_aead_extra(const char *driver,
+ const struct alg_test_desc *test_desc,
+ struct aead_request *req,
+ struct cipher_test_sglists *tsgls)
+{
+ struct aead_extra_tests_ctx *ctx;
+ unsigned int i;
+ int err;
- vec.key = kmalloc(maxkeysize, GFP_KERNEL);
- vec.iv = kmalloc(ivsize, GFP_KERNEL);
- vec.assoc = kmalloc(maxdatasize, GFP_KERNEL);
- vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
- vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
- if (!vec.key || !vec.iv || !vec.assoc || !vec.ptext || !vec.ctext) {
+ if (noextratests)
+ return 0;
+
+ ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
+ if (!ctx)
+ return -ENOMEM;
+ ctx->req = req;
+ ctx->tfm = crypto_aead_reqtfm(req);
+ ctx->driver = driver;
+ ctx->test_desc = test_desc;
+ ctx->tsgls = tsgls;
+ ctx->maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
+ ctx->maxkeysize = 0;
+ for (i = 0; i < test_desc->suite.aead.count; i++)
+ ctx->maxkeysize = max_t(unsigned int, ctx->maxkeysize,
+ test_desc->suite.aead.vecs[i].klen);
+
+ ctx->vec.key = kmalloc(ctx->maxkeysize, GFP_KERNEL);
+ ctx->vec.iv = kmalloc(crypto_aead_ivsize(ctx->tfm), GFP_KERNEL);
+ ctx->vec.assoc = kmalloc(ctx->maxdatasize, GFP_KERNEL);
+ ctx->vec.ptext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
+ ctx->vec.ctext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
+ if (!ctx->vec.key || !ctx->vec.iv || !ctx->vec.assoc ||
+ !ctx->vec.ptext || !ctx->vec.ctext) {
err = -ENOMEM;
goto out;
}
- for (i = 0; i < fuzz_iterations * 8; i++) {
- generate_random_aead_testvec(generic_req, &vec,
- maxkeysize, maxdatasize,
- vec_name, sizeof(vec_name));
- generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));
+ err = test_aead_inauthentic_inputs(ctx);
+ if (err)
+ goto out;
- err = test_aead_vec_cfg(driver, ENCRYPT, &vec, vec_name, cfg,
- req, tsgls);
- if (err)
- goto out;
- err = test_aead_vec_cfg(driver, DECRYPT, &vec, vec_name, cfg,
- req, tsgls);
- if (err)
- goto out;
- cond_resched();
- }
- err = 0;
+ err = test_aead_vs_generic_impl(ctx);
out:
- kfree(cfg);
- kfree(vec.key);
- kfree(vec.iv);
- kfree(vec.assoc);
- kfree(vec.ptext);
- kfree(vec.ctext);
- crypto_free_aead(generic_tfm);
- aead_request_free(generic_req);
+ kfree(ctx->vec.key);
+ kfree(ctx->vec.iv);
+ kfree(ctx->vec.assoc);
+ kfree(ctx->vec.ptext);
+ kfree(ctx->vec.ctext);
+ kfree(ctx);
return err;
}
#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
-static int test_aead_vs_generic_impl(const char *driver,
- const struct alg_test_desc *test_desc,
- struct aead_request *req,
- struct cipher_test_sglists *tsgls)
+static int test_aead_extra(const char *driver,
+ const struct alg_test_desc *test_desc,
+ struct aead_request *req,
+ struct cipher_test_sglists *tsgls)
{
return 0;
}
@@ -2336,7 +2594,7 @@ static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
if (err)
goto out;
- err = test_aead_vs_generic_impl(driver, desc, req, tsgls);
+ err = test_aead_extra(driver, desc, req, tsgls);
out:
free_cipher_test_sglists(tsgls);
aead_request_free(req);
@@ -2457,7 +2715,8 @@ static int test_skcipher_vec_cfg(const char *driver, int enc,
else
crypto_skcipher_clear_flags(tfm,
CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
- err = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
+ err = do_setkey(crypto_skcipher_setkey, tfm, vec->key, vec->klen,
+ cfg, alignmask);
if (err) {
if (err == vec->setkey_error)
return 0;
@@ -2647,7 +2906,7 @@ static void generate_random_cipher_testvec(struct skcipher_request *req,
char *name, size_t max_namelen)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
- const unsigned int maxkeysize = tfm->keysize;
+ const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
struct scatterlist src, dst;
u8 iv[MAX_IVLEN];
@@ -2678,6 +2937,15 @@ static void generate_random_cipher_testvec(struct skcipher_request *req,
skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
+ if (vec->crypt_error != 0) {
+ /*
+ * The only acceptable error here is for an invalid length, so
+ * skcipher decryption should fail with the same error too.
+ * We'll test for this. But to keep the API usage well-defined,
+ * explicitly initialize the ciphertext buffer too.
+ */
+ memset((u8 *)vec->ctext, 0, vec->len);
+ }
done:
snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
vec->len, vec->klen);
@@ -2693,6 +2961,7 @@ static int test_skcipher_vs_generic_impl(const char *driver,
struct cipher_test_sglists *tsgls)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+ const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
@@ -2751,9 +3020,19 @@ static int test_skcipher_vs_generic_impl(const char *driver,
/* Check the algorithm properties for consistency. */
- if (tfm->keysize != generic_tfm->keysize) {
+ if (crypto_skcipher_min_keysize(tfm) !=
+ crypto_skcipher_min_keysize(generic_tfm)) {
+ pr_err("alg: skcipher: min keysize for %s (%u) doesn't match generic impl (%u)\n",
+ driver, crypto_skcipher_min_keysize(tfm),
+ crypto_skcipher_min_keysize(generic_tfm));
+ err = -EINVAL;
+ goto out;
+ }
+
+ if (maxkeysize != crypto_skcipher_max_keysize(generic_tfm)) {
pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
- driver, tfm->keysize, generic_tfm->keysize);
+ driver, maxkeysize,
+ crypto_skcipher_max_keysize(generic_tfm));
err = -EINVAL;
goto out;
}
@@ -2778,7 +3057,7 @@ static int test_skcipher_vs_generic_impl(const char *driver,
* the other implementation against them.
*/
- vec.key = kmalloc(tfm->keysize, GFP_KERNEL);
+ vec.key = kmalloc(maxkeysize, GFP_KERNEL);
vec.iv = kmalloc(ivsize, GFP_KERNEL);
vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
@@ -3862,7 +4141,8 @@ static int alg_test_null(const struct alg_test_desc *desc,
return 0;
}
-#define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
+#define ____VECS(tv) .vecs = tv, .count = ARRAY_SIZE(tv)
+#define __VECS(tv) { ____VECS(tv) }
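
Splitting __VECS() into an inner ____VECS() lets a test suite append further designated initializers next to the vector list, which the CCM and RFC 4106/4309/4543 entries below rely on. For example:

	.aead = {
		____VECS(aes_ccm_tv_template),
		.einval_allowed = 1,
	}

expands to:

	.aead = {
		.vecs = aes_ccm_tv_template,
		.count = ARRAY_SIZE(aes_ccm_tv_template),
		.einval_allowed = 1,
	}
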
/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
@@ -3887,18 +4167,6 @@ static const struct alg_test_desc alg_test_descs[] = {
.aead = __VECS(aegis128_tv_template)
}
}, {
- .alg = "aegis128l",
- .test = alg_test_aead,
- .suite = {
- .aead = __VECS(aegis128l_tv_template)
- }
- }, {
- .alg = "aegis256",
- .test = alg_test_aead,
- .suite = {
- .aead = __VECS(aegis256_tv_template)
- }
- }, {
.alg = "ansi_cprng",
.test = alg_test_cprng,
.suite = {
@@ -4035,6 +4303,58 @@ static const struct alg_test_desc alg_test_descs[] = {
.test = alg_test_null,
.fips_allowed = 1,
}, {
+ .alg = "blake2b-160",
+ .test = alg_test_hash,
+ .fips_allowed = 0,
+ .suite = {
+ .hash = __VECS(blake2b_160_tv_template)
+ }
+ }, {
+ .alg = "blake2b-256",
+ .test = alg_test_hash,
+ .fips_allowed = 0,
+ .suite = {
+ .hash = __VECS(blake2b_256_tv_template)
+ }
+ }, {
+ .alg = "blake2b-384",
+ .test = alg_test_hash,
+ .fips_allowed = 0,
+ .suite = {
+ .hash = __VECS(blake2b_384_tv_template)
+ }
+ }, {
+ .alg = "blake2b-512",
+ .test = alg_test_hash,
+ .fips_allowed = 0,
+ .suite = {
+ .hash = __VECS(blake2b_512_tv_template)
+ }
+ }, {
+ .alg = "blake2s-128",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = __VECS(blakes2s_128_tv_template)
+ }
+ }, {
+ .alg = "blake2s-160",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = __VECS(blakes2s_160_tv_template)
+ }
+ }, {
+ .alg = "blake2s-224",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = __VECS(blakes2s_224_tv_template)
+ }
+ }, {
+ .alg = "blake2s-256",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = __VECS(blakes2s_256_tv_template)
+ }
+ }, {
.alg = "cbc(aes)",
.test = alg_test_skcipher,
.fips_allowed = 1,
@@ -4116,6 +4436,15 @@ static const struct alg_test_desc alg_test_descs[] = {
.cipher = __VECS(tf_cbc_tv_template)
},
}, {
+#if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
+ .alg = "cbc-paes-s390",
+ .fips_allowed = 1,
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(aes_cbc_tv_template)
+ }
+ }, {
+#endif
.alg = "cbcmac(aes)",
.fips_allowed = 1,
.test = alg_test_hash,
@@ -4128,7 +4457,10 @@ static const struct alg_test_desc alg_test_descs[] = {
.test = alg_test_aead,
.fips_allowed = 1,
.suite = {
- .aead = __VECS(aes_ccm_tv_template)
+ .aead = {
+ ____VECS(aes_ccm_tv_template),
+ .einval_allowed = 1,
+ }
}
}, {
.alg = "cfb(aes)",
@@ -4138,6 +4470,12 @@ static const struct alg_test_desc alg_test_descs[] = {
.cipher = __VECS(aes_cfb_tv_template)
},
}, {
+ .alg = "cfb(sm4)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(sm4_cfb_tv_template)
+ }
+ }, {
.alg = "chacha20",
.test = alg_test_skcipher,
.suite = {
@@ -4258,6 +4596,15 @@ static const struct alg_test_desc alg_test_descs[] = {
.cipher = __VECS(tf_ctr_tv_template)
}
}, {
+#if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
+ .alg = "ctr-paes-s390",
+ .fips_allowed = 1,
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(aes_ctr_tv_template)
+ }
+ }, {
+#endif
.alg = "cts(cbc(aes))",
.test = alg_test_skcipher,
.fips_allowed = 1,
@@ -4272,6 +4619,12 @@ static const struct alg_test_desc alg_test_descs[] = {
.test = alg_test_null,
.fips_allowed = 1,
}, {
+ .alg = "curve25519",
+ .test = alg_test_kpp,
+ .suite = {
+ .kpp = __VECS(curve25519_tv_template)
+ }
+ }, {
.alg = "deflate",
.test = alg_test_comp,
.fips_allowed = 1,
@@ -4544,6 +4897,15 @@ static const struct alg_test_desc alg_test_descs[] = {
.cipher = __VECS(xtea_tv_template)
}
}, {
+#if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
+ .alg = "ecb-paes-s390",
+ .fips_allowed = 1,
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(aes_tv_template)
+ }
+ }, {
+#endif
.alg = "ecdh",
.test = alg_test_kpp,
.fips_allowed = 1,
@@ -4557,6 +4919,20 @@ static const struct alg_test_desc alg_test_descs[] = {
.akcipher = __VECS(ecrdsa_tv_template)
}
}, {
+ .alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = __VECS(essiv_hmac_sha256_aes_cbc_tv_temp)
+ }
+ }, {
+ .alg = "essiv(cbc(aes),sha256)",
+ .test = alg_test_skcipher,
+ .fips_allowed = 1,
+ .suite = {
+ .cipher = __VECS(essiv_aes_cbc_tv_template)
+ }
+ }, {
.alg = "gcm(aes)",
.generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
.test = alg_test_aead,
@@ -4653,6 +5029,12 @@ static const struct alg_test_desc alg_test_descs[] = {
.hash = __VECS(hmac_sha512_tv_template)
}
}, {
+ .alg = "hmac(sm3)",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = __VECS(hmac_sm3_tv_template)
+ }
+ }, {
.alg = "hmac(streebog256)",
.test = alg_test_hash,
.suite = {
@@ -4741,6 +5123,16 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "lzo-rle",
+ .test = alg_test_comp,
+ .fips_allowed = 1,
+ .suite = {
+ .comp = {
+ .comp = __VECS(lzorle_comp_tv_template),
+ .decomp = __VECS(lzorle_decomp_tv_template)
+ }
+ }
+ }, {
.alg = "md4",
.test = alg_test_hash,
.suite = {
@@ -4759,18 +5151,6 @@ static const struct alg_test_desc alg_test_descs[] = {
.hash = __VECS(michael_mic_tv_template)
}
}, {
- .alg = "morus1280",
- .test = alg_test_aead,
- .suite = {
- .aead = __VECS(morus1280_tv_template)
- }
- }, {
- .alg = "morus640",
- .test = alg_test_aead,
- .suite = {
- .aead = __VECS(morus640_tv_template)
- }
- }, {
.alg = "nhpoly1305",
.test = alg_test_hash,
.suite = {
@@ -4791,6 +5171,12 @@ static const struct alg_test_desc alg_test_descs[] = {
.test = alg_test_null,
.fips_allowed = 1,
}, {
+ .alg = "ofb(sm4)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(sm4_ofb_tv_template)
+ }
+ }, {
.alg = "pcbc(fcrypt)",
.test = alg_test_skcipher,
.suite = {
@@ -4829,12 +5215,22 @@ static const struct alg_test_desc alg_test_descs[] = {
.cipher = __VECS(aes_ctr_rfc3686_tv_template)
}
}, {
+ .alg = "rfc3686(ctr(sm4))",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(sm4_ctr_rfc3686_tv_template)
+ }
+ }, {
.alg = "rfc4106(gcm(aes))",
.generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
.test = alg_test_aead,
.fips_allowed = 1,
.suite = {
- .aead = __VECS(aes_gcm_rfc4106_tv_template)
+ .aead = {
+ ____VECS(aes_gcm_rfc4106_tv_template),
+ .einval_allowed = 1,
+ .esp_aad = 1,
+ }
}
}, {
.alg = "rfc4309(ccm(aes))",
@@ -4842,14 +5238,21 @@ static const struct alg_test_desc alg_test_descs[] = {
.test = alg_test_aead,
.fips_allowed = 1,
.suite = {
- .aead = __VECS(aes_ccm_rfc4309_tv_template)
+ .aead = {
+ ____VECS(aes_ccm_rfc4309_tv_template),
+ .einval_allowed = 1,
+ .esp_aad = 1,
+ }
}
}, {
.alg = "rfc4543(gcm(aes))",
.generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
.test = alg_test_aead,
.suite = {
- .aead = __VECS(aes_gcm_rfc4543_tv_template)
+ .aead = {
+ ____VECS(aes_gcm_rfc4543_tv_template),
+ .einval_allowed = 1,
+ }
}
}, {
.alg = "rfc7539(chacha20,poly1305)",
@@ -4861,7 +5264,11 @@ static const struct alg_test_desc alg_test_descs[] = {
.alg = "rfc7539esp(chacha20,poly1305)",
.test = alg_test_aead,
.suite = {
- .aead = __VECS(rfc7539esp_tv_template)
+ .aead = {
+ ____VECS(rfc7539esp_tv_template),
+ .einval_allowed = 1,
+ .esp_aad = 1,
+ }
}
}, {
.alg = "rmd128",
@@ -5085,6 +5492,15 @@ static const struct alg_test_desc alg_test_descs[] = {
.cipher = __VECS(tf_xts_tv_template)
}
}, {
+#if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
+ .alg = "xts-paes-s390",
+ .fips_allowed = 1,
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = __VECS(aes_xts_tv_template)
+ }
+ }, {
+#endif
.alg = "xts4096(paes)",
.test = alg_test_null,
.fips_allowed = 1,
@@ -5240,9 +5656,11 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
type, mask);
test_done:
- if (rc && (fips_enabled || panic_on_fail))
+ if (rc && (fips_enabled || panic_on_fail)) {
+ fips_fail_notify();
panic("alg: self-tests for %s (%s) failed in %s mode!\n",
driver, alg, fips_enabled ? "fips" : "panic_on_fail");
+ }
if (fips_enabled && !rc)
pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
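
fips_fail_notify() gives interested code a chance to learn about a self-test failure before the kernel panics in FIPS mode. Assuming it is backed by a notifier chain exported from crypto/fips.c (the chain name below is an assumption, not taken from this patch), a listener could look like:

#include <linux/notifier.h>
#include <linux/printk.h>

/* Assumed export from crypto/fips.c; only fips_fail_notify() itself
 * appears in this patch. */
extern struct atomic_notifier_head fips_fail_notif_chain;

static int my_fips_fail_cb(struct notifier_block *nb,
			   unsigned long action, void *data)
{
	pr_alert("FIPS self-test failure reported\n");
	return NOTIFY_OK;
}

static struct notifier_block my_fips_fail_nb = {
	.notifier_call = my_fips_fail_cb,
};

/* e.g. from module init:
 * atomic_notifier_chain_register(&fips_fail_notif_chain,
 *				  &my_fips_fail_nb);
 */
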
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index 073bd2efafca..d29983908c38 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -85,16 +85,22 @@ struct cipher_testvec {
* @ctext: Pointer to the full authenticated ciphertext. For AEADs that
* produce a separate "ciphertext" and "authentication tag", these
* two parts are concatenated: ciphertext || tag.
- * @novrfy: Decryption verification failure expected?
+ * @novrfy: If set, this is an inauthentic input test: only decryption is
+ *	    tested, and it is expected to fail with -EBADMSG, or with
+ *	    @crypt_error if that field is nonzero.
* @wk: Does the test need CRYPTO_TFM_REQ_FORBID_WEAK_KEYS?
* (e.g. setkey() needs to fail due to a weak key)
* @klen: Length of @key in bytes
* @plen: Length of @ptext in bytes
* @alen: Length of @assoc in bytes
* @clen: Length of @ctext in bytes
- * @setkey_error: Expected error from setkey()
- * @setauthsize_error: Expected error from setauthsize()
- * @crypt_error: Expected error from encrypt() and decrypt()
+ * @setkey_error: Expected error from setkey(). If set, neither encryption nor
+ * decryption is tested.
+ * @setauthsize_error: Expected error from setauthsize(). If set, neither
+ * encryption nor decryption is tested.
+ * @crypt_error: When @novrfy=0, the expected error from encrypt(). When
+ * @novrfy=1, an optional alternate error code that is acceptable
+ * for decrypt() to return besides -EBADMSG.
*/
struct aead_testvec {
const char *key;
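
With the clarified field semantics, a purely inauthentic vector needs only the decryption inputs plus the expected failure mode. A hypothetical example, with all values invented for illustration:

/* Decryption of this ciphertext must fail: -EBADMSG is always accepted,
 * and since .crypt_error is set, -EINVAL is accepted as well. */
static const struct aead_testvec inauthentic_example = {
	.key	= "\x00\x01\x02\x03\x04\x05\x06\x07"
		  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
	.klen	= 16,
	.iv	= "\x00\x00\x00\x00\x00\x00\x00\x00"
		  "\x00\x00\x00\x00\x00\x00\x00\x00",
	.assoc	= "",
	.alen	= 0,
	.ptext	= "",
	.plen	= 0,
	.ctext	= "\xde\xad\xbe\xef\xde\xad\xbe\xef"
		  "\xde\xad\xbe\xef\xde\xad\xbe\xef",
	.clen	= 16,
	.novrfy	= 1,
	.crypt_error = -EINVAL,
};
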
@@ -1030,6 +1036,1231 @@ static const struct kpp_testvec dh_tv_template[] = {
}
};
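
The curve25519 vectors below are run through the generic KPP test path. As a rough illustration of how one vector maps onto the kernel's KPP API — a sketch only; alg_test_kpp() performs the real work with more checks, and the raw 32-byte set_secret() format is assumed from crypto/curve25519-generic.c:

#include <crypto/kpp.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int try_one_curve25519_vector(const u8 secret[32],
				     const u8 b_public[32], u8 ss[32])
{
	struct crypto_kpp *tfm;
	struct kpp_request *req;
	struct scatterlist src, dst;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_kpp("curve25519", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_kpp_set_secret(tfm, secret, 32);
	if (err)
		goto out_free_tfm;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	sg_init_one(&src, b_public, 32);
	sg_init_one(&dst, ss, 32);
	kpp_request_set_input(req, &src, 32);
	kpp_request_set_output(req, &dst, 32);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* On success, ss[] should equal the vector's expected_ss[]. */
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);

	kpp_request_free(req);
out_free_tfm:
	crypto_free_kpp(tfm);
	return err;
}
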
+static const struct kpp_testvec curve25519_tv_template[] = {
+{
+ .secret = (u8[32]){ 0x77, 0x07, 0x6d, 0x0a, 0x73, 0x18, 0xa5, 0x7d,
+ 0x3c, 0x16, 0xc1, 0x72, 0x51, 0xb2, 0x66, 0x45,
+ 0xdf, 0x4c, 0x2f, 0x87, 0xeb, 0xc0, 0x99, 0x2a,
+ 0xb1, 0x77, 0xfb, 0xa5, 0x1d, 0xb9, 0x2c, 0x2a },
+ .b_public = (u8[32]){ 0xde, 0x9e, 0xdb, 0x7d, 0x7b, 0x7d, 0xc1, 0xb4,
+ 0xd3, 0x5b, 0x61, 0xc2, 0xec, 0xe4, 0x35, 0x37,
+ 0x3f, 0x83, 0x43, 0xc8, 0x5b, 0x78, 0x67, 0x4d,
+ 0xad, 0xfc, 0x7e, 0x14, 0x6f, 0x88, 0x2b, 0x4f },
+ .expected_ss = (u8[32]){ 0x4a, 0x5d, 0x9d, 0x5b, 0xa4, 0xce, 0x2d, 0xe1,
+ 0x72, 0x8e, 0x3b, 0xf4, 0x80, 0x35, 0x0f, 0x25,
+ 0xe0, 0x7e, 0x21, 0xc9, 0x47, 0xd1, 0x9e, 0x33,
+ 0x76, 0xf0, 0x9b, 0x3c, 0x1e, 0x16, 0x17, 0x42 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+{
+ .secret = (u8[32]){ 0x5d, 0xab, 0x08, 0x7e, 0x62, 0x4a, 0x8a, 0x4b,
+ 0x79, 0xe1, 0x7f, 0x8b, 0x83, 0x80, 0x0e, 0xe6,
+ 0x6f, 0x3b, 0xb1, 0x29, 0x26, 0x18, 0xb6, 0xfd,
+ 0x1c, 0x2f, 0x8b, 0x27, 0xff, 0x88, 0xe0, 0xeb },
+ .b_public = (u8[32]){ 0x85, 0x20, 0xf0, 0x09, 0x89, 0x30, 0xa7, 0x54,
+ 0x74, 0x8b, 0x7d, 0xdc, 0xb4, 0x3e, 0xf7, 0x5a,
+ 0x0d, 0xbf, 0x3a, 0x0d, 0x26, 0x38, 0x1a, 0xf4,
+ 0xeb, 0xa4, 0xa9, 0x8e, 0xaa, 0x9b, 0x4e, 0x6a },
+ .expected_ss = (u8[32]){ 0x4a, 0x5d, 0x9d, 0x5b, 0xa4, 0xce, 0x2d, 0xe1,
+ 0x72, 0x8e, 0x3b, 0xf4, 0x80, 0x35, 0x0f, 0x25,
+ 0xe0, 0x7e, 0x21, 0xc9, 0x47, 0xd1, 0x9e, 0x33,
+ 0x76, 0xf0, 0x9b, 0x3c, 0x1e, 0x16, 0x17, 0x42 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+{
+ .secret = (u8[32]){ 1 },
+ .b_public = (u8[32]){ 0x25, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .expected_ss = (u8[32]){ 0x3c, 0x77, 0x77, 0xca, 0xf9, 0x97, 0xb2, 0x64,
+ 0x41, 0x60, 0x77, 0x66, 0x5b, 0x4e, 0x22, 0x9d,
+ 0x0b, 0x95, 0x48, 0xdc, 0x0c, 0xd8, 0x19, 0x98,
+ 0xdd, 0xcd, 0xc5, 0xc8, 0x53, 0x3c, 0x79, 0x7f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+{
+ .secret = (u8[32]){ 1 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0xb3, 0x2d, 0x13, 0x62, 0xc2, 0x48, 0xd6, 0x2f,
+ 0xe6, 0x26, 0x19, 0xcf, 0xf0, 0x4d, 0xd4, 0x3d,
+ 0xb7, 0x3f, 0xfc, 0x1b, 0x63, 0x08, 0xed, 0xe3,
+ 0x0b, 0x78, 0xd8, 0x73, 0x80, 0xf1, 0xe8, 0x34 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+{
+ .secret = (u8[32]){ 0xa5, 0x46, 0xe3, 0x6b, 0xf0, 0x52, 0x7c, 0x9d,
+ 0x3b, 0x16, 0x15, 0x4b, 0x82, 0x46, 0x5e, 0xdd,
+ 0x62, 0x14, 0x4c, 0x0a, 0xc1, 0xfc, 0x5a, 0x18,
+ 0x50, 0x6a, 0x22, 0x44, 0xba, 0x44, 0x9a, 0xc4 },
+ .b_public = (u8[32]){ 0xe6, 0xdb, 0x68, 0x67, 0x58, 0x30, 0x30, 0xdb,
+ 0x35, 0x94, 0xc1, 0xa4, 0x24, 0xb1, 0x5f, 0x7c,
+ 0x72, 0x66, 0x24, 0xec, 0x26, 0xb3, 0x35, 0x3b,
+ 0x10, 0xa9, 0x03, 0xa6, 0xd0, 0xab, 0x1c, 0x4c },
+ .expected_ss = (u8[32]){ 0xc3, 0xda, 0x55, 0x37, 0x9d, 0xe9, 0xc6, 0x90,
+ 0x8e, 0x94, 0xea, 0x4d, 0xf2, 0x8d, 0x08, 0x4f,
+ 0x32, 0xec, 0xcf, 0x03, 0x49, 0x1c, 0x71, 0xf7,
+ 0x54, 0xb4, 0x07, 0x55, 0x77, 0xa2, 0x85, 0x52 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+{
+ .secret = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0x0a, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0x0a, 0x00, 0xfb, 0x9f },
+ .expected_ss = (u8[32]){ 0x77, 0x52, 0xb6, 0x18, 0xc1, 0x2d, 0x48, 0xd2,
+ 0xc6, 0x93, 0x46, 0x83, 0x81, 0x7c, 0xc6, 0x57,
+ 0xf3, 0x31, 0x03, 0x19, 0x49, 0x48, 0x20, 0x05,
+ 0x42, 0x2b, 0x4e, 0xae, 0x8d, 0x1d, 0x43, 0x23 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+{
+ .secret = (u8[32]){ 0x8e, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .b_public = (u8[32]){ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8e, 0x06 },
+ .expected_ss = (u8[32]){ 0x5a, 0xdf, 0xaa, 0x25, 0x86, 0x8e, 0x32, 0x3d,
+ 0xae, 0x49, 0x62, 0xc1, 0x01, 0x5c, 0xb3, 0x12,
+ 0xe1, 0xc5, 0xc7, 0x9e, 0x95, 0x3f, 0x03, 0x99,
+ 0xb0, 0xba, 0x16, 0x22, 0xf3, 0xb6, 0xf7, 0x0c },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - normal case */
+{
+ .secret = (u8[32]){ 0x48, 0x52, 0x83, 0x4d, 0x9d, 0x6b, 0x77, 0xda,
+ 0xde, 0xab, 0xaa, 0xf2, 0xe1, 0x1d, 0xca, 0x66,
+ 0xd1, 0x9f, 0xe7, 0x49, 0x93, 0xa7, 0xbe, 0xc3,
+ 0x6c, 0x6e, 0x16, 0xa0, 0x98, 0x3f, 0xea, 0xba },
+ .b_public = (u8[32]){ 0x9c, 0x64, 0x7d, 0x9a, 0xe5, 0x89, 0xb9, 0xf5,
+ 0x8f, 0xdc, 0x3c, 0xa4, 0x94, 0x7e, 0xfb, 0xc9,
+ 0x15, 0xc4, 0xb2, 0xe0, 0x8e, 0x74, 0x4a, 0x0e,
+ 0xdf, 0x46, 0x9d, 0xac, 0x59, 0xc8, 0xf8, 0x5a },
+ .expected_ss = (u8[32]){ 0x87, 0xb7, 0xf2, 0x12, 0xb6, 0x27, 0xf7, 0xa5,
+ 0x4c, 0xa5, 0xe0, 0xbc, 0xda, 0xdd, 0xd5, 0x38,
+ 0x9d, 0x9d, 0xe6, 0x15, 0x6c, 0xdb, 0xcf, 0x8e,
+ 0xbe, 0x14, 0xff, 0xbc, 0xfb, 0x43, 0x65, 0x51 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key on twist */
+{
+ .secret = (u8[32]){ 0x58, 0x8c, 0x06, 0x1a, 0x50, 0x80, 0x4a, 0xc4,
+ 0x88, 0xad, 0x77, 0x4a, 0xc7, 0x16, 0xc3, 0xf5,
+ 0xba, 0x71, 0x4b, 0x27, 0x12, 0xe0, 0x48, 0x49,
+ 0x13, 0x79, 0xa5, 0x00, 0x21, 0x19, 0x98, 0xa8 },
+ .b_public = (u8[32]){ 0x63, 0xaa, 0x40, 0xc6, 0xe3, 0x83, 0x46, 0xc5,
+ 0xca, 0xf2, 0x3a, 0x6d, 0xf0, 0xa5, 0xe6, 0xc8,
+ 0x08, 0x89, 0xa0, 0x86, 0x47, 0xe5, 0x51, 0xb3,
+ 0x56, 0x34, 0x49, 0xbe, 0xfc, 0xfc, 0x97, 0x33 },
+ .expected_ss = (u8[32]){ 0xb1, 0xa7, 0x07, 0x51, 0x94, 0x95, 0xff, 0xff,
+ 0xb2, 0x98, 0xff, 0x94, 0x17, 0x16, 0xb0, 0x6d,
+ 0xfa, 0xb8, 0x7c, 0xf8, 0xd9, 0x11, 0x23, 0xfe,
+ 0x2b, 0xe9, 0xa2, 0x33, 0xdd, 0xa2, 0x22, 0x12 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key on twist */
+{
+ .secret = (u8[32]){ 0xb0, 0x5b, 0xfd, 0x32, 0xe5, 0x53, 0x25, 0xd9,
+ 0xfd, 0x64, 0x8c, 0xb3, 0x02, 0x84, 0x80, 0x39,
+ 0x00, 0x0b, 0x39, 0x0e, 0x44, 0xd5, 0x21, 0xe5,
+ 0x8a, 0xab, 0x3b, 0x29, 0xa6, 0x96, 0x0b, 0xa8 },
+ .b_public = (u8[32]){ 0x0f, 0x83, 0xc3, 0x6f, 0xde, 0xd9, 0xd3, 0x2f,
+ 0xad, 0xf4, 0xef, 0xa3, 0xae, 0x93, 0xa9, 0x0b,
+ 0xb5, 0xcf, 0xa6, 0x68, 0x93, 0xbc, 0x41, 0x2c,
+ 0x43, 0xfa, 0x72, 0x87, 0xdb, 0xb9, 0x97, 0x79 },
+ .expected_ss = (u8[32]){ 0x67, 0xdd, 0x4a, 0x6e, 0x16, 0x55, 0x33, 0x53,
+ 0x4c, 0x0e, 0x3f, 0x17, 0x2e, 0x4a, 0xb8, 0x57,
+ 0x6b, 0xca, 0x92, 0x3a, 0x5f, 0x07, 0xb2, 0xc0,
+ 0x69, 0xb4, 0xc3, 0x10, 0xff, 0x2e, 0x93, 0x5b },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key on twist */
+{
+ .secret = (u8[32]){ 0x70, 0xe3, 0x4b, 0xcb, 0xe1, 0xf4, 0x7f, 0xbc,
+ 0x0f, 0xdd, 0xfd, 0x7c, 0x1e, 0x1a, 0xa5, 0x3d,
+ 0x57, 0xbf, 0xe0, 0xf6, 0x6d, 0x24, 0x30, 0x67,
+ 0xb4, 0x24, 0xbb, 0x62, 0x10, 0xbe, 0xd1, 0x9c },
+ .b_public = (u8[32]){ 0x0b, 0x82, 0x11, 0xa2, 0xb6, 0x04, 0x90, 0x97,
+ 0xf6, 0x87, 0x1c, 0x6c, 0x05, 0x2d, 0x3c, 0x5f,
+ 0xc1, 0xba, 0x17, 0xda, 0x9e, 0x32, 0xae, 0x45,
+ 0x84, 0x03, 0xb0, 0x5b, 0xb2, 0x83, 0x09, 0x2a },
+ .expected_ss = (u8[32]){ 0x4a, 0x06, 0x38, 0xcf, 0xaa, 0x9e, 0xf1, 0x93,
+ 0x3b, 0x47, 0xf8, 0x93, 0x92, 0x96, 0xa6, 0xb2,
+ 0x5b, 0xe5, 0x41, 0xef, 0x7f, 0x70, 0xe8, 0x44,
+ 0xc0, 0xbc, 0xc0, 0x0b, 0x13, 0x4d, 0xe6, 0x4a },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key on twist */
+{
+ .secret = (u8[32]){ 0x68, 0xc1, 0xf3, 0xa6, 0x53, 0xa4, 0xcd, 0xb1,
+ 0xd3, 0x7b, 0xba, 0x94, 0x73, 0x8f, 0x8b, 0x95,
+ 0x7a, 0x57, 0xbe, 0xb2, 0x4d, 0x64, 0x6e, 0x99,
+ 0x4d, 0xc2, 0x9a, 0x27, 0x6a, 0xad, 0x45, 0x8d },
+ .b_public = (u8[32]){ 0x34, 0x3a, 0xc2, 0x0a, 0x3b, 0x9c, 0x6a, 0x27,
+ 0xb1, 0x00, 0x81, 0x76, 0x50, 0x9a, 0xd3, 0x07,
+ 0x35, 0x85, 0x6e, 0xc1, 0xc8, 0xd8, 0xfc, 0xae,
+ 0x13, 0x91, 0x2d, 0x08, 0xd1, 0x52, 0xf4, 0x6c },
+ .expected_ss = (u8[32]){ 0x39, 0x94, 0x91, 0xfc, 0xe8, 0xdf, 0xab, 0x73,
+ 0xb4, 0xf9, 0xf6, 0x11, 0xde, 0x8e, 0xa0, 0xb2,
+ 0x7b, 0x28, 0xf8, 0x59, 0x94, 0x25, 0x0b, 0x0f,
+ 0x47, 0x5d, 0x58, 0x5d, 0x04, 0x2a, 0xc2, 0x07 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key on twist */
+{
+ .secret = (u8[32]){ 0xd8, 0x77, 0xb2, 0x6d, 0x06, 0xdf, 0xf9, 0xd9,
+ 0xf7, 0xfd, 0x4c, 0x5b, 0x37, 0x69, 0xf8, 0xcd,
+ 0xd5, 0xb3, 0x05, 0x16, 0xa5, 0xab, 0x80, 0x6b,
+ 0xe3, 0x24, 0xff, 0x3e, 0xb6, 0x9e, 0xa0, 0xb2 },
+ .b_public = (u8[32]){ 0xfa, 0x69, 0x5f, 0xc7, 0xbe, 0x8d, 0x1b, 0xe5,
+ 0xbf, 0x70, 0x48, 0x98, 0xf3, 0x88, 0xc4, 0x52,
+ 0xba, 0xfd, 0xd3, 0xb8, 0xea, 0xe8, 0x05, 0xf8,
+ 0x68, 0x1a, 0x8d, 0x15, 0xc2, 0xd4, 0xe1, 0x42 },
+ .expected_ss = (u8[32]){ 0x2c, 0x4f, 0xe1, 0x1d, 0x49, 0x0a, 0x53, 0x86,
+ 0x17, 0x76, 0xb1, 0x3b, 0x43, 0x54, 0xab, 0xd4,
+ 0xcf, 0x5a, 0x97, 0x69, 0x9d, 0xb6, 0xe6, 0xc6,
+ 0x8c, 0x16, 0x26, 0xd0, 0x76, 0x62, 0xf7, 0x58 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case on twist */
+{
+ .secret = (u8[32]){ 0x38, 0xdd, 0xe9, 0xf3, 0xe7, 0xb7, 0x99, 0x04,
+ 0x5f, 0x9a, 0xc3, 0x79, 0x3d, 0x4a, 0x92, 0x77,
+ 0xda, 0xde, 0xad, 0xc4, 0x1b, 0xec, 0x02, 0x90,
+ 0xf8, 0x1f, 0x74, 0x4f, 0x73, 0x77, 0x5f, 0x84 },
+ .b_public = (u8[32]){ 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .expected_ss = (u8[32]){ 0x9a, 0x2c, 0xfe, 0x84, 0xff, 0x9c, 0x4a, 0x97,
+ 0x39, 0x62, 0x5c, 0xae, 0x4a, 0x3b, 0x82, 0xa9,
+ 0x06, 0x87, 0x7a, 0x44, 0x19, 0x46, 0xf8, 0xd7,
+ 0xb3, 0xd7, 0x95, 0xfe, 0x8f, 0x5d, 0x16, 0x39 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case on twist */
+{
+ .secret = (u8[32]){ 0x98, 0x57, 0xa9, 0x14, 0xe3, 0xc2, 0x90, 0x36,
+ 0xfd, 0x9a, 0x44, 0x2b, 0xa5, 0x26, 0xb5, 0xcd,
+ 0xcd, 0xf2, 0x82, 0x16, 0x15, 0x3e, 0x63, 0x6c,
+ 0x10, 0x67, 0x7a, 0xca, 0xb6, 0xbd, 0x6a, 0xa5 },
+ .b_public = (u8[32]){ 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .expected_ss = (u8[32]){ 0x4d, 0xa4, 0xe0, 0xaa, 0x07, 0x2c, 0x23, 0x2e,
+ 0xe2, 0xf0, 0xfa, 0x4e, 0x51, 0x9a, 0xe5, 0x0b,
+ 0x52, 0xc1, 0xed, 0xd0, 0x8a, 0x53, 0x4d, 0x4e,
+ 0xf3, 0x46, 0xc2, 0xe1, 0x06, 0xd2, 0x1d, 0x60 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case on twist */
+{
+ .secret = (u8[32]){ 0x48, 0xe2, 0x13, 0x0d, 0x72, 0x33, 0x05, 0xed,
+ 0x05, 0xe6, 0xe5, 0x89, 0x4d, 0x39, 0x8a, 0x5e,
+ 0x33, 0x36, 0x7a, 0x8c, 0x6a, 0xac, 0x8f, 0xcd,
+ 0xf0, 0xa8, 0x8e, 0x4b, 0x42, 0x82, 0x0d, 0xb7 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0x03, 0x00, 0x00, 0xf8, 0xff,
+ 0xff, 0x1f, 0x00, 0x00, 0xc0, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0xfe, 0xff, 0xff, 0x07, 0x00,
+ 0x00, 0xf0, 0xff, 0xff, 0x3f, 0x00, 0x00, 0x00 },
+ .expected_ss = (u8[32]){ 0x9e, 0xd1, 0x0c, 0x53, 0x74, 0x7f, 0x64, 0x7f,
+ 0x82, 0xf4, 0x51, 0x25, 0xd3, 0xde, 0x15, 0xa1,
+ 0xe6, 0xb8, 0x24, 0x49, 0x6a, 0xb4, 0x04, 0x10,
+ 0xff, 0xcc, 0x3c, 0xfe, 0x95, 0x76, 0x0f, 0x3b },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case on twist */
+{
+ .secret = (u8[32]){ 0x28, 0xf4, 0x10, 0x11, 0x69, 0x18, 0x51, 0xb3,
+ 0xa6, 0x2b, 0x64, 0x15, 0x53, 0xb3, 0x0d, 0x0d,
+ 0xfd, 0xdc, 0xb8, 0xff, 0xfc, 0xf5, 0x37, 0x00,
+ 0xa7, 0xbe, 0x2f, 0x6a, 0x87, 0x2e, 0x9f, 0xb0 },
+ .b_public = (u8[32]){ 0x00, 0x00, 0x00, 0xfc, 0xff, 0xff, 0x07, 0x00,
+ 0x00, 0xe0, 0xff, 0xff, 0x3f, 0x00, 0x00, 0x00,
+ 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0xf8, 0xff,
+ 0xff, 0x0f, 0x00, 0x00, 0xc0, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0xcf, 0x72, 0xb4, 0xaa, 0x6a, 0xa1, 0xc9, 0xf8,
+ 0x94, 0xf4, 0x16, 0x5b, 0x86, 0x10, 0x9a, 0xa4,
+ 0x68, 0x51, 0x76, 0x48, 0xe1, 0xf0, 0xcc, 0x70,
+ 0xe1, 0xab, 0x08, 0x46, 0x01, 0x76, 0x50, 0x6b },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case on twist */
+{
+ .secret = (u8[32]){ 0x18, 0xa9, 0x3b, 0x64, 0x99, 0xb9, 0xf6, 0xb3,
+ 0x22, 0x5c, 0xa0, 0x2f, 0xef, 0x41, 0x0e, 0x0a,
+ 0xde, 0xc2, 0x35, 0x32, 0x32, 0x1d, 0x2d, 0x8e,
+ 0xf1, 0xa6, 0xd6, 0x02, 0xa8, 0xc6, 0x5b, 0x83 },
+ .b_public = (u8[32]){ 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff,
+ 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0x5d, 0x50, 0xb6, 0x28, 0x36, 0xbb, 0x69, 0x57,
+ 0x94, 0x10, 0x38, 0x6c, 0xf7, 0xbb, 0x81, 0x1c,
+ 0x14, 0xbf, 0x85, 0xb1, 0xc7, 0xb1, 0x7e, 0x59,
+ 0x24, 0xc7, 0xff, 0xea, 0x91, 0xef, 0x9e, 0x12 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case on twist */
+{
+ .secret = (u8[32]){ 0xc0, 0x1d, 0x13, 0x05, 0xa1, 0x33, 0x8a, 0x1f,
+ 0xca, 0xc2, 0xba, 0x7e, 0x2e, 0x03, 0x2b, 0x42,
+ 0x7e, 0x0b, 0x04, 0x90, 0x31, 0x65, 0xac, 0xa9,
+ 0x57, 0xd8, 0xd0, 0x55, 0x3d, 0x87, 0x17, 0xb0 },
+ .b_public = (u8[32]){ 0xea, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0x19, 0x23, 0x0e, 0xb1, 0x48, 0xd5, 0xd6, 0x7c,
+ 0x3c, 0x22, 0xab, 0x1d, 0xae, 0xff, 0x80, 0xa5,
+ 0x7e, 0xae, 0x42, 0x65, 0xce, 0x28, 0x72, 0x65,
+ 0x7b, 0x2c, 0x80, 0x99, 0xfc, 0x69, 0x8e, 0x50 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0x38, 0x6f, 0x7f, 0x16, 0xc5, 0x07, 0x31, 0xd6,
+ 0x4f, 0x82, 0xe6, 0xa1, 0x70, 0xb1, 0x42, 0xa4,
+ 0xe3, 0x4f, 0x31, 0xfd, 0x77, 0x68, 0xfc, 0xb8,
+ 0x90, 0x29, 0x25, 0xe7, 0xd1, 0xe2, 0x1a, 0xbe },
+ .b_public = (u8[32]){ 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .expected_ss = (u8[32]){ 0x0f, 0xca, 0xb5, 0xd8, 0x42, 0xa0, 0x78, 0xd7,
+ 0xa7, 0x1f, 0xc5, 0x9b, 0x57, 0xbf, 0xb4, 0xca,
+ 0x0b, 0xe6, 0x87, 0x3b, 0x49, 0xdc, 0xdb, 0x9f,
+ 0x44, 0xe1, 0x4a, 0xe8, 0xfb, 0xdf, 0xa5, 0x42 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0xe0, 0x23, 0xa2, 0x89, 0xbd, 0x5e, 0x90, 0xfa,
+ 0x28, 0x04, 0xdd, 0xc0, 0x19, 0xa0, 0x5e, 0xf3,
+ 0xe7, 0x9d, 0x43, 0x4b, 0xb6, 0xea, 0x2f, 0x52,
+ 0x2e, 0xcb, 0x64, 0x3a, 0x75, 0x29, 0x6e, 0x95 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
+ 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
+ 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
+ 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00 },
+ .expected_ss = (u8[32]){ 0x54, 0xce, 0x8f, 0x22, 0x75, 0xc0, 0x77, 0xe3,
+ 0xb1, 0x30, 0x6a, 0x39, 0x39, 0xc5, 0xe0, 0x3e,
+ 0xef, 0x6b, 0xbb, 0x88, 0x06, 0x05, 0x44, 0x75,
+ 0x8d, 0x9f, 0xef, 0x59, 0xb0, 0xbc, 0x3e, 0x4f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0x68, 0xf0, 0x10, 0xd6, 0x2e, 0xe8, 0xd9, 0x26,
+ 0x05, 0x3a, 0x36, 0x1c, 0x3a, 0x75, 0xc6, 0xea,
+ 0x4e, 0xbd, 0xc8, 0x60, 0x6a, 0xb2, 0x85, 0x00,
+ 0x3a, 0x6f, 0x8f, 0x40, 0x76, 0xb0, 0x1e, 0x83 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03 },
+ .expected_ss = (u8[32]){ 0xf1, 0x36, 0x77, 0x5c, 0x5b, 0xeb, 0x0a, 0xf8,
+ 0x11, 0x0a, 0xf1, 0x0b, 0x20, 0x37, 0x23, 0x32,
+ 0x04, 0x3c, 0xab, 0x75, 0x24, 0x19, 0x67, 0x87,
+ 0x75, 0xa2, 0x23, 0xdf, 0x57, 0xc9, 0xd3, 0x0d },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0x58, 0xeb, 0xcb, 0x35, 0xb0, 0xf8, 0x84, 0x5c,
+ 0xaf, 0x1e, 0xc6, 0x30, 0xf9, 0x65, 0x76, 0xb6,
+ 0x2c, 0x4b, 0x7b, 0x6c, 0x36, 0xb2, 0x9d, 0xeb,
+ 0x2c, 0xb0, 0x08, 0x46, 0x51, 0x75, 0x5c, 0x96 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xfb, 0xff, 0xff, 0xfb, 0xff,
+ 0xff, 0xdf, 0xff, 0xff, 0xdf, 0xff, 0xff, 0xff,
+ 0xfe, 0xff, 0xff, 0xfe, 0xff, 0xff, 0xf7, 0xff,
+ 0xff, 0xf7, 0xff, 0xff, 0xbf, 0xff, 0xff, 0x3f },
+ .expected_ss = (u8[32]){ 0xbf, 0x9a, 0xff, 0xd0, 0x6b, 0x84, 0x40, 0x85,
+ 0x58, 0x64, 0x60, 0x96, 0x2e, 0xf2, 0x14, 0x6f,
+ 0xf3, 0xd4, 0x53, 0x3d, 0x94, 0x44, 0xaa, 0xb0,
+ 0x06, 0xeb, 0x88, 0xcc, 0x30, 0x54, 0x40, 0x7d },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0x18, 0x8c, 0x4b, 0xc5, 0xb9, 0xc4, 0x4b, 0x38,
+ 0xbb, 0x65, 0x8b, 0x9b, 0x2a, 0xe8, 0x2d, 0x5b,
+ 0x01, 0x01, 0x5e, 0x09, 0x31, 0x84, 0xb1, 0x7c,
+ 0xb7, 0x86, 0x35, 0x03, 0xa7, 0x83, 0xe1, 0xbb },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f },
+ .expected_ss = (u8[32]){ 0xd4, 0x80, 0xde, 0x04, 0xf6, 0x99, 0xcb, 0x3b,
+ 0xe0, 0x68, 0x4a, 0x9c, 0xc2, 0xe3, 0x12, 0x81,
+ 0xea, 0x0b, 0xc5, 0xa9, 0xdc, 0xc1, 0x57, 0xd3,
+ 0xd2, 0x01, 0x58, 0xd4, 0x6c, 0xa5, 0x24, 0x6d },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0xe0, 0x6c, 0x11, 0xbb, 0x2e, 0x13, 0xce, 0x3d,
+ 0xc7, 0x67, 0x3f, 0x67, 0xf5, 0x48, 0x22, 0x42,
+ 0x90, 0x94, 0x23, 0xa9, 0xae, 0x95, 0xee, 0x98,
+ 0x6a, 0x98, 0x8d, 0x98, 0xfa, 0xee, 0x23, 0xa2 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0x7f,
+ 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0x7f,
+ 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0x7f,
+ 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0x4c, 0x44, 0x01, 0xcc, 0xe6, 0xb5, 0x1e, 0x4c,
+ 0xb1, 0x8f, 0x27, 0x90, 0x24, 0x6c, 0x9b, 0xf9,
+ 0x14, 0xdb, 0x66, 0x77, 0x50, 0xa1, 0xcb, 0x89,
+ 0x06, 0x90, 0x92, 0xaf, 0x07, 0x29, 0x22, 0x76 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for public key */
+{
+ .secret = (u8[32]){ 0xc0, 0x65, 0x8c, 0x46, 0xdd, 0xe1, 0x81, 0x29,
+ 0x29, 0x38, 0x77, 0x53, 0x5b, 0x11, 0x62, 0xb6,
+ 0xf9, 0xf5, 0x41, 0x4a, 0x23, 0xcf, 0x4d, 0x2c,
+ 0xbc, 0x14, 0x0a, 0x4d, 0x99, 0xda, 0x2b, 0x8f },
+ .b_public = (u8[32]){ 0xeb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0x57, 0x8b, 0xa8, 0xcc, 0x2d, 0xbd, 0xc5, 0x75,
+ 0xaf, 0xcf, 0x9d, 0xf2, 0xb3, 0xee, 0x61, 0x89,
+ 0xf5, 0x33, 0x7d, 0x68, 0x54, 0xc7, 0x9b, 0x4c,
+ 0xe1, 0x65, 0xea, 0x12, 0x29, 0x3b, 0x3a, 0x0f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0xf0, 0x1e, 0x48, 0xda, 0xfa, 0xc9, 0xd7, 0xbc,
+ 0xf5, 0x89, 0xcb, 0xc3, 0x82, 0xc8, 0x78, 0xd1,
+ 0x8b, 0xda, 0x35, 0x50, 0x58, 0x9f, 0xfb, 0x5d,
+ 0x50, 0xb5, 0x23, 0xbe, 0xbe, 0x32, 0x9d, 0xae },
+ .b_public = (u8[32]){ 0xef, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0xbd, 0x36, 0xa0, 0x79, 0x0e, 0xb8, 0x83, 0x09,
+ 0x8c, 0x98, 0x8b, 0x21, 0x78, 0x67, 0x73, 0xde,
+ 0x0b, 0x3a, 0x4d, 0xf1, 0x62, 0x28, 0x2c, 0xf1,
+ 0x10, 0xde, 0x18, 0xdd, 0x48, 0x4c, 0xe7, 0x4b },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x28, 0x87, 0x96, 0xbc, 0x5a, 0xff, 0x4b, 0x81,
+ 0xa3, 0x75, 0x01, 0x75, 0x7b, 0xc0, 0x75, 0x3a,
+ 0x3c, 0x21, 0x96, 0x47, 0x90, 0xd3, 0x86, 0x99,
+ 0x30, 0x8d, 0xeb, 0xc1, 0x7a, 0x6e, 0xaf, 0x8d },
+ .b_public = (u8[32]){ 0xf0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0xb4, 0xe0, 0xdd, 0x76, 0xda, 0x7b, 0x07, 0x17,
+ 0x28, 0xb6, 0x1f, 0x85, 0x67, 0x71, 0xaa, 0x35,
+ 0x6e, 0x57, 0xed, 0xa7, 0x8a, 0x5b, 0x16, 0x55,
+ 0xcc, 0x38, 0x20, 0xfb, 0x5f, 0x85, 0x4c, 0x5c },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x98, 0xdf, 0x84, 0x5f, 0x66, 0x51, 0xbf, 0x11,
+ 0x38, 0x22, 0x1f, 0x11, 0x90, 0x41, 0xf7, 0x2b,
+ 0x6d, 0xbc, 0x3c, 0x4a, 0xce, 0x71, 0x43, 0xd9,
+ 0x9f, 0xd5, 0x5a, 0xd8, 0x67, 0x48, 0x0d, 0xa8 },
+ .b_public = (u8[32]){ 0xf1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0x6f, 0xdf, 0x6c, 0x37, 0x61, 0x1d, 0xbd, 0x53,
+ 0x04, 0xdc, 0x0f, 0x2e, 0xb7, 0xc9, 0x51, 0x7e,
+ 0xb3, 0xc5, 0x0e, 0x12, 0xfd, 0x05, 0x0a, 0xc6,
+ 0xde, 0xc2, 0x70, 0x71, 0xd4, 0xbf, 0xc0, 0x34 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0xf0, 0x94, 0x98, 0xe4, 0x6f, 0x02, 0xf8, 0x78,
+ 0x82, 0x9e, 0x78, 0xb8, 0x03, 0xd3, 0x16, 0xa2,
+ 0xed, 0x69, 0x5d, 0x04, 0x98, 0xa0, 0x8a, 0xbd,
+ 0xf8, 0x27, 0x69, 0x30, 0xe2, 0x4e, 0xdc, 0xb0 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .expected_ss = (u8[32]){ 0x4c, 0x8f, 0xc4, 0xb1, 0xc6, 0xab, 0x88, 0xfb,
+ 0x21, 0xf1, 0x8f, 0x6d, 0x4c, 0x81, 0x02, 0x40,
+ 0xd4, 0xe9, 0x46, 0x51, 0xba, 0x44, 0xf7, 0xa2,
+ 0xc8, 0x63, 0xce, 0xc7, 0xdc, 0x56, 0x60, 0x2d },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x18, 0x13, 0xc1, 0x0a, 0x5c, 0x7f, 0x21, 0xf9,
+ 0x6e, 0x17, 0xf2, 0x88, 0xc0, 0xcc, 0x37, 0x60,
+ 0x7c, 0x04, 0xc5, 0xf5, 0xae, 0xa2, 0xdb, 0x13,
+ 0x4f, 0x9e, 0x2f, 0xfc, 0x66, 0xbd, 0x9d, 0xb8 },
+ .b_public = (u8[32]){ 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80 },
+ .expected_ss = (u8[32]){ 0x1c, 0xd0, 0xb2, 0x82, 0x67, 0xdc, 0x54, 0x1c,
+ 0x64, 0x2d, 0x6d, 0x7d, 0xca, 0x44, 0xa8, 0xb3,
+ 0x8a, 0x63, 0x73, 0x6e, 0xef, 0x5c, 0x4e, 0x65,
+ 0x01, 0xff, 0xbb, 0xb1, 0x78, 0x0c, 0x03, 0x3c },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x78, 0x57, 0xfb, 0x80, 0x86, 0x53, 0x64, 0x5a,
+ 0x0b, 0xeb, 0x13, 0x8a, 0x64, 0xf5, 0xf4, 0xd7,
+ 0x33, 0xa4, 0x5e, 0xa8, 0x4c, 0x3c, 0xda, 0x11,
+ 0xa9, 0xc0, 0x6f, 0x7e, 0x71, 0x39, 0x14, 0x9e },
+ .b_public = (u8[32]){ 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80 },
+ .expected_ss = (u8[32]){ 0x87, 0x55, 0xbe, 0x01, 0xc6, 0x0a, 0x7e, 0x82,
+ 0x5c, 0xff, 0x3e, 0x0e, 0x78, 0xcb, 0x3a, 0xa4,
+ 0x33, 0x38, 0x61, 0x51, 0x6a, 0xa5, 0x9b, 0x1c,
+ 0x51, 0xa8, 0xb2, 0xa5, 0x43, 0xdf, 0xa8, 0x22 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0xe0, 0x3a, 0xa8, 0x42, 0xe2, 0xab, 0xc5, 0x6e,
+ 0x81, 0xe8, 0x7b, 0x8b, 0x9f, 0x41, 0x7b, 0x2a,
+ 0x1e, 0x59, 0x13, 0xc7, 0x23, 0xee, 0xd2, 0x8d,
+ 0x75, 0x2f, 0x8d, 0x47, 0xa5, 0x9f, 0x49, 0x8f },
+ .b_public = (u8[32]){ 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80 },
+ .expected_ss = (u8[32]){ 0x54, 0xc9, 0xa1, 0xed, 0x95, 0xe5, 0x46, 0xd2,
+ 0x78, 0x22, 0xa3, 0x60, 0x93, 0x1d, 0xda, 0x60,
+ 0xa1, 0xdf, 0x04, 0x9d, 0xa6, 0xf9, 0x04, 0x25,
+ 0x3c, 0x06, 0x12, 0xbb, 0xdc, 0x08, 0x74, 0x76 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0xf8, 0xf7, 0x07, 0xb7, 0x99, 0x9b, 0x18, 0xcb,
+ 0x0d, 0x6b, 0x96, 0x12, 0x4f, 0x20, 0x45, 0x97,
+ 0x2c, 0xa2, 0x74, 0xbf, 0xc1, 0x54, 0xad, 0x0c,
+ 0x87, 0x03, 0x8c, 0x24, 0xc6, 0xd0, 0xd4, 0xb2 },
+ .b_public = (u8[32]){ 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0xcc, 0x1f, 0x40, 0xd7, 0x43, 0xcd, 0xc2, 0x23,
+ 0x0e, 0x10, 0x43, 0xda, 0xba, 0x8b, 0x75, 0xe8,
+ 0x10, 0xf1, 0xfb, 0xab, 0x7f, 0x25, 0x52, 0x69,
+ 0xbd, 0x9e, 0xbb, 0x29, 0xe6, 0xbf, 0x49, 0x4f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0xa0, 0x34, 0xf6, 0x84, 0xfa, 0x63, 0x1e, 0x1a,
+ 0x34, 0x81, 0x18, 0xc1, 0xce, 0x4c, 0x98, 0x23,
+ 0x1f, 0x2d, 0x9e, 0xec, 0x9b, 0xa5, 0x36, 0x5b,
+ 0x4a, 0x05, 0xd6, 0x9a, 0x78, 0x5b, 0x07, 0x96 },
+ .b_public = (u8[32]){ 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0x54, 0x99, 0x8e, 0xe4, 0x3a, 0x5b, 0x00, 0x7b,
+ 0xf4, 0x99, 0xf0, 0x78, 0xe7, 0x36, 0x52, 0x44,
+ 0x00, 0xa8, 0xb5, 0xc7, 0xe9, 0xb9, 0xb4, 0x37,
+ 0x71, 0x74, 0x8c, 0x7c, 0xdf, 0x88, 0x04, 0x12 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x30, 0xb6, 0xc6, 0xa0, 0xf2, 0xff, 0xa6, 0x80,
+ 0x76, 0x8f, 0x99, 0x2b, 0xa8, 0x9e, 0x15, 0x2d,
+ 0x5b, 0xc9, 0x89, 0x3d, 0x38, 0xc9, 0x11, 0x9b,
+ 0xe4, 0xf7, 0x67, 0xbf, 0xab, 0x6e, 0x0c, 0xa5 },
+ .b_public = (u8[32]){ 0xdc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0xea, 0xd9, 0xb3, 0x8e, 0xfd, 0xd7, 0x23, 0x63,
+ 0x79, 0x34, 0xe5, 0x5a, 0xb7, 0x17, 0xa7, 0xae,
+ 0x09, 0xeb, 0x86, 0xa2, 0x1d, 0xc3, 0x6a, 0x3f,
+ 0xee, 0xb8, 0x8b, 0x75, 0x9e, 0x39, 0x1e, 0x09 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x90, 0x1b, 0x9d, 0xcf, 0x88, 0x1e, 0x01, 0xe0,
+ 0x27, 0x57, 0x50, 0x35, 0xd4, 0x0b, 0x43, 0xbd,
+ 0xc1, 0xc5, 0x24, 0x2e, 0x03, 0x08, 0x47, 0x49,
+ 0x5b, 0x0c, 0x72, 0x86, 0x46, 0x9b, 0x65, 0x91 },
+ .b_public = (u8[32]){ 0xea, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0x60, 0x2f, 0xf4, 0x07, 0x89, 0xb5, 0x4b, 0x41,
+ 0x80, 0x59, 0x15, 0xfe, 0x2a, 0x62, 0x21, 0xf0,
+ 0x7a, 0x50, 0xff, 0xc2, 0xc3, 0xfc, 0x94, 0xcf,
+ 0x61, 0xf1, 0x3d, 0x79, 0x04, 0xe8, 0x8e, 0x0e },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x80, 0x46, 0x67, 0x7c, 0x28, 0xfd, 0x82, 0xc9,
+ 0xa1, 0xbd, 0xb7, 0x1a, 0x1a, 0x1a, 0x34, 0xfa,
+ 0xba, 0x12, 0x25, 0xe2, 0x50, 0x7f, 0xe3, 0xf5,
+ 0x4d, 0x10, 0xbd, 0x5b, 0x0d, 0x86, 0x5f, 0x8e },
+ .b_public = (u8[32]){ 0xeb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0xe0, 0x0a, 0xe8, 0xb1, 0x43, 0x47, 0x12, 0x47,
+ 0xba, 0x24, 0xf1, 0x2c, 0x88, 0x55, 0x36, 0xc3,
+ 0xcb, 0x98, 0x1b, 0x58, 0xe1, 0xe5, 0x6b, 0x2b,
+ 0xaf, 0x35, 0xc1, 0x2a, 0xe1, 0xf7, 0x9c, 0x26 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x60, 0x2f, 0x7e, 0x2f, 0x68, 0xa8, 0x46, 0xb8,
+ 0x2c, 0xc2, 0x69, 0xb1, 0xd4, 0x8e, 0x93, 0x98,
+ 0x86, 0xae, 0x54, 0xfd, 0x63, 0x6c, 0x1f, 0xe0,
+ 0x74, 0xd7, 0x10, 0x12, 0x7d, 0x47, 0x24, 0x91 },
+ .b_public = (u8[32]){ 0xef, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0x98, 0xcb, 0x9b, 0x50, 0xdd, 0x3f, 0xc2, 0xb0,
+ 0xd4, 0xf2, 0xd2, 0xbf, 0x7c, 0x5c, 0xfd, 0xd1,
+ 0x0c, 0x8f, 0xcd, 0x31, 0xfc, 0x40, 0xaf, 0x1a,
+ 0xd4, 0x4f, 0x47, 0xc1, 0x31, 0x37, 0x63, 0x62 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x60, 0x88, 0x7b, 0x3d, 0xc7, 0x24, 0x43, 0x02,
+ 0x6e, 0xbe, 0xdb, 0xbb, 0xb7, 0x06, 0x65, 0xf4,
+ 0x2b, 0x87, 0xad, 0xd1, 0x44, 0x0e, 0x77, 0x68,
+ 0xfb, 0xd7, 0xe8, 0xe2, 0xce, 0x5f, 0x63, 0x9d },
+ .b_public = (u8[32]){ 0xf0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0x38, 0xd6, 0x30, 0x4c, 0x4a, 0x7e, 0x6d, 0x9f,
+ 0x79, 0x59, 0x33, 0x4f, 0xb5, 0x24, 0x5b, 0xd2,
+ 0xc7, 0x54, 0x52, 0x5d, 0x4c, 0x91, 0xdb, 0x95,
+ 0x02, 0x06, 0x92, 0x62, 0x34, 0xc1, 0xf6, 0x33 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0x78, 0xd3, 0x1d, 0xfa, 0x85, 0x44, 0x97, 0xd7,
+ 0x2d, 0x8d, 0xef, 0x8a, 0x1b, 0x7f, 0xb0, 0x06,
+ 0xce, 0xc2, 0xd8, 0xc4, 0x92, 0x46, 0x47, 0xc9,
+ 0x38, 0x14, 0xae, 0x56, 0xfa, 0xed, 0xa4, 0x95 },
+ .b_public = (u8[32]){ 0xf1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0x78, 0x6c, 0xd5, 0x49, 0x96, 0xf0, 0x14, 0xa5,
+ 0xa0, 0x31, 0xec, 0x14, 0xdb, 0x81, 0x2e, 0xd0,
+ 0x83, 0x55, 0x06, 0x1f, 0xdb, 0x5d, 0xe6, 0x80,
+ 0xa8, 0x00, 0xac, 0x52, 0x1f, 0x31, 0x8e, 0x23 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - public key >= p */
+{
+ .secret = (u8[32]){ 0xc0, 0x4c, 0x5b, 0xae, 0xfa, 0x83, 0x02, 0xdd,
+ 0xde, 0xd6, 0xa4, 0xbb, 0x95, 0x77, 0x61, 0xb4,
+ 0xeb, 0x97, 0xae, 0xfa, 0x4f, 0xc3, 0xb8, 0x04,
+ 0x30, 0x85, 0xf9, 0x6a, 0x56, 0x59, 0xb3, 0xa5 },
+ .b_public = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff },
+ .expected_ss = (u8[32]){ 0x29, 0xae, 0x8b, 0xc7, 0x3e, 0x9b, 0x10, 0xa0,
+ 0x8b, 0x4f, 0x68, 0x1c, 0x43, 0xc3, 0xe0, 0xac,
+ 0x1a, 0x17, 0x1d, 0x31, 0xb3, 0x8f, 0x1a, 0x48,
+ 0xef, 0xba, 0x29, 0xae, 0x63, 0x9e, 0xa1, 0x34 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - RFC 7748 */
+{
+ .secret = (u8[32]){ 0xa0, 0x46, 0xe3, 0x6b, 0xf0, 0x52, 0x7c, 0x9d,
+ 0x3b, 0x16, 0x15, 0x4b, 0x82, 0x46, 0x5e, 0xdd,
+ 0x62, 0x14, 0x4c, 0x0a, 0xc1, 0xfc, 0x5a, 0x18,
+ 0x50, 0x6a, 0x22, 0x44, 0xba, 0x44, 0x9a, 0x44 },
+ .b_public = (u8[32]){ 0xe6, 0xdb, 0x68, 0x67, 0x58, 0x30, 0x30, 0xdb,
+ 0x35, 0x94, 0xc1, 0xa4, 0x24, 0xb1, 0x5f, 0x7c,
+ 0x72, 0x66, 0x24, 0xec, 0x26, 0xb3, 0x35, 0x3b,
+ 0x10, 0xa9, 0x03, 0xa6, 0xd0, 0xab, 0x1c, 0x4c },
+ .expected_ss = (u8[32]){ 0xc3, 0xda, 0x55, 0x37, 0x9d, 0xe9, 0xc6, 0x90,
+ 0x8e, 0x94, 0xea, 0x4d, 0xf2, 0x8d, 0x08, 0x4f,
+ 0x32, 0xec, 0xcf, 0x03, 0x49, 0x1c, 0x71, 0xf7,
+ 0x54, 0xb4, 0x07, 0x55, 0x77, 0xa2, 0x85, 0x52 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - RFC 7748 */
+{
+ .secret = (u8[32]){ 0x48, 0x66, 0xe9, 0xd4, 0xd1, 0xb4, 0x67, 0x3c,
+ 0x5a, 0xd2, 0x26, 0x91, 0x95, 0x7d, 0x6a, 0xf5,
+ 0xc1, 0x1b, 0x64, 0x21, 0xe0, 0xea, 0x01, 0xd4,
+ 0x2c, 0xa4, 0x16, 0x9e, 0x79, 0x18, 0xba, 0x4d },
+ .b_public = (u8[32]){ 0xe5, 0x21, 0x0f, 0x12, 0x78, 0x68, 0x11, 0xd3,
+ 0xf4, 0xb7, 0x95, 0x9d, 0x05, 0x38, 0xae, 0x2c,
+ 0x31, 0xdb, 0xe7, 0x10, 0x6f, 0xc0, 0x3c, 0x3e,
+ 0xfc, 0x4c, 0xd5, 0x49, 0xc7, 0x15, 0xa4, 0x13 },
+ .expected_ss = (u8[32]){ 0x95, 0xcb, 0xde, 0x94, 0x76, 0xe8, 0x90, 0x7d,
+ 0x7a, 0xad, 0xe4, 0x5c, 0xb4, 0xb8, 0x73, 0xf8,
+ 0x8b, 0x59, 0x5a, 0x68, 0x79, 0x9f, 0xa1, 0x52,
+ 0xe6, 0xf8, 0xf7, 0x64, 0x7a, 0xac, 0x79, 0x57 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x0a, 0xb4, 0xe7, 0x63, 0x80, 0xd8, 0x4d, 0xde,
+ 0x4f, 0x68, 0x33, 0xc5, 0x8f, 0x2a, 0x9f, 0xb8,
+ 0xf8, 0x3b, 0xb0, 0x16, 0x9b, 0x17, 0x2b, 0xe4,
+ 0xb6, 0xe0, 0x59, 0x28, 0x87, 0x74, 0x1a, 0x36 },
+ .expected_ss = (u8[32]){ 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x89, 0xe1, 0x0d, 0x57, 0x01, 0xb4, 0x33, 0x7d,
+ 0x2d, 0x03, 0x21, 0x81, 0x53, 0x8b, 0x10, 0x64,
+ 0xbd, 0x40, 0x84, 0x40, 0x1c, 0xec, 0xa1, 0xfd,
+ 0x12, 0x66, 0x3a, 0x19, 0x59, 0x38, 0x80, 0x00 },
+ .expected_ss = (u8[32]){ 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x2b, 0x55, 0xd3, 0xaa, 0x4a, 0x8f, 0x80, 0xc8,
+ 0xc0, 0xb2, 0xae, 0x5f, 0x93, 0x3e, 0x85, 0xaf,
+ 0x49, 0xbe, 0xac, 0x36, 0xc2, 0xfa, 0x73, 0x94,
+ 0xba, 0xb7, 0x6c, 0x89, 0x33, 0xf8, 0xf8, 0x1d },
+ .expected_ss = (u8[32]){ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x63, 0xe5, 0xb1, 0xfe, 0x96, 0x01, 0xfe, 0x84,
+ 0x38, 0x5d, 0x88, 0x66, 0xb0, 0x42, 0x12, 0x62,
+ 0xf7, 0x8f, 0xbf, 0xa5, 0xaf, 0xf9, 0x58, 0x5e,
+ 0x62, 0x66, 0x79, 0xb1, 0x85, 0x47, 0xd9, 0x59 },
+ .expected_ss = (u8[32]){ 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0xe4, 0x28, 0xf3, 0xda, 0xc1, 0x78, 0x09, 0xf8,
+ 0x27, 0xa5, 0x22, 0xce, 0x32, 0x35, 0x50, 0x58,
+ 0xd0, 0x73, 0x69, 0x36, 0x4a, 0xa7, 0x89, 0x02,
+ 0xee, 0x10, 0x13, 0x9b, 0x9f, 0x9d, 0xd6, 0x53 },
+ .expected_ss = (u8[32]){ 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0xb3, 0xb5, 0x0e, 0x3e, 0xd3, 0xa4, 0x07, 0xb9,
+ 0x5d, 0xe9, 0x42, 0xef, 0x74, 0x57, 0x5b, 0x5a,
+ 0xb8, 0xa1, 0x0c, 0x09, 0xee, 0x10, 0x35, 0x44,
+ 0xd6, 0x0b, 0xdf, 0xed, 0x81, 0x38, 0xab, 0x2b },
+ .expected_ss = (u8[32]){ 0xf9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x21, 0x3f, 0xff, 0xe9, 0x3d, 0x5e, 0xa8, 0xcd,
+ 0x24, 0x2e, 0x46, 0x28, 0x44, 0x02, 0x99, 0x22,
+ 0xc4, 0x3c, 0x77, 0xc9, 0xe3, 0xe4, 0x2f, 0x56,
+ 0x2f, 0x48, 0x5d, 0x24, 0xc5, 0x01, 0xa2, 0x0b },
+ .expected_ss = (u8[32]){ 0xf3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x91, 0xb2, 0x32, 0xa1, 0x78, 0xb3, 0xcd, 0x53,
+ 0x09, 0x32, 0x44, 0x1e, 0x61, 0x39, 0x41, 0x8f,
+ 0x72, 0x17, 0x22, 0x92, 0xf1, 0xda, 0x4c, 0x18,
+ 0x34, 0xfc, 0x5e, 0xbf, 0xef, 0xb5, 0x1e, 0x3f },
+ .expected_ss = (u8[32]){ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x04, 0x5c, 0x6e, 0x11, 0xc5, 0xd3, 0x32, 0x55,
+ 0x6c, 0x78, 0x22, 0xfe, 0x94, 0xeb, 0xf8, 0x9b,
+ 0x56, 0xa3, 0x87, 0x8d, 0xc2, 0x7c, 0xa0, 0x79,
+ 0x10, 0x30, 0x58, 0x84, 0x9f, 0xab, 0xcb, 0x4f },
+ .expected_ss = (u8[32]){ 0xe5, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x1c, 0xa2, 0x19, 0x0b, 0x71, 0x16, 0x35, 0x39,
+ 0x06, 0x3c, 0x35, 0x77, 0x3b, 0xda, 0x0c, 0x9c,
+ 0x92, 0x8e, 0x91, 0x36, 0xf0, 0x62, 0x0a, 0xeb,
+ 0x09, 0x3f, 0x09, 0x91, 0x97, 0xb7, 0xf7, 0x4e },
+ .expected_ss = (u8[32]){ 0xe3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0xf7, 0x6e, 0x90, 0x10, 0xac, 0x33, 0xc5, 0x04,
+ 0x3b, 0x2d, 0x3b, 0x76, 0xa8, 0x42, 0x17, 0x10,
+ 0x00, 0xc4, 0x91, 0x62, 0x22, 0xe9, 0xe8, 0x58,
+ 0x97, 0xa0, 0xae, 0xc7, 0xf6, 0x35, 0x0b, 0x3c },
+ .expected_ss = (u8[32]){ 0xdd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0xbb, 0x72, 0x68, 0x8d, 0x8f, 0x8a, 0xa7, 0xa3,
+ 0x9c, 0xd6, 0x06, 0x0c, 0xd5, 0xc8, 0x09, 0x3c,
+ 0xde, 0xc6, 0xfe, 0x34, 0x19, 0x37, 0xc3, 0x88,
+ 0x6a, 0x99, 0x34, 0x6c, 0xd0, 0x7f, 0xaa, 0x55 },
+ .expected_ss = (u8[32]){ 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x88, 0xfd, 0xde, 0xa1, 0x93, 0x39, 0x1c, 0x6a,
+ 0x59, 0x33, 0xef, 0x9b, 0x71, 0x90, 0x15, 0x49,
+ 0x44, 0x72, 0x05, 0xaa, 0xe9, 0xda, 0x92, 0x8a,
+ 0x6b, 0x91, 0xa3, 0x52, 0xba, 0x10, 0xf4, 0x1f },
+ .expected_ss = (u8[32]){ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - edge case for shared secret */
+{
+ .secret = (u8[32]){ 0xa0, 0xa4, 0xf1, 0x30, 0xb9, 0x8a, 0x5b, 0xe4,
+ 0xb1, 0xce, 0xdb, 0x7c, 0xb8, 0x55, 0x84, 0xa3,
+ 0x52, 0x0e, 0x14, 0x2d, 0x47, 0x4d, 0xc9, 0xcc,
+ 0xb9, 0x09, 0xa0, 0x73, 0xa9, 0x76, 0xbf, 0x63 },
+ .b_public = (u8[32]){ 0x30, 0x3b, 0x39, 0x2f, 0x15, 0x31, 0x16, 0xca,
+ 0xd9, 0xcc, 0x68, 0x2a, 0x00, 0xcc, 0xc4, 0x4c,
+ 0x95, 0xff, 0x0d, 0x3b, 0xbe, 0x56, 0x8b, 0xeb,
+ 0x6c, 0x4e, 0x73, 0x9b, 0xaf, 0xdc, 0x2c, 0x68 },
+ .expected_ss = (u8[32]){ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - checking for overflow */
+{
+ .secret = (u8[32]){ 0xc8, 0x17, 0x24, 0x70, 0x40, 0x00, 0xb2, 0x6d,
+ 0x31, 0x70, 0x3c, 0xc9, 0x7e, 0x3a, 0x37, 0x8d,
+ 0x56, 0xfa, 0xd8, 0x21, 0x93, 0x61, 0xc8, 0x8c,
+ 0xca, 0x8b, 0xd7, 0xc5, 0x71, 0x9b, 0x12, 0xb2 },
+ .b_public = (u8[32]){ 0xfd, 0x30, 0x0a, 0xeb, 0x40, 0xe1, 0xfa, 0x58,
+ 0x25, 0x18, 0x41, 0x2b, 0x49, 0xb2, 0x08, 0xa7,
+ 0x84, 0x2b, 0x1e, 0x1f, 0x05, 0x6a, 0x04, 0x01,
+ 0x78, 0xea, 0x41, 0x41, 0x53, 0x4f, 0x65, 0x2d },
+ .expected_ss = (u8[32]){ 0xb7, 0x34, 0x10, 0x5d, 0xc2, 0x57, 0x58, 0x5d,
+ 0x73, 0xb5, 0x66, 0xcc, 0xb7, 0x6f, 0x06, 0x27,
+ 0x95, 0xcc, 0xbe, 0xc8, 0x91, 0x28, 0xe5, 0x2b,
+ 0x02, 0xf3, 0xe5, 0x96, 0x39, 0xf1, 0x3c, 0x46 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - checking for overflow */
+{
+ .secret = (u8[32]){ 0xc8, 0x17, 0x24, 0x70, 0x40, 0x00, 0xb2, 0x6d,
+ 0x31, 0x70, 0x3c, 0xc9, 0x7e, 0x3a, 0x37, 0x8d,
+ 0x56, 0xfa, 0xd8, 0x21, 0x93, 0x61, 0xc8, 0x8c,
+ 0xca, 0x8b, 0xd7, 0xc5, 0x71, 0x9b, 0x12, 0xb2 },
+ .b_public = (u8[32]){ 0xc8, 0xef, 0x79, 0xb5, 0x14, 0xd7, 0x68, 0x26,
+ 0x77, 0xbc, 0x79, 0x31, 0xe0, 0x6e, 0xe5, 0xc2,
+ 0x7c, 0x9b, 0x39, 0x2b, 0x4a, 0xe9, 0x48, 0x44,
+ 0x73, 0xf5, 0x54, 0xe6, 0x67, 0x8e, 0xcc, 0x2e },
+ .expected_ss = (u8[32]){ 0x64, 0x7a, 0x46, 0xb6, 0xfc, 0x3f, 0x40, 0xd6,
+ 0x21, 0x41, 0xee, 0x3c, 0xee, 0x70, 0x6b, 0x4d,
+ 0x7a, 0x92, 0x71, 0x59, 0x3a, 0x7b, 0x14, 0x3e,
+ 0x8e, 0x2e, 0x22, 0x79, 0x88, 0x3e, 0x45, 0x50 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+
+},
+/* wycheproof - checking for overflow */
+{
+ .secret = (u8[32]){ 0xc8, 0x17, 0x24, 0x70, 0x40, 0x00, 0xb2, 0x6d,
+ 0x31, 0x70, 0x3c, 0xc9, 0x7e, 0x3a, 0x37, 0x8d,
+ 0x56, 0xfa, 0xd8, 0x21, 0x93, 0x61, 0xc8, 0x8c,
+ 0xca, 0x8b, 0xd7, 0xc5, 0x71, 0x9b, 0x12, 0xb2 },
+ .b_public = (u8[32]){ 0x64, 0xae, 0xac, 0x25, 0x04, 0x14, 0x48, 0x61,
+ 0x53, 0x2b, 0x7b, 0xbc, 0xb6, 0xc8, 0x7d, 0x67,
+ 0xdd, 0x4c, 0x1f, 0x07, 0xeb, 0xc2, 0xe0, 0x6e,
+ 0xff, 0xb9, 0x5a, 0xec, 0xc6, 0x17, 0x0b, 0x2c },
+ .expected_ss = (u8[32]){ 0x4f, 0xf0, 0x3d, 0x5f, 0xb4, 0x3c, 0xd8, 0x65,
+ 0x7a, 0x3c, 0xf3, 0x7c, 0x13, 0x8c, 0xad, 0xce,
+ 0xcc, 0xe5, 0x09, 0xe4, 0xeb, 0xa0, 0x89, 0xd0,
+ 0xef, 0x40, 0xb4, 0xe4, 0xfb, 0x94, 0x61, 0x55 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+},
+/* wycheproof - checking for overflow */
+{
+ .secret = (u8[32]){ 0xc8, 0x17, 0x24, 0x70, 0x40, 0x00, 0xb2, 0x6d,
+ 0x31, 0x70, 0x3c, 0xc9, 0x7e, 0x3a, 0x37, 0x8d,
+ 0x56, 0xfa, 0xd8, 0x21, 0x93, 0x61, 0xc8, 0x8c,
+ 0xca, 0x8b, 0xd7, 0xc5, 0x71, 0x9b, 0x12, 0xb2 },
+ .b_public = (u8[32]){ 0xbf, 0x68, 0xe3, 0x5e, 0x9b, 0xdb, 0x7e, 0xee,
+ 0x1b, 0x50, 0x57, 0x02, 0x21, 0x86, 0x0f, 0x5d,
+ 0xcd, 0xad, 0x8a, 0xcb, 0xab, 0x03, 0x1b, 0x14,
+ 0x97, 0x4c, 0xc4, 0x90, 0x13, 0xc4, 0x98, 0x31 },
+ .expected_ss = (u8[32]){ 0x21, 0xce, 0xe5, 0x2e, 0xfd, 0xbc, 0x81, 0x2e,
+ 0x1d, 0x02, 0x1a, 0x4a, 0xf1, 0xe1, 0xd8, 0xbc,
+ 0x4d, 0xb3, 0xc4, 0x00, 0xe4, 0xd2, 0xa2, 0xc5,
+ 0x6a, 0x39, 0x26, 0xdb, 0x4d, 0x99, 0xc6, 0x5b },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+},
+/* wycheproof - checking for overflow */
+{
+ .secret = (u8[32]){ 0xc8, 0x17, 0x24, 0x70, 0x40, 0x00, 0xb2, 0x6d,
+ 0x31, 0x70, 0x3c, 0xc9, 0x7e, 0x3a, 0x37, 0x8d,
+ 0x56, 0xfa, 0xd8, 0x21, 0x93, 0x61, 0xc8, 0x8c,
+ 0xca, 0x8b, 0xd7, 0xc5, 0x71, 0x9b, 0x12, 0xb2 },
+ .b_public = (u8[32]){ 0x53, 0x47, 0xc4, 0x91, 0x33, 0x1a, 0x64, 0xb4,
+ 0x3d, 0xdc, 0x68, 0x30, 0x34, 0xe6, 0x77, 0xf5,
+ 0x3d, 0xc3, 0x2b, 0x52, 0xa5, 0x2a, 0x57, 0x7c,
+ 0x15, 0xa8, 0x3b, 0xf2, 0x98, 0xe9, 0x9f, 0x19 },
+ .expected_ss = (u8[32]){ 0x18, 0xcb, 0x89, 0xe4, 0xe2, 0x0c, 0x0c, 0x2b,
+ 0xd3, 0x24, 0x30, 0x52, 0x45, 0x26, 0x6c, 0x93,
+ 0x27, 0x69, 0x0b, 0xbe, 0x79, 0xac, 0xb8, 0x8f,
+ 0x5b, 0x8f, 0xb3, 0xf7, 0x4e, 0xca, 0x3e, 0x52 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+},
+/* wycheproof - private key == -1 (mod order) */
+{
+ .secret = (u8[32]){ 0xa0, 0x23, 0xcd, 0xd0, 0x83, 0xef, 0x5b, 0xb8,
+ 0x2f, 0x10, 0xd6, 0x2e, 0x59, 0xe1, 0x5a, 0x68,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50 },
+ .b_public = (u8[32]){ 0x25, 0x8e, 0x04, 0x52, 0x3b, 0x8d, 0x25, 0x3e,
+ 0xe6, 0x57, 0x19, 0xfc, 0x69, 0x06, 0xc6, 0x57,
+ 0x19, 0x2d, 0x80, 0x71, 0x7e, 0xdc, 0x82, 0x8f,
+ 0xa0, 0xaf, 0x21, 0x68, 0x6e, 0x2f, 0xaa, 0x75 },
+ .expected_ss = (u8[32]){ 0x25, 0x8e, 0x04, 0x52, 0x3b, 0x8d, 0x25, 0x3e,
+ 0xe6, 0x57, 0x19, 0xfc, 0x69, 0x06, 0xc6, 0x57,
+ 0x19, 0x2d, 0x80, 0x71, 0x7e, 0xdc, 0x82, 0x8f,
+ 0xa0, 0xaf, 0x21, 0x68, 0x6e, 0x2f, 0xaa, 0x75 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+},
+/* wycheproof - private key == 1 (mod order) on twist */
+{
+ .secret = (u8[32]){ 0x58, 0x08, 0x3d, 0xd2, 0x61, 0xad, 0x91, 0xef,
+ 0xf9, 0x52, 0x32, 0x2e, 0xc8, 0x24, 0xc6, 0x82,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x5f },
+ .b_public = (u8[32]){ 0x2e, 0xae, 0x5e, 0xc3, 0xdd, 0x49, 0x4e, 0x9f,
+ 0x2d, 0x37, 0xd2, 0x58, 0xf8, 0x73, 0xa8, 0xe6,
+ 0xe9, 0xd0, 0xdb, 0xd1, 0xe3, 0x83, 0xef, 0x64,
+ 0xd9, 0x8b, 0xb9, 0x1b, 0x3e, 0x0b, 0xe0, 0x35 },
+ .expected_ss = (u8[32]){ 0x2e, 0xae, 0x5e, 0xc3, 0xdd, 0x49, 0x4e, 0x9f,
+ 0x2d, 0x37, 0xd2, 0x58, 0xf8, 0x73, 0xa8, 0xe6,
+ 0xe9, 0xd0, 0xdb, 0xd1, 0xe3, 0x83, 0xef, 0x64,
+ 0xd9, 0x8b, 0xb9, 0x1b, 0x3e, 0x0b, 0xe0, 0x35 },
+ .secret_size = 32,
+ .b_public_size = 32,
+ .expected_ss_size = 32,
+}
+};
+
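/*
 * Illustration only, not part of this patch: a minimal sketch of how one of
 * the wycheproof vectors above would be exercised through the kernel's KPP
 * API, assuming the "curve25519" algorithm is registered (e.g. by the new
 * curve25519-generic module).  The helper name is hypothetical; buffers are
 * assumed kmalloc'd, since scatterlists cannot map stack memory.
 */
#include <crypto/kpp.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>

static int x25519_check_one(u8 *secret, u8 *b_public, const u8 *expected_ss)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct crypto_kpp *tfm;
	struct kpp_request *req;
	struct scatterlist src, dst;
	u8 *ss;
	int err;

	tfm = crypto_alloc_kpp("curve25519", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ss = kmalloc(32, GFP_KERNEL);
	if (!ss) {
		err = -ENOMEM;
		goto out_tfm;
	}

	/* For curve25519 the "secret" is the raw 32-byte scalar. */
	err = crypto_kpp_set_secret(tfm, secret, 32);
	if (err)
		goto out_ss;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_ss;
	}

	sg_init_one(&src, b_public, 32);
	sg_init_one(&dst, ss, 32);
	kpp_request_set_input(req, &src, 32);
	kpp_request_set_output(req, &dst, 32);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				 CRYPTO_TFM_REQ_MAY_SLEEP,
				 crypto_req_done, &wait);

	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (!err && memcmp(ss, expected_ss, 32))
		err = -EINVAL;	/* computed shared secret != .expected_ss */

	kpp_request_free(req);
out_ss:
	kfree(ss);
out_tfm:
	crypto_free_kpp(tfm);
	return err;
}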
static const struct kpp_testvec ecdh_tv_template[] = {
{
#ifndef CONFIG_CRYPTO_FIPS
@@ -2628,6 +3859,62 @@ static const struct hash_testvec sm3_tv_template[] = {
}
};

+/* Example vectors below taken from
+ * GM/T 0042-2015 Appendix D.3
+ */
+static const struct hash_testvec hmac_sm3_tv_template[] = {
+ {
+ .key = "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18"
+ "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20",
+ .ksize = 32,
+ .plaintext = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+ .psize = 112,
+ .digest = "\xca\x05\xe1\x44\xed\x05\xd1\x85"
+ "\x78\x40\xd1\xf3\x18\xa4\xa8\x66"
+ "\x9e\x55\x9f\xc8\x39\x1f\x41\x44"
+ "\x85\xbf\xdf\x7b\xb4\x08\x96\x3a",
+ }, {
+ .key = "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18"
+ "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20"
+ "\x21\x22\x23\x24\x25",
+ .ksize = 37,
+ .plaintext = "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd"
+ "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd"
+ "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd"
+ "\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd",
+ .psize = 50,
+ .digest = "\x22\x0b\xf5\x79\xde\xd5\x55\x39"
+ "\x3f\x01\x59\xf6\x6c\x99\x87\x78"
+ "\x22\xa3\xec\xf6\x10\xd1\x55\x21"
+ "\x54\xb4\x1d\x44\xb9\x4d\xb3\xae",
+ }, {
+ .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b"
+ "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b"
+ "\x0b\x0b\x0b\x0b\x0b\x0b",
+ .ksize = 32,
+ .plaintext = "Hi There",
+ .psize = 8,
+ .digest = "\xc0\xba\x18\xc6\x8b\x90\xc8\x8b"
+ "\xc0\x7d\xe7\x94\xbf\xc7\xd2\xc8"
+ "\xd1\x9e\xc3\x1e\xd8\x77\x3b\xc2"
+ "\xb3\x90\xc9\x60\x4e\x0b\xe1\x1e",
+ }, {
+ .key = "Jefe",
+ .ksize = 4,
+ .plaintext = "what do ya want for nothing?",
+ .psize = 28,
+ .digest = "\x2e\x87\xf1\xd1\x68\x62\xe6\xd9"
+ "\x64\xb5\x0a\x52\x00\xbf\x2b\x10"
+ "\xb7\x64\xfa\xa9\x68\x0a\x29\x6a"
+ "\x24\x05\xf2\x4b\xec\x39\xf8\x82",
+ },
+};
+
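/*
 * Illustration only, not part of this patch: a sketch of computing one of
 * the GM/T 0042-2015 digests above through the synchronous hash API,
 * assuming "hmac(sm3)" resolves (CRYPTO_HMAC plus CRYPTO_SM3).  The helper
 * name is hypothetical; out must hold the 32-byte SM3 digest.
 */
#include <crypto/hash.h>

static int hmac_sm3_digest(const u8 *key, unsigned int ksize,
			   const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("hmac(sm3)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, ksize);
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* One-shot digest over the whole .plaintext buffer. */
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}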
/*
 * SHA1 test vectors from FIPS PUB 180-1
* Long vector from CAVS 5.0
@@ -11846,6 +13133,133 @@ static const struct cipher_testvec sm4_ctr_tv_template[] = {
}
};

+static const struct cipher_testvec sm4_ctr_rfc3686_tv_template[] = {
+ {
+ .key = "\xae\x68\x52\xf8\x12\x10\x67\xcc"
+ "\x4b\xf7\xa5\x76\x55\x77\xf3\x9e"
+ "\x00\x00\x00\x30",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ptext = "Single block msg",
+ .ctext = "\x20\x9b\x77\x31\xd3\x65\xdb\xab"
+ "\x9e\x48\x74\x7e\xbd\x13\x83\xeb",
+ .len = 16,
+ }, {
+ .key = "\x7e\x24\x06\x78\x17\xfa\xe0\xd7"
+ "\x43\xd6\xce\x1f\x32\x53\x91\x63"
+ "\x00\x6c\xb6\xdb",
+ .klen = 20,
+ .iv = "\xc0\x54\x3b\x59\xda\x48\xd9\x0b",
+ .ptext = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
+ .ctext = "\x33\xe0\x28\x01\x92\xed\xc9\x1e"
+ "\x97\x35\xd9\x4a\xec\xd4\xbc\x23"
+ "\x4f\x35\x9f\x1c\x55\x1f\xe0\x27"
+ "\xe0\xdf\xc5\x43\xbc\xb0\x23\x94",
+ .len = 32,
+ }
+};
+
+static const struct cipher_testvec sm4_ofb_tv_template[] = {
+ { /* From: draft-ribose-cfrg-sm4-02, paragraph 12.2.3 */
+ .key = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .klen = 16,
+ .iv = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .ptext = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10"
+ "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .ctext = "\x69\x3d\x9a\x53\x5b\xad\x5b\xb1"
+ "\x78\x6f\x53\xd7\x25\x3a\x70\x56"
+ "\xf2\x07\x5d\x28\xb5\x23\x5f\x58"
+ "\xd5\x00\x27\xe4\x17\x7d\x2b\xce",
+ .len = 32,
+ }, { /* From: draft-ribose-cfrg-sm4-09, appendix A.2.3, Example 1 */
+ .key = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .klen = 16,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .ptext = "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb"
+ "\xcc\xcc\xcc\xcc\xdd\xdd\xdd\xdd"
+ "\xee\xee\xee\xee\xff\xff\xff\xff"
+ "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb",
+ .ctext = "\xac\x32\x36\xcb\x86\x1d\xd3\x16"
+ "\xe6\x41\x3b\x4e\x3c\x75\x24\xb7"
+ "\x1d\x01\xac\xa2\x48\x7c\xa5\x82"
+ "\xcb\xf5\x46\x3e\x66\x98\x53\x9b",
+ .len = 32,
+ }, { /* From: draft-ribose-cfrg-sm4-09, appendix A.2.3, Example 2 */
+ .key = "\xfe\xdc\xba\x98\x76\x54\x32\x10"
+ "\x01\x23\x45\x67\x89\xab\xcd\xef",
+ .klen = 16,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .ptext = "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb"
+ "\xcc\xcc\xcc\xcc\xdd\xdd\xdd\xdd"
+ "\xee\xee\xee\xee\xff\xff\xff\xff"
+ "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb",
+ .ctext = "\x5d\xcc\xcd\x25\xa8\x4b\xa1\x65"
+ "\x60\xd7\xf2\x65\x88\x70\x68\x49"
+ "\x33\xfa\x16\xbd\x5c\xd9\xc8\x56"
+ "\xca\xca\xa1\xe1\x01\x89\x7a\x97",
+ .len = 32,
+ }
+};
+
+static const struct cipher_testvec sm4_cfb_tv_template[] = {
+ { /* From: draft-ribose-cfrg-sm4-02, paragraph 12.2.4 */
+ .key = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .klen = 16,
+ .iv = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .ptext = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10"
+ "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .ctext = "\x69\x3d\x9a\x53\x5b\xad\x5b\xb1"
+ "\x78\x6f\x53\xd7\x25\x3a\x70\x56"
+ "\x9e\xd2\x58\xa8\x5a\x04\x67\xcc"
+ "\x92\xaa\xb3\x93\xdd\x97\x89\x95",
+ .len = 32,
+ }, { /* From: draft-ribose-cfrg-sm4-09, appendix A.2.4, Example 1 */
+ .key = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xfe\xdc\xba\x98\x76\x54\x32\x10",
+ .klen = 16,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .ptext = "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb"
+ "\xcc\xcc\xcc\xcc\xdd\xdd\xdd\xdd"
+ "\xee\xee\xee\xee\xff\xff\xff\xff"
+ "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb",
+ .ctext = "\xac\x32\x36\xcb\x86\x1d\xd3\x16"
+ "\xe6\x41\x3b\x4e\x3c\x75\x24\xb7"
+ "\x69\xd4\xc5\x4e\xd4\x33\xb9\xa0"
+ "\x34\x60\x09\xbe\xb3\x7b\x2b\x3f",
+ .len = 32,
+ }, { /* From: draft-ribose-cfrg-sm4-09, appendix A.2.4, Example 2 */
+ .key = "\xfe\xdc\xba\x98\x76\x54\x32\x10"
+ "\x01\x23\x45\x67\x89\xab\xcd\xef",
+ .klen = 16,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .ptext = "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb"
+ "\xcc\xcc\xcc\xcc\xdd\xdd\xdd\xdd"
+ "\xee\xee\xee\xee\xff\xff\xff\xff"
+ "\xaa\xaa\xaa\xaa\xbb\xbb\xbb\xbb",
+ .ctext = "\x5d\xcc\xcd\x25\xa8\x4b\xa1\x65"
+ "\x60\xd7\xf2\x65\x88\x70\x68\x49"
+ "\x0d\x9b\x86\xff\x20\xc3\xbf\xe1"
+ "\x15\xff\xa0\x2c\xa6\x19\x2c\xc5",
+ .len = 32,
+ }
+};
+
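/*
 * Illustration only, not part of this patch: a sketch of running the SM4
 * vectors above through the skcipher API.  alg would be "cfb(sm4)",
 * "ofb(sm4)" or "rfc3686(ctr(sm4))"; for the rfc3686 template the last
 * four key bytes are the per-tfm nonce (hence .klen = 20 above) and .iv
 * carries the 8-byte explicit IV.  The helper name is hypothetical and
 * buffers are assumed kmalloc'd.
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int sm4_encrypt_in_place(const char *alg, const u8 *key,
				unsigned int klen, u8 *iv,
				u8 *buf, unsigned int len)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_skcipher(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, klen);
	if (err)
		goto out;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	/* src == dst: encrypt in place; the cipher advances iv as it runs. */
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
	skcipher_request_free(req);
out:
	crypto_free_skcipher(tfm);
	return err;
}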
/* Cast6 test vectors from RFC 2612 */
static const struct cipher_testvec cast6_tv_template[] = {
{
@@ -17043,6 +18457,198 @@ static const struct aead_testvec aes_gcm_tv_template[] = {
"\x25\x19\x49\x8e\x80\xf1\x47\x8f"
"\x37\xba\x55\xbd\x6d\x27\x61\x8c",
.clen = 76,
+ }, {
+ .key = "\x62\x35\xf8\x95\xfc\xa5\xeb\xf6"
+ "\x0e\x92\x12\x04\xd3\xa1\x3f\x2e"
+ "\x8b\x32\xcf\xe7\x44\xed\x13\x59"
+ "\x04\x38\x77\xb0\xb9\xad\xb4\x38",
+ .klen = 32,
+ .iv = "\x00\xff\xff\xff\xff\x00\x00\xff"
+ "\xff\xff\x00\xff",
+ .ptext = "\x42\xc1\xcc\x08\x48\x6f\x41\x3f"
+ "\x2f\x11\x66\x8b\x2a\x16\xf0\xe0"
+ "\x58\x83\xf0\xc3\x70\x14\xc0\x5b"
+ "\x3f\xec\x1d\x25\x3c\x51\xd2\x03"
+ "\xcf\x59\x74\x1f\xb2\x85\xb4\x07"
+ "\xc6\x6a\x63\x39\x8a\x5b\xde\xcb"
+ "\xaf\x08\x44\xbd\x6f\x91\x15\xe1"
+ "\xf5\x7a\x6e\x18\xbd\xdd\x61\x50"
+ "\x59\xa9\x97\xab\xbb\x0e\x74\x5c"
+ "\x00\xa4\x43\x54\x04\x54\x9b\x3b"
+ "\x77\xec\xfd\x5c\xa6\xe8\x7b\x08"
+ "\xae\xe6\x10\x3f\x32\x65\xd1\xfc"
+ "\xa4\x1d\x2c\x31\xfb\x33\x7a\xb3"
+ "\x35\x23\xf4\x20\x41\xd4\xad\x82"
+ "\x8b\xa4\xad\x96\x1c\x20\x53\xbe"
+ "\x0e\xa6\xf4\xdc\x78\x49\x3e\x72"
+ "\xb1\xa9\xb5\x83\xcb\x08\x54\xb7"
+ "\xad\x49\x3a\xae\x98\xce\xa6\x66"
+ "\x10\x30\x90\x8c\x55\x83\xd7\x7c"
+ "\x8b\xe6\x53\xde\xd2\x6e\x18\x21"
+ "\x01\x52\xd1\x9f\x9d\xbb\x9c\x73"
+ "\x57\xcc\x89\x09\x75\x9b\x78\x70"
+ "\xed\x26\x97\x4d\xb4\xe4\x0c\xa5"
+ "\xfa\x70\x04\x70\xc6\x96\x1c\x7d"
+ "\x54\x41\x77\xa8\xe3\xb0\x7e\x96"
+ "\x82\xd9\xec\xa2\x87\x68\x55\xf9"
+ "\x8f\x9e\x73\x43\x47\x6a\x08\x36"
+ "\x93\x67\xa8\x2d\xde\xac\x41\xa9"
+ "\x5c\x4d\x73\x97\x0f\x70\x68\xfa"
+ "\x56\x4d\x00\xc2\x3b\x1f\xc8\xb9"
+ "\x78\x1f\x51\x07\xe3\x9a\x13\x4e"
+ "\xed\x2b\x2e\xa3\xf7\x44\xb2\xe7"
+ "\xab\x19\x37\xd9\xba\x76\x5e\xd2"
+ "\xf2\x53\x15\x17\x4c\x6b\x16\x9f"
+ "\x02\x66\x49\xca\x7c\x91\x05\xf2"
+ "\x45\x36\x1e\xf5\x77\xad\x1f\x46"
+ "\xa8\x13\xfb\x63\xb6\x08\x99\x63"
+ "\x82\xa2\xed\xb3\xac\xdf\x43\x19"
+ "\x45\xea\x78\x73\xd9\xb7\x39\x11"
+ "\xa3\x13\x7c\xf8\x3f\xf7\xad\x81"
+ "\x48\x2f\xa9\x5c\x5f\xa0\xf0\x79"
+ "\xa4\x47\x7d\x80\x20\x26\xfd\x63"
+ "\x0a\xc7\x7e\x6d\x75\x47\xff\x76"
+ "\x66\x2e\x8a\x6c\x81\x35\xaf\x0b"
+ "\x2e\x6a\x49\x60\xc1\x10\xe1\xe1"
+ "\x54\x03\xa4\x09\x0c\x37\x7a\x15"
+ "\x23\x27\x5b\x8b\x4b\xa5\x64\x97"
+ "\xae\x4a\x50\x73\x1f\x66\x1c\x5c"
+ "\x03\x25\x3c\x8d\x48\x58\x71\x34"
+ "\x0e\xec\x4e\x55\x1a\x03\x6a\xe5"
+ "\xb6\x19\x2b\x84\x2a\x20\xd1\xea"
+ "\x80\x6f\x96\x0e\x05\x62\xc7\x78"
+ "\x87\x79\x60\x38\x46\xb4\x25\x57"
+ "\x6e\x16\x63\xf8\xad\x6e\xd7\x42"
+ "\x69\xe1\x88\xef\x6e\xd5\xb4\x9a"
+ "\x3c\x78\x6c\x3b\xe5\xa0\x1d\x22"
+ "\x86\x5c\x74\x3a\xeb\x24\x26\xc7"
+ "\x09\xfc\x91\x96\x47\x87\x4f\x1a"
+ "\xd6\x6b\x2c\x18\x47\xc0\xb8\x24"
+ "\xa8\x5a\x4a\x9e\xcb\x03\xe7\x2a"
+ "\x09\xe6\x4d\x9c\x6d\x86\x60\xf5"
+ "\x2f\x48\x69\x37\x9f\xf2\xd2\xcb"
+ "\x0e\x5a\xdd\x6e\x8a\xfb\x6a\xfe"
+ "\x0b\x63\xde\x87\x42\x79\x8a\x68"
+ "\x51\x28\x9b\x7a\xeb\xaf\xb8\x2f"
+ "\x9d\xd1\xc7\x45\x90\x08\xc9\x83"
+ "\xe9\x83\x84\xcb\x28\x69\x09\x69"
+ "\xce\x99\x46\x00\x54\xcb\xd8\x38"
+ "\xf9\x53\x4a\xbf\x31\xce\x57\x15"
+ "\x33\xfa\x96\x04\x33\x42\xe3\xc0"
+ "\xb7\x54\x4a\x65\x7a\x7c\x02\xe6"
+ "\x19\x95\xd0\x0e\x82\x07\x63\xf9"
+ "\xe1\x2b\x2a\xfc\x55\x92\x52\xc9"
+ "\xb5\x9f\x23\x28\x60\xe7\x20\x51"
+ "\x10\xd3\xed\x6d\x9b\xab\xb8\xe2"
+ "\x5d\x9a\x34\xb3\xbe\x9c\x64\xcb"
+ "\x78\xc6\x91\x22\x40\x91\x80\xbe"
+ "\xd7\x78\x5c\x0e\x0a\xdc\x08\xe9"
+ "\x67\x10\xa4\x83\x98\x79\x23\xe7"
+ "\x92\xda\xa9\x22\x16\xb1\xe7\x78"
+ "\xa3\x1c\x6c\x8f\x35\x7c\x4d\x37"
+ "\x2f\x6e\x0b\x50\x5c\x34\xb9\xf9"
+ "\xe6\x3d\x91\x0d\x32\x95\xaa\x3d"
+ "\x48\x11\x06\xbb\x2d\xf2\x63\x88"
+ "\x3f\x73\x09\xe2\x45\x56\x31\x51"
+ "\xfa\x5e\x4e\x62\xf7\x90\xf9\xa9"
+ "\x7d\x7b\x1b\xb1\xc8\x26\x6e\x66"
+ "\xf6\x90\x9a\x7f\xf2\x57\xcc\x23"
+ "\x59\xfa\xfa\xaa\x44\x04\x01\xa7"
+ "\xa4\x78\xdb\x74\x3d\x8b\xb5",
+ .plen = 719,
+ .ctext = "\x84\x0b\xdb\xd5\xb7\xa8\xfe\x20"
+ "\xbb\xb1\x12\x7f\x41\xea\xb3\xc0"
+ "\xa2\xb4\x37\x19\x11\x58\xb6\x0b"
+ "\x4c\x1d\x38\x05\x54\xd1\x16\x73"
+ "\x8e\x1c\x20\x90\xa2\x9a\xb7\x74"
+ "\x47\xe6\xd8\xfc\x18\x3a\xb4\xea"
+ "\xd5\x16\x5a\x2c\x53\x01\x46\xb3"
+ "\x18\x33\x74\x6c\x50\xf2\xe8\xc0"
+ "\x73\xda\x60\x22\xeb\xe3\xe5\x9b"
+ "\x20\x93\x6c\x4b\x37\x99\xb8\x23"
+ "\x3b\x4e\xac\xe8\x5b\xe8\x0f\xb7"
+ "\xc3\x8f\xfb\x4a\x37\xd9\x39\x95"
+ "\x34\xf1\xdb\x8f\x71\xd9\xc7\x0b"
+ "\x02\xf1\x63\xfc\x9b\xfc\xc5\xab"
+ "\xb9\x14\x13\x21\xdf\xce\xaa\x88"
+ "\x44\x30\x1e\xce\x26\x01\x92\xf8"
+ "\x9f\x00\x4b\x0c\x4b\xf7\x5f\xe0"
+ "\x89\xca\x94\x66\x11\x21\x97\xca"
+ "\x3e\x83\x74\x2d\xdb\x4d\x11\xeb"
+ "\x97\xc2\x14\xff\x9e\x1e\xa0\x6b"
+ "\x08\xb4\x31\x2b\x85\xc6\x85\x6c"
+ "\x90\xec\x39\xc0\xec\xb3\xb5\x4e"
+ "\xf3\x9c\xe7\x83\x3a\x77\x0a\xf4"
+ "\x56\xfe\xce\x18\x33\x6d\x0b\x2d"
+ "\x33\xda\xc8\x05\x5c\xb4\x09\x2a"
+ "\xde\x6b\x52\x98\x01\xef\x36\x3d"
+ "\xbd\xf9\x8f\xa8\x3e\xaa\xcd\xd1"
+ "\x01\x2d\x42\x49\xc3\xb6\x84\xbb"
+ "\x48\x96\xe0\x90\x93\x6c\x48\x64"
+ "\xd4\xfa\x7f\x93\x2c\xa6\x21\xc8"
+ "\x7a\x23\x7b\xaa\x20\x56\x12\xae"
+ "\x16\x9d\x94\x0f\x54\xa1\xec\xca"
+ "\x51\x4e\xf2\x39\xf4\xf8\x5f\x04"
+ "\x5a\x0d\xbf\xf5\x83\xa1\x15\xe1"
+ "\xf5\x3c\xd8\x62\xa3\xed\x47\x89"
+ "\x85\x4c\xe5\xdb\xac\x9e\x17\x1d"
+ "\x0c\x09\xe3\x3e\x39\x5b\x4d\x74"
+ "\x0e\xf5\x34\xee\x70\x11\x4c\xfd"
+ "\xdb\x34\xb1\xb5\x10\x3f\x73\xb7"
+ "\xf5\xfa\xed\xb0\x1f\xa5\xcd\x3c"
+ "\x8d\x35\x83\xd4\x11\x44\x6e\x6c"
+ "\x5b\xe0\x0e\x69\xa5\x39\xe5\xbb"
+ "\xa9\x57\x24\x37\xe6\x1f\xdd\xcf"
+ "\x16\x2a\x13\xf9\x6a\x2d\x90\xa0"
+ "\x03\x60\x7a\xed\x69\xd5\x00\x8b"
+ "\x7e\x4f\xcb\xb9\xfa\x91\xb9\x37"
+ "\xc1\x26\xce\x90\x97\x22\x64\x64"
+ "\xc1\x72\x43\x1b\xf6\xac\xc1\x54"
+ "\x8a\x10\x9c\xdd\x8d\xd5\x8e\xb2"
+ "\xe4\x85\xda\xe0\x20\x5f\xf4\xb4"
+ "\x15\xb5\xa0\x8d\x12\x74\x49\x23"
+ "\x3a\xdf\x4a\xd3\xf0\x3b\x89\xeb"
+ "\xf8\xcc\x62\x7b\xfb\x93\x07\x41"
+ "\x61\x26\x94\x58\x70\xa6\x3c\xe4"
+ "\xff\x58\xc4\x13\x3d\xcb\x36\x6b"
+ "\x32\xe5\xb2\x6d\x03\x74\x6f\x76"
+ "\x93\x77\xde\x48\xc4\xfa\x30\x4a"
+ "\xda\x49\x80\x77\x0f\x1c\xbe\x11"
+ "\xc8\x48\xb1\xe5\xbb\xf2\x8a\xe1"
+ "\x96\x2f\x9f\xd1\x8e\x8a\x5c\xe2"
+ "\xf7\xd7\xd8\x54\xf3\x3f\xc4\x91"
+ "\xb8\xfb\x86\xdc\x46\x24\x91\x60"
+ "\x6c\x2f\xc9\x41\x37\x51\x49\x54"
+ "\x09\x81\x21\xf3\x03\x9f\x2b\xe3"
+ "\x1f\x39\x63\xaf\xf4\xd7\x53\x60"
+ "\xa7\xc7\x54\xf9\xee\xb1\xb1\x7d"
+ "\x75\x54\x65\x93\xfe\xb1\x68\x6b"
+ "\x57\x02\xf9\xbb\x0e\xf9\xf8\xbf"
+ "\x01\x12\x27\xb4\xfe\xe4\x79\x7a"
+ "\x40\x5b\x51\x4b\xdf\x38\xec\xb1"
+ "\x6a\x56\xff\x35\x4d\x42\x33\xaa"
+ "\x6f\x1b\xe4\xdc\xe0\xdb\x85\x35"
+ "\x62\x10\xd4\xec\xeb\xc5\x7e\x45"
+ "\x1c\x6f\x17\xca\x3b\x8e\x2d\x66"
+ "\x4f\x4b\x36\x56\xcd\x1b\x59\xaa"
+ "\xd2\x9b\x17\xb9\x58\xdf\x7b\x64"
+ "\x8a\xff\x3b\x9c\xa6\xb5\x48\x9e"
+ "\xaa\xe2\x5d\x09\x71\x32\x5f\xb6"
+ "\x29\xbe\xe7\xc7\x52\x7e\x91\x82"
+ "\x6b\x6d\x33\xe1\x34\x06\x36\x21"
+ "\x5e\xbe\x1e\x2f\x3e\xc1\xfb\xea"
+ "\x49\x2c\xb5\xca\xf7\xb0\x37\xea"
+ "\x1f\xed\x10\x04\xd9\x48\x0d\x1a"
+ "\x1c\xfb\xe7\x84\x0e\x83\x53\x74"
+ "\xc7\x65\xe2\x5c\xe5\xba\x73\x4c"
+ "\x0e\xe1\xb5\x11\x45\x61\x43\x46"
+ "\xaa\x25\x8f\xbd\x85\x08\xfa\x4c"
+ "\x15\xc1\xc0\xd8\xf5\xdc\x16\xbb"
+ "\x7b\x1d\xe3\x87\x57\xa7\x2a\x1d"
+ "\x38\x58\x9e\x8a\x43\xdc\x57"
+ "\xd1\x81\x7d\x2b\xe9\xff\x99\x3a"
+ "\x4b\x24\x52\x58\x55\xe1\x49\x14",
+ .clen = 735,
}
};
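/*
 * Illustration only, not part of this patch: a sketch of how an
 * aes_gcm_tv_template entry maps onto the AEAD API.  .clen = .plen + 16
 * because the 16-byte tag is appended to the ciphertext, so buf needs
 * .plen + 16 bytes of room.  The helper name is hypothetical; buffers are
 * assumed kmalloc'd and alen > 0 for brevity.
 */
#include <crypto/aead.h>
#include <linux/scatterlist.h>

static int gcm_encrypt_one(const u8 *key, unsigned int klen,
			   u8 *iv /* 12 bytes for gcm(aes) */,
			   u8 *assoc, unsigned int alen,
			   u8 *buf, unsigned int plen)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg[2];
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, klen);
	if (!err)
		err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}

	/* Layout: associated data, then plaintext plus room for the tag. */
	sg_init_table(sg, 2);
	sg_set_buf(&sg[0], assoc, alen);
	sg_set_buf(&sg[1], buf, plen + 16);

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, alen);
	aead_request_set_crypt(req, sg, sg, plen, iv);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
	aead_request_free(req);
out:
	crypto_free_aead(tfm);
	return err;
}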
@@ -19489,2697 +21095,6 @@ static const struct aead_testvec aegis128_tv_template[] = {
};
/*
- * AEGIS-128L test vectors - generated via reference implementation from
- * SUPERCOP (https://bench.cr.yp.to/supercop.html):
- *
- * https://bench.cr.yp.to/supercop/supercop-20170228.tar.xz
- * (see crypto_aead/aegis128l/)
- */
-static const struct aead_testvec aegis128l_tv_template[] = {
- {
- .key = "\x0f\xc9\x8e\x67\x44\x9e\xaa\x86"
- "\x20\x36\x2c\x24\xfe\xc9\x30\x81",
- .klen = 16,
- .iv = "\x1e\x92\x1c\xcf\x88\x3d\x54\x0d"
- "\x40\x6d\x59\x48\xfc\x92\x61\x03",
- .assoc = "",
- .alen = 0,
- .ptext = "",
- .plen = 0,
- .ctext = "\x30\x4f\xf3\xe9\xb1\xfa\x81\xa6"
- "\x20\x72\x78\xdd\x93\xc8\x57\xef",
- .clen = 16,
- }, {
- .key = "\x4b\xed\xc8\x07\x54\x1a\x52\xa2"
- "\xa1\x10\xde\xb5\xf8\xed\xf3\x87",
- .klen = 16,
- .iv = "\x5a\xb7\x56\x6e\x98\xb9\xfd\x29"
- "\xc1\x47\x0b\xda\xf6\xb6\x23\x09",
- .assoc = "",
- .alen = 0,
- .ptext = "\x79",
- .plen = 1,
- .ctext = "\xa9\x24\xa0\xb6\x2d\xdd\x29\xdb"
- "\x40\xb3\x71\xc5\x22\x58\x31\x77"
- "\x6d",
- .clen = 17,
- }, {
- .key = "\x88\x12\x01\xa6\x64\x96\xfb\xbe"
- "\x22\xea\x90\x47\xf2\x11\xb5\x8e",
- .klen = 16,
- .iv = "\x97\xdb\x90\x0e\xa8\x35\xa5\x45"
- "\x42\x21\xbd\x6b\xf0\xda\xe6\x0f",
- .assoc = "",
- .alen = 0,
- .ptext = "\xb5\x6e\xad\xdd\x30\x72\xfa\x53"
- "\x82\x8e\x16\xb4\xed\x6d\x47",
- .plen = 15,
- .ctext = "\xbb\x0a\x53\xc4\xaa\x7e\xa4\x03"
- "\x2b\xee\x62\x99\x7b\x98\x13\x1f"
- "\xe0\x76\x4c\x2e\x53\x99\x4f\xbe"
- "\xe1\xa8\x04\x7f\xe1\x71\xbe",
- .clen = 31,
- }, {
- .key = "\xc4\x37\x3b\x45\x74\x11\xa4\xda"
- "\xa2\xc5\x42\xd8\xec\x36\x78\x94",
- .klen = 16,
- .iv = "\xd3\x00\xc9\xad\xb8\xb0\x4e\x61"
- "\xc3\xfb\x6f\xfd\xea\xff\xa9\x15",
- .assoc = "",
- .alen = 0,
- .ptext = "\xf2\x92\xe6\x7d\x40\xee\xa3\x6f"
- "\x03\x68\xc8\x45\xe7\x91\x0a\x18",
- .plen = 16,
- .ctext = "\x66\xdf\x6e\x71\xc0\x6e\xa4\x4c"
- "\x9d\xb7\x8c\x9a\xdb\x1f\xd2\x2e"
- "\x23\xb6\xa4\xfb\xd3\x86\xdd\xbb"
- "\xde\x54\x9b\xf5\x92\x8b\x93\xc5",
- .clen = 32,
- }, {
- .key = "\x01\x5c\x75\xe5\x84\x8d\x4d\xf6"
- "\x23\x9f\xf4\x6a\xe6\x5a\x3b\x9a",
- .klen = 16,
- .iv = "\x10\x25\x03\x4c\xc8\x2c\xf7\x7d"
- "\x44\xd5\x21\x8e\xe4\x23\x6b\x1c",
- .assoc = "",
- .alen = 0,
- .ptext = "\x2e\xb7\x20\x1c\x50\x6a\x4b\x8b"
- "\x84\x42\x7a\xd7\xe1\xb5\xcd\x1f"
- "\xd3",
- .plen = 17,
- .ctext = "\x4f\xc3\x69\xb6\xd3\xa4\x64\x8b"
- "\x71\xc3\x8a\x91\x22\x4f\x1b\xd2"
- "\x33\x6d\x86\xbc\xf8\x2f\x06\xf9"
- "\x82\x64\xc7\x72\x00\x30\xfc\xf0"
- "\xf8",
- .clen = 33,
- }, {
- .key = "\x3d\x80\xae\x84\x94\x09\xf6\x12"
- "\xa4\x79\xa6\xfb\xe0\x7f\xfd\xa0",
- .klen = 16,
- .iv = "\x4c\x49\x3d\xec\xd8\xa8\xa0\x98"
- "\xc5\xb0\xd3\x1f\xde\x48\x2e\x22",
- .assoc = "",
- .alen = 0,
- .ptext = "\x6b\xdc\x5a\xbb\x60\xe5\xf4\xa6"
- "\x05\x1d\x2c\x68\xdb\xda\x8f\x25"
- "\xfe\x8d\x45\x19\x1e\xc0\x0b\x99"
- "\x88\x11\x39\x12\x1c\x3a\xbb",
- .plen = 31,
- .ctext = "\xe3\x93\x15\xae\x5f\x9d\x3c\xb5"
- "\xd6\x9d\xee\xee\xcf\xaa\xaf\xe1"
- "\x45\x10\x96\xe0\xbf\x55\x0f\x4c"
- "\x1a\xfd\xf4\xda\x4e\x10\xde\xc9"
- "\x0e\x6f\xc7\x3c\x49\x94\x41\xfc"
- "\x59\x28\x88\x3c\x79\x10\x6b",
- .clen = 47,
- }, {
- .key = "\x7a\xa5\xe8\x23\xa4\x84\x9e\x2d"
- "\x25\x53\x58\x8c\xda\xa3\xc0\xa6",
- .klen = 16,
- .iv = "\x89\x6e\x77\x8b\xe8\x23\x49\xb4"
- "\x45\x8a\x85\xb1\xd8\x6c\xf1\x28",
- .assoc = "",
- .alen = 0,
- .ptext = "\xa7\x00\x93\x5b\x70\x61\x9d\xc2"
- "\x86\xf7\xde\xfa\xd5\xfe\x52\x2b"
- "\x28\x50\x51\x9d\x24\x60\x8d\xb3"
- "\x49\x3e\x17\xea\xf6\x99\x5a\xdd",
- .plen = 32,
- .ctext = "\x1c\x8e\x22\x34\xfd\xab\xe6\x0d"
- "\x1c\x9f\x06\x54\x8b\x0b\xb4\x40"
- "\xde\x11\x59\x3e\xfd\x74\xf6\x42"
- "\x97\x17\xf7\x24\xb6\x7e\xc4\xc6"
- "\x06\xa3\x94\xda\x3d\x7f\x55\x0a"
- "\x92\x07\x2f\xa6\xf3\x6b\x2c\xfc",
- .clen = 48,
- }, {
- .key = "\xb6\xca\x22\xc3\xb4\x00\x47\x49"
- "\xa6\x2d\x0a\x1e\xd4\xc7\x83\xad",
- .klen = 16,
- .iv = "\xc5\x93\xb0\x2a\xf8\x9f\xf1\xd0"
- "\xc6\x64\x37\x42\xd2\x90\xb3\x2e",
- .assoc = "\xd5",
- .alen = 1,
- .ptext = "",
- .plen = 0,
- .ctext = "\xa0\x2a\xb4\x9a\x91\x00\x15\xb8"
- "\x0f\x9a\x15\x60\x0e\x9b\x13\x8f",
- .clen = 16,
- }, {
- .key = "\xf3\xee\x5c\x62\xc4\x7c\xf0\x65"
- "\x27\x08\xbd\xaf\xce\xec\x45\xb3",
- .klen = 16,
- .iv = "\x02\xb8\xea\xca\x09\x1b\x9a\xec"
- "\x47\x3e\xe9\xd4\xcc\xb5\x76\x34",
- .assoc = "\x11\x81\x78\x32\x4d\xb9\x44\x73"
- "\x68\x75\x16\xf8\xcb\x7e\xa7",
- .alen = 15,
- .ptext = "",
- .plen = 0,
- .ctext = "\x4c\x26\xad\x9c\x14\xfd\x9c\x8c"
- "\x84\xfb\x26\xfb\xd5\xca\x62\x39",
- .clen = 16,
- }, {
- .key = "\x2f\x13\x95\x01\xd5\xf7\x99\x81"
- "\xa8\xe2\x6f\x41\xc8\x10\x08\xb9",
- .klen = 16,
- .iv = "\x3f\xdc\x24\x69\x19\x96\x43\x08"
- "\xc8\x18\x9b\x65\xc6\xd9\x39\x3b",
- .assoc = "\x4e\xa5\xb2\xd1\x5d\x35\xed\x8f"
- "\xe8\x4f\xc8\x89\xc5\xa2\x69\xbc",
- .alen = 16,
- .ptext = "",
- .plen = 0,
- .ctext = "\x45\x85\x0e\x0f\xf4\xae\x96\xa1"
- "\x99\x4d\x6d\xb4\x67\x32\xb0\x3a",
- .clen = 16,
- }, {
- .key = "\x6c\x38\xcf\xa1\xe5\x73\x41\x9d"
- "\x29\xbc\x21\xd2\xc2\x35\xcb\xbf",
- .klen = 16,
- .iv = "\x7b\x01\x5d\x08\x29\x12\xec\x24"
- "\x49\xf3\x4d\xf7\xc0\xfe\xfb\x41",
- .assoc = "\x8a\xca\xec\x70\x6d\xb1\x96\xab"
- "\x69\x29\x7a\x1b\xbf\xc7\x2c\xc2"
- "\x07",
- .alen = 17,
- .ptext = "",
- .plen = 0,
- .ctext = "\x33\xb1\x42\x97\x8e\x16\x7b\x63"
- "\x06\xba\x5b\xcb\xae\x6d\x8b\x56",
- .clen = 16,
- }, {
- .key = "\xa8\x5c\x09\x40\xf5\xef\xea\xb8"
- "\xaa\x96\xd3\x64\xbc\x59\x8d\xc6",
- .klen = 16,
- .iv = "\xb8\x26\x97\xa8\x39\x8e\x94\x3f"
- "\xca\xcd\xff\x88\xba\x22\xbe\x47",
- .assoc = "\xc7\xef\x26\x10\x7d\x2c\x3f\xc6"
- "\xea\x03\x2c\xac\xb9\xeb\xef\xc9"
- "\x31\x6b\x08\x12\xfc\xd8\x37\x2d"
- "\xe0\x17\x3a\x2e\x83\x5c\x8f",
- .alen = 31,
- .ptext = "",
- .plen = 0,
- .ctext = "\xda\x44\x08\x8c\x2a\xa5\x07\x35"
- "\x0b\x54\x4e\x6d\xe3\xfd\xc4\x5f",
- .clen = 16,
- }, {
- .key = "\xe5\x81\x42\xdf\x05\x6a\x93\xd4"
- "\x2b\x70\x85\xf5\xb6\x7d\x50\xcc",
- .klen = 16,
- .iv = "\xf4\x4a\xd1\x47\x49\x09\x3d\x5b"
- "\x4b\xa7\xb1\x19\xb4\x46\x81\x4d",
- .assoc = "\x03\x14\x5f\xaf\x8d\xa8\xe7\xe2"
- "\x6b\xde\xde\x3e\xb3\x10\xb1\xcf"
- "\x5c\x2d\x14\x96\x01\x78\xb9\x47"
- "\xa1\x44\x19\x06\x5d\xbb\x2e\x2f",
- .alen = 32,
- .ptext = "",
- .plen = 0,
- .ctext = "\x1b\xb1\xf1\xa8\x9e\xc2\xb2\x88"
- "\x40\x7f\x7b\x19\x7a\x52\x8c\xf0",
- .clen = 16,
- }, {
- .key = "\x22\xa6\x7c\x7f\x15\xe6\x3c\xf0"
- "\xac\x4b\x37\x86\xb0\xa2\x13\xd2",
- .klen = 16,
- .iv = "\x31\x6f\x0b\xe6\x59\x85\xe6\x77"
- "\xcc\x81\x63\xab\xae\x6b\x43\x54",
- .assoc = "\x40",
- .alen = 1,
- .ptext = "\x4f",
- .plen = 1,
- .ctext = "\x6e\xc8\xfb\x15\x9d\x98\x49\xc9"
- "\xa0\x98\x09\x85\xbe\x56\x8e\x79"
- "\xf4",
- .clen = 17,
- }, {
- .key = "\x5e\xcb\xb6\x1e\x25\x62\xe4\x0c"
- "\x2d\x25\xe9\x18\xaa\xc6\xd5\xd8",
- .klen = 16,
- .iv = "\x6d\x94\x44\x86\x69\x00\x8f\x93"
- "\x4d\x5b\x15\x3c\xa8\x8f\x06\x5a",
- .assoc = "\x7c\x5d\xd3\xee\xad\x9f\x39\x1a"
- "\x6d\x92\x42\x61\xa7\x58\x37",
- .alen = 15,
- .ptext = "\x8b\x26\x61\x55\xf1\x3e\xe3\xa1"
- "\x8d\xc8\x6e\x85\xa5\x21\x67",
- .plen = 15,
- .ctext = "\x99\x2e\x84\x50\x64\x5c\xab\x29"
- "\x20\xba\xb9\x2f\x62\x3a\xce\x2a"
- "\x75\x25\x3b\xe3\x40\xe0\x1d\xfc"
- "\x20\x63\x0b\x49\x7e\x97\x08",
- .clen = 31,
- }, {
- .key = "\x9b\xef\xf0\xbd\x35\xdd\x8d\x28"
- "\xad\xff\x9b\xa9\xa4\xeb\x98\xdf",
- .klen = 16,
- .iv = "\xaa\xb8\x7e\x25\x79\x7c\x37\xaf"
- "\xce\x36\xc7\xce\xa2\xb4\xc9\x60",
- .assoc = "\xb9\x82\x0c\x8d\xbd\x1b\xe2\x36"
- "\xee\x6c\xf4\xf2\xa1\x7d\xf9\xe2",
- .alen = 16,
- .ptext = "\xc8\x4b\x9b\xf5\x01\xba\x8c\xbd"
- "\x0e\xa3\x21\x16\x9f\x46\x2a\x63",
- .plen = 16,
- .ctext = "\xd9\x8e\xfd\x50\x8f\x02\x9f\xee"
- "\x78\x08\x12\xec\x09\xaf\x53\x14"
- "\x90\x3e\x3d\x76\xad\x71\x21\x08"
- "\x77\xe5\x4b\x15\xc2\xe6\xbc\xdb",
- .clen = 32,
- }, {
- .key = "\xd7\x14\x29\x5d\x45\x59\x36\x44"
- "\x2e\xd9\x4d\x3b\x9e\x0f\x5b\xe5",
- .klen = 16,
- .iv = "\xe6\xdd\xb8\xc4\x89\xf8\xe0\xca"
- "\x4f\x10\x7a\x5f\x9c\xd8\x8b\x66",
- .assoc = "\xf5\xa6\x46\x2c\xce\x97\x8a\x51"
- "\x6f\x46\xa6\x83\x9b\xa1\xbc\xe8"
- "\x05",
- .alen = 17,
- .ptext = "\x05\x70\xd5\x94\x12\x36\x35\xd8"
- "\x8f\x7d\xd3\xa8\x99\x6a\xed\x69"
- "\xd0",
- .plen = 17,
- .ctext = "\xf3\xe7\x95\x86\xcf\x34\x95\x96"
- "\x17\xfe\x1b\xae\x1b\x31\xf2\x1a"
- "\xbd\xbc\xc9\x4e\x11\x29\x09\x5c"
- "\x05\xd3\xb4\x2e\x4a\x74\x59\x49"
- "\x7d",
- .clen = 33,
- }, {
- .key = "\x14\x39\x63\xfc\x56\xd5\xdf\x5f"
- "\xaf\xb3\xff\xcc\x98\x33\x1d\xeb",
- .klen = 16,
- .iv = "\x23\x02\xf1\x64\x9a\x73\x89\xe6"
- "\xd0\xea\x2c\xf1\x96\xfc\x4e\x6d",
- .assoc = "\x32\xcb\x80\xcc\xde\x12\x33\x6d"
- "\xf0\x20\x58\x15\x95\xc6\x7f\xee"
- "\x2f\xf9\x4e\x2c\x1b\x98\x43\xc7"
- "\x68\x28\x73\x40\x9f\x96\x4a",
- .alen = 31,
- .ptext = "\x41\x94\x0e\x33\x22\xb1\xdd\xf4"
- "\x10\x57\x85\x39\x93\x8f\xaf\x70"
- "\xfa\xa9\xd0\x4d\x5c\x40\x23\xcd"
- "\x98\x34\xab\x37\x56\xae\x32",
- .plen = 31,
- .ctext = "\x06\x96\xb2\xbf\x63\xf4\x1e\x24"
- "\x0d\x19\x15\x61\x65\x3b\x06\x26"
- "\x71\xe8\x7e\x16\xdb\x96\x01\x01"
- "\x52\xcd\x49\x5b\x07\x33\x4e\xe7"
- "\xaa\x91\xf5\xd5\xc6\xfe\x41\xb5"
- "\xed\x90\xce\xb9\xcd\xcc\xa1",
- .clen = 47,
- }, {
- .key = "\x50\x5d\x9d\x9b\x66\x50\x88\x7b"
- "\x30\x8e\xb1\x5e\x92\x58\xe0\xf1",
- .klen = 16,
- .iv = "\x5f\x27\x2b\x03\xaa\xef\x32\x02"
- "\x50\xc4\xde\x82\x90\x21\x11\x73",
- .assoc = "\x6e\xf0\xba\x6b\xee\x8e\xdc\x89"
- "\x71\xfb\x0a\xa6\x8f\xea\x41\xf4"
- "\x5a\xbb\x59\xb0\x20\x38\xc5\xe0"
- "\x29\x56\x52\x19\x79\xf5\xe9\x37",
- .alen = 32,
- .ptext = "\x7e\xb9\x48\xd3\x32\x2d\x86\x10"
- "\x91\x31\x37\xcb\x8d\xb3\x72\x76"
- "\x24\x6b\xdc\xd1\x61\xe0\xa5\xe7"
- "\x5a\x61\x8a\x0f\x30\x0d\xd1\xec",
- .plen = 32,
- .ctext = "\xf9\xd7\xee\x17\xfd\x24\xcd\xf1"
- "\xbc\x0f\x35\x97\x97\x0c\x4b\x18"
- "\xce\x58\xc8\x3b\xd4\x85\x93\x79"
- "\xcc\x9c\xea\xc1\x73\x13\x0b\x4c"
- "\xcc\x6f\x28\xf8\xa4\x4e\xb8\x56"
- "\x64\x4e\x47\xce\xb2\xb4\x92\xb4",
- .clen = 48,
- }, {
- .key = "\x8d\x82\xd6\x3b\x76\xcc\x30\x97"
- "\xb1\x68\x63\xef\x8c\x7c\xa3\xf7",
- .klen = 16,
- .iv = "\x9c\x4b\x65\xa2\xba\x6b\xdb\x1e"
- "\xd1\x9e\x90\x13\x8a\x45\xd3\x79",
- .assoc = "\xab\x14\xf3\x0a\xfe\x0a\x85\xa5"
- "\xf2\xd5\xbc\x38\x89\x0e\x04\xfb"
- "\x84\x7d\x65\x34\x25\xd8\x47\xfa"
- "\xeb\x83\x31\xf1\x54\x54\x89\x0d"
- "\x9d",
- .alen = 33,
- .ptext = "\xba\xde\x82\x72\x42\xa9\x2f\x2c"
- "\x12\x0b\xe9\x5c\x87\xd7\x35\x7c"
- "\x4f\x2e\xe8\x55\x66\x80\x27\x00"
- "\x1b\x8f\x68\xe7\x0a\x6c\x71\xc3"
- "\x21\x78\x55\x9d\x9c\x65\x7b\xcd"
- "\x0a\x34\x97\xff\x47\x37\xb0\x2a"
- "\x80\x0d\x19\x98\x33\xa9\x7a\xe3"
- "\x2e\x4c\xc6\xf3\x8c\x88\x42\x01"
- "\xbd",
- .plen = 65,
- .ctext = "\x58\xfa\x3a\x3d\xd9\x88\x63\xe8"
- "\xc5\x78\x50\x8b\x4a\xc9\xdf\x7f"
- "\x4b\xfa\xc8\x2e\x67\x43\xf3\x63"
- "\x42\x8e\x99\x5a\x9c\x0b\x84\x77"
- "\xbc\x46\x76\x48\x82\xc7\x57\x96"
- "\xe1\x65\xd1\xed\x1d\xdd\x80\x24"
- "\xa6\x4d\xa9\xf1\x53\x8b\x5e\x0e"
- "\x26\xb9\xcc\x37\xe5\x43\xe1\x5a"
- "\x8a\xd6\x8c\x5a\xe4\x95\xd1\x8d"
- "\xf7\x33\x64\xc1\xd3\xf2\xfc\x35"
- "\x01",
- .clen = 81,
- }, {
- .key = "\xc9\xa7\x10\xda\x86\x48\xd9\xb3"
- "\x32\x42\x15\x80\x85\xa1\x65\xfe",
- .klen = 16,
- .iv = "\xd8\x70\x9f\x42\xca\xe6\x83\x3a"
- "\x52\x79\x42\xa5\x84\x6a\x96\x7f",
- .assoc = "\xe8\x39\x2d\xaa\x0e\x85\x2d\xc1"
- "\x72\xaf\x6e\xc9\x82\x33\xc7\x01"
- "\xaf\x40\x70\xb8\x2a\x78\xc9\x14"
- "\xac\xb1\x10\xca\x2e\xb3\x28\xe4"
- "\xac\xfa\x58\x7f\xe5\x73\x09\x8c"
- "\x1d\x40\x87\x8c\xd9\x75\xc0\x55"
- "\xa2\xda\x07\xd1\xc2\xa9\xd1\xbb"
- "\x09\x4f\x77\x62\x88\x2d\xf2\x68"
- "\x54",
- .alen = 65,
- .ptext = "\xf7\x02\xbb\x11\x52\x24\xd8\x48"
- "\x93\xe6\x9b\xee\x81\xfc\xf7\x82"
- "\x79\xf0\xf3\xd9\x6c\x20\xa9\x1a"
- "\xdc\xbc\x47\xc0\xe4\xcb\x10\x99"
- "\x2f",
- .plen = 33,
- .ctext = "\x4c\xa9\xac\x71\xed\x10\xa6\x24"
- "\xb7\xa7\xdf\x8b\xf5\xc2\x41\xcb"
- "\x05\xc9\xd6\x97\xb6\x10\x7f\x17"
- "\xc2\xc0\x93\xcf\xe0\x94\xfd\x99"
- "\xf2\x62\x25\x28\x01\x23\x6f\x8b"
- "\x04\x52\xbc\xb0\x3e\x66\x52\x90"
- "\x9f",
- .clen = 49,
- }, {
- .key = "\x06\xcc\x4a\x79\x96\xc3\x82\xcf"
- "\xb3\x1c\xc7\x12\x7f\xc5\x28\x04",
- .klen = 16,
- .iv = "\x15\x95\xd8\xe1\xda\x62\x2c\x56"
- "\xd3\x53\xf4\x36\x7e\x8e\x59\x85",
- .assoc = "\x24\x5e\x67\x49\x1e\x01\xd6\xdd"
- "\xf3\x89\x20\x5b\x7c\x57\x89\x07",
- .alen = 16,
- .ptext = "\x33\x27\xf5\xb1\x62\xa0\x80\x63"
- "\x14\xc0\x4d\x7f\x7b\x20\xba\x89",
- .plen = 16,
- .ctext = "\x6d\xed\x04\x7a\x2f\x0c\x30\xa5"
- "\x96\xe6\x97\xe4\x10\xeb\x40\x95"
- "\xc5\x9a\xdf\x31\xd5\xa5\xa6\xec"
- "\x05\xa8\x31\x50\x11\x19\x44",
- .clen = 31,
- }, {
- .key = "\x42\xf0\x84\x19\xa6\x3f\x2b\xea"
- "\x34\xf6\x79\xa3\x79\xe9\xeb\x0a",
- .klen = 16,
- .iv = "\x51\xb9\x12\x80\xea\xde\xd5\x71"
- "\x54\x2d\xa6\xc8\x78\xb2\x1b\x8c",
- .assoc = "\x61\x83\xa0\xe8\x2e\x7d\x7f\xf8"
- "\x74\x63\xd2\xec\x76\x7c\x4c\x0d",
- .alen = 16,
- .ptext = "\x70\x4c\x2f\x50\x72\x1c\x29\x7f"
- "\x95\x9a\xff\x10\x75\x45\x7d\x8f",
- .plen = 16,
- .ctext = "\x30\x95\x7d\xea\xdc\x62\xc0\x88"
- "\xa1\xe3\x8d\x8c\xac\x04\x10\xa7"
- "\xfa\xfa\x07\xbd\xa0\xf0\x36\xeb"
- "\x21\x93\x2e\x31\x84\x83",
- .clen = 30,
- }, {
- .key = "\x7f\x15\xbd\xb8\xb6\xba\xd3\x06"
- "\xb5\xd1\x2b\x35\x73\x0e\xad\x10",
- .klen = 16,
- .iv = "\x8e\xde\x4c\x20\xfa\x59\x7e\x8d"
- "\xd5\x07\x58\x59\x72\xd7\xde\x92",
- .assoc = "\x9d\xa7\xda\x88\x3e\xf8\x28\x14"
- "\xf5\x3e\x85\x7d\x70\xa0\x0f\x13",
- .alen = 16,
- .ptext = "\xac\x70\x69\xef\x82\x97\xd2\x9b"
- "\x15\x74\xb1\xa2\x6f\x69\x3f\x95",
- .plen = 16,
- .ctext = "\x93\xcd\xee\xd4\xcb\x9d\x8d\x16"
- "\x63\x0d\x43\xd5\x49\xca\xa8\x85"
- "\x49\xc0\xae\x13\xbc\x26\x1d\x4b",
- .clen = 24,
- },
-};
-
-/*
- * AEGIS-256 test vectors - generated via reference implementation from
- * SUPERCOP (https://bench.cr.yp.to/supercop.html):
- *
- * https://bench.cr.yp.to/supercop/supercop-20170228.tar.xz
- * (see crypto_aead/aegis256/)
- */
-static const struct aead_testvec aegis256_tv_template[] = {
- {
- .key = "\x0f\xc9\x8e\x67\x44\x9e\xaa\x86"
- "\x20\x36\x2c\x24\xfe\xc9\x30\x81"
- "\xca\xb0\x82\x21\x41\xa8\xe0\x06"
- "\x30\x0b\x37\xf6\xb6\x17\xe7\xb5",
- .klen = 32,
- .iv = "\x1e\x92\x1c\xcf\x88\x3d\x54\x0d"
- "\x40\x6d\x59\x48\xfc\x92\x61\x03"
- "\x95\x61\x05\x42\x82\x50\xc0\x0c"
- "\x60\x16\x6f\xec\x6d\x2f\xcf\x6b",
- .assoc = "",
- .alen = 0,
- .ptext = "",
- .plen = 0,
- .ctext = "\xd5\x65\x3a\xa9\x03\x51\xd7\xaa"
- "\xfa\x4b\xd8\xa2\x41\x9b\xc1\xb2",
- .clen = 16,
- }, {
- .key = "\x4b\xed\xc8\x07\x54\x1a\x52\xa2"
- "\xa1\x10\xde\xb5\xf8\xed\xf3\x87"
- "\xf4\x72\x8e\xa5\x46\x48\x62\x20"
- "\xf1\x38\x16\xce\x90\x76\x87\x8c",
- .klen = 32,
- .iv = "\x5a\xb7\x56\x6e\x98\xb9\xfd\x29"
- "\xc1\x47\x0b\xda\xf6\xb6\x23\x09"
- "\xbf\x23\x11\xc6\x87\xf0\x42\x26"
- "\x22\x44\x4e\xc4\x47\x8e\x6e\x41",
- .assoc = "",
- .alen = 0,
- .ptext = "\x79",
- .plen = 1,
- .ctext = "\x84\xa2\x8f\xad\xdb\x8d\x2c\x16"
- "\x9e\x89\xd9\x06\xa6\xa8\x14\x29"
- "\x8b",
- .clen = 17,
- }, {
- .key = "\x88\x12\x01\xa6\x64\x96\xfb\xbe"
- "\x22\xea\x90\x47\xf2\x11\xb5\x8e"
- "\x1f\x35\x9a\x29\x4b\xe8\xe4\x39"
- "\xb3\x66\xf5\xa6\x6a\xd5\x26\x62",
- .klen = 32,
- .iv = "\x97\xdb\x90\x0e\xa8\x35\xa5\x45"
- "\x42\x21\xbd\x6b\xf0\xda\xe6\x0f"
- "\xe9\xe5\x1d\x4a\x8c\x90\xc4\x40"
- "\xe3\x71\x2d\x9c\x21\xed\x0e\x18",
- .assoc = "",
- .alen = 0,
- .ptext = "\xb5\x6e\xad\xdd\x30\x72\xfa\x53"
- "\x82\x8e\x16\xb4\xed\x6d\x47",
- .plen = 15,
- .ctext = "\x09\x94\x1f\xa6\x13\xc3\x74\x75"
- "\x17\xad\x8a\x0e\xd8\x66\x9a\x28"
- "\xd7\x30\x66\x09\x2a\xdc\xfa\x2a"
- "\x9f\x3b\xd7\xdd\x66\xd1\x2b",
- .clen = 31,
- }, {
- .key = "\xc4\x37\x3b\x45\x74\x11\xa4\xda"
- "\xa2\xc5\x42\xd8\xec\x36\x78\x94"
- "\x49\xf7\xa5\xad\x50\x88\x66\x53"
- "\x74\x94\xd4\x7f\x44\x34\xc5\x39",
- .klen = 32,
- .iv = "\xd3\x00\xc9\xad\xb8\xb0\x4e\x61"
- "\xc3\xfb\x6f\xfd\xea\xff\xa9\x15"
- "\x14\xa8\x28\xce\x92\x30\x46\x59"
- "\xa4\x9f\x0b\x75\xfb\x4c\xad\xee",
- .assoc = "",
- .alen = 0,
- .ptext = "\xf2\x92\xe6\x7d\x40\xee\xa3\x6f"
- "\x03\x68\xc8\x45\xe7\x91\x0a\x18",
- .plen = 16,
- .ctext = "\x8a\x46\xa2\x22\x8c\x03\xab\x6f"
- "\x54\x63\x4e\x7f\xc9\x8e\xfa\x70"
- "\x7b\xe5\x8d\x78\xbc\xe9\xb6\xa1"
- "\x29\x17\xc8\x3b\x52\xa4\x98\x72",
- .clen = 32,
- }, {
- .key = "\x01\x5c\x75\xe5\x84\x8d\x4d\xf6"
- "\x23\x9f\xf4\x6a\xe6\x5a\x3b\x9a"
- "\x74\xb9\xb1\x32\x55\x28\xe8\x6d"
- "\x35\xc1\xb3\x57\x1f\x93\x64\x0f",
- .klen = 32,
- .iv = "\x10\x25\x03\x4c\xc8\x2c\xf7\x7d"
- "\x44\xd5\x21\x8e\xe4\x23\x6b\x1c"
- "\x3e\x6a\x34\x53\x97\xd0\xc8\x73"
- "\x66\xcd\xea\x4d\xd5\xab\x4c\xc5",
- .assoc = "",
- .alen = 0,
- .ptext = "\x2e\xb7\x20\x1c\x50\x6a\x4b\x8b"
- "\x84\x42\x7a\xd7\xe1\xb5\xcd\x1f"
- "\xd3",
- .plen = 17,
- .ctext = "\x71\x6b\x37\x0b\x02\x61\x28\x12"
- "\x83\xab\x66\x90\x84\xc7\xd1\xc5"
- "\xb2\x7a\xb4\x7b\xb4\xfe\x02\xb2"
- "\xc0\x00\x39\x13\xb5\x51\x68\x44"
- "\xad",
- .clen = 33,
- }, {
- .key = "\x3d\x80\xae\x84\x94\x09\xf6\x12"
- "\xa4\x79\xa6\xfb\xe0\x7f\xfd\xa0"
- "\x9e\x7c\xbc\xb6\x5b\xc8\x6a\x86"
- "\xf7\xef\x91\x30\xf9\xf2\x04\xe6",
- .klen = 32,
- .iv = "\x4c\x49\x3d\xec\xd8\xa8\xa0\x98"
- "\xc5\xb0\xd3\x1f\xde\x48\x2e\x22"
- "\x69\x2c\x3f\xd7\x9c\x70\x4a\x8d"
- "\x27\xfa\xc9\x26\xaf\x0a\xeb\x9c",
- .assoc = "",
- .alen = 0,
- .ptext = "\x6b\xdc\x5a\xbb\x60\xe5\xf4\xa6"
- "\x05\x1d\x2c\x68\xdb\xda\x8f\x25"
- "\xfe\x8d\x45\x19\x1e\xc0\x0b\x99"
- "\x88\x11\x39\x12\x1c\x3a\xbb",
- .plen = 31,
- .ctext = "\xaf\xa4\x34\x0d\x59\xe6\x1c\x2f"
- "\x06\x3b\x52\x18\x49\x75\x1b\xf0"
- "\x53\x09\x72\x7b\x45\x79\xe0\xbe"
- "\x89\x85\x23\x15\xb8\x79\x07\x4c"
- "\x53\x7a\x15\x37\x0a\xee\xb7\xfb"
- "\xc4\x1f\x12\x27\xcf\x77\x90",
- .clen = 47,
- }, {
- .key = "\x7a\xa5\xe8\x23\xa4\x84\x9e\x2d"
- "\x25\x53\x58\x8c\xda\xa3\xc0\xa6"
- "\xc8\x3e\xc8\x3a\x60\x68\xec\xa0"
- "\xb8\x1c\x70\x08\xd3\x51\xa3\xbd",
- .klen = 32,
- .iv = "\x89\x6e\x77\x8b\xe8\x23\x49\xb4"
- "\x45\x8a\x85\xb1\xd8\x6c\xf1\x28"
- "\x93\xef\x4b\x5b\xa1\x10\xcc\xa6"
- "\xe8\x28\xa8\xfe\x89\x69\x8b\x72",
- .assoc = "",
- .alen = 0,
- .ptext = "\xa7\x00\x93\x5b\x70\x61\x9d\xc2"
- "\x86\xf7\xde\xfa\xd5\xfe\x52\x2b"
- "\x28\x50\x51\x9d\x24\x60\x8d\xb3"
- "\x49\x3e\x17\xea\xf6\x99\x5a\xdd",
- .plen = 32,
- .ctext = "\xe2\xc9\x0b\x33\x31\x02\xb3\xb4"
- "\x33\xfe\xeb\xa8\xb7\x9b\xb2\xd7"
- "\xeb\x0f\x05\x2b\xba\xb3\xca\xef"
- "\xf6\xd1\xb6\xc0\xb9\x9b\x85\xc5"
- "\xbf\x7a\x3e\xcc\x31\x76\x09\x80"
- "\x32\x5d\xbb\xe8\x38\x0e\x77\xd3",
- .clen = 48,
- }, {
- .key = "\xb6\xca\x22\xc3\xb4\x00\x47\x49"
- "\xa6\x2d\x0a\x1e\xd4\xc7\x83\xad"
- "\xf3\x00\xd4\xbf\x65\x08\x6e\xb9"
- "\x7a\x4a\x4f\xe0\xad\xb0\x42\x93",
- .klen = 32,
- .iv = "\xc5\x93\xb0\x2a\xf8\x9f\xf1\xd0"
- "\xc6\x64\x37\x42\xd2\x90\xb3\x2e"
- "\xbd\xb1\x57\xe0\xa6\xb0\x4e\xc0"
- "\xaa\x55\x87\xd6\x63\xc8\x2a\x49",
- .assoc = "\xd5",
- .alen = 1,
- .ptext = "",
- .plen = 0,
- .ctext = "\x96\x43\x30\xca\x6c\x4f\xd7\x12"
- "\xba\xd9\xb3\x18\x86\xdf\xc3\x52",
- .clen = 16,
- }, {
- .key = "\xf3\xee\x5c\x62\xc4\x7c\xf0\x65"
- "\x27\x08\xbd\xaf\xce\xec\x45\xb3"
- "\x1d\xc3\xdf\x43\x6a\xa8\xf0\xd3"
- "\x3b\x77\x2e\xb9\x87\x0f\xe1\x6a",
- .klen = 32,
- .iv = "\x02\xb8\xea\xca\x09\x1b\x9a\xec"
- "\x47\x3e\xe9\xd4\xcc\xb5\x76\x34"
- "\xe8\x73\x62\x64\xab\x50\xd0\xda"
- "\x6b\x83\x66\xaf\x3e\x27\xc9\x1f",
- .assoc = "\x11\x81\x78\x32\x4d\xb9\x44\x73"
- "\x68\x75\x16\xf8\xcb\x7e\xa7",
- .alen = 15,
- .ptext = "",
- .plen = 0,
- .ctext = "\x2f\xab\x45\xe2\xa7\x46\xc5\x83"
- "\x11\x9f\xb0\x74\xee\xc7\x03\xdd",
- .clen = 16,
- }, {
- .key = "\x2f\x13\x95\x01\xd5\xf7\x99\x81"
- "\xa8\xe2\x6f\x41\xc8\x10\x08\xb9"
- "\x47\x85\xeb\xc7\x6f\x48\x72\xed"
- "\xfc\xa5\x0d\x91\x61\x6e\x81\x40",
- .klen = 32,
- .iv = "\x3f\xdc\x24\x69\x19\x96\x43\x08"
- "\xc8\x18\x9b\x65\xc6\xd9\x39\x3b"
- "\x12\x35\x6e\xe8\xb0\xf0\x52\xf3"
- "\x2d\xb0\x45\x87\x18\x86\x68\xf6",
- .assoc = "\x4e\xa5\xb2\xd1\x5d\x35\xed\x8f"
- "\xe8\x4f\xc8\x89\xc5\xa2\x69\xbc",
- .alen = 16,
- .ptext = "",
- .plen = 0,
- .ctext = "\x16\x44\x73\x33\x5d\xf2\xb9\x04"
- "\x6b\x79\x98\xef\xdb\xd5\xc5\xf1",
- .clen = 16,
- }, {
- .key = "\x6c\x38\xcf\xa1\xe5\x73\x41\x9d"
- "\x29\xbc\x21\xd2\xc2\x35\xcb\xbf"
- "\x72\x47\xf6\x4b\x74\xe8\xf4\x06"
- "\xbe\xd3\xec\x6a\x3b\xcd\x20\x17",
- .klen = 32,
- .iv = "\x7b\x01\x5d\x08\x29\x12\xec\x24"
- "\x49\xf3\x4d\xf7\xc0\xfe\xfb\x41"
- "\x3c\xf8\x79\x6c\xb6\x90\xd4\x0d"
- "\xee\xde\x23\x60\xf2\xe5\x08\xcc",
- .assoc = "\x8a\xca\xec\x70\x6d\xb1\x96\xab"
- "\x69\x29\x7a\x1b\xbf\xc7\x2c\xc2"
- "\x07",
- .alen = 17,
- .ptext = "",
- .plen = 0,
- .ctext = "\xa4\x9b\xb8\x47\xc0\xed\x7a\x45"
- "\x98\x54\x8c\xed\x3d\x17\xf0\xdd",
- .clen = 16,
- }, {
- .key = "\xa8\x5c\x09\x40\xf5\xef\xea\xb8"
- "\xaa\x96\xd3\x64\xbc\x59\x8d\xc6"
- "\x9c\x0a\x02\xd0\x79\x88\x76\x20"
- "\x7f\x00\xca\x42\x15\x2c\xbf\xed",
- .klen = 32,
- .iv = "\xb8\x26\x97\xa8\x39\x8e\x94\x3f"
- "\xca\xcd\xff\x88\xba\x22\xbe\x47"
- "\x67\xba\x85\xf1\xbb\x30\x56\x26"
- "\xaf\x0b\x02\x38\xcc\x44\xa7\xa3",
- .assoc = "\xc7\xef\x26\x10\x7d\x2c\x3f\xc6"
- "\xea\x03\x2c\xac\xb9\xeb\xef\xc9"
- "\x31\x6b\x08\x12\xfc\xd8\x37\x2d"
- "\xe0\x17\x3a\x2e\x83\x5c\x8f",
- .alen = 31,
- .ptext = "",
- .plen = 0,
- .ctext = "\x20\x24\xe2\x33\x5c\x60\xc9\xf0"
- "\xa4\x96\x2f\x0d\x53\xc2\xf8\xfc",
- .clen = 16,
- }, {
- .key = "\xe5\x81\x42\xdf\x05\x6a\x93\xd4"
- "\x2b\x70\x85\xf5\xb6\x7d\x50\xcc"
- "\xc6\xcc\x0e\x54\x7f\x28\xf8\x3a"
- "\x40\x2e\xa9\x1a\xf0\x8b\x5e\xc4",
- .klen = 32,
- .iv = "\xf4\x4a\xd1\x47\x49\x09\x3d\x5b"
- "\x4b\xa7\xb1\x19\xb4\x46\x81\x4d"
- "\x91\x7c\x91\x75\xc0\xd0\xd8\x40"
- "\x71\x39\xe1\x10\xa6\xa3\x46\x7a",
- .assoc = "\x03\x14\x5f\xaf\x8d\xa8\xe7\xe2"
- "\x6b\xde\xde\x3e\xb3\x10\xb1\xcf"
- "\x5c\x2d\x14\x96\x01\x78\xb9\x47"
- "\xa1\x44\x19\x06\x5d\xbb\x2e\x2f",
- .alen = 32,
- .ptext = "",
- .plen = 0,
- .ctext = "\x6f\x4a\xb9\xe0\xff\x51\xa3\xf1"
- "\xd2\x64\x3e\x66\x6a\xb2\x03\xc0",
- .clen = 16,
- }, {
- .key = "\x22\xa6\x7c\x7f\x15\xe6\x3c\xf0"
- "\xac\x4b\x37\x86\xb0\xa2\x13\xd2"
- "\xf1\x8e\x19\xd8\x84\xc8\x7a\x53"
- "\x02\x5b\x88\xf3\xca\xea\xfe\x9b",
- .klen = 32,
- .iv = "\x31\x6f\x0b\xe6\x59\x85\xe6\x77"
- "\xcc\x81\x63\xab\xae\x6b\x43\x54"
- "\xbb\x3f\x9c\xf9\xc5\x70\x5a\x5a"
- "\x32\x67\xc0\xe9\x80\x02\xe5\x50",
- .assoc = "\x40",
- .alen = 1,
- .ptext = "\x4f",
- .plen = 1,
- .ctext = "\x2c\xfb\xad\x7e\xbe\xa0\x9a\x5b"
- "\x7a\x3f\x81\xf7\xfc\x1b\x79\x83"
- "\xc7",
- .clen = 17,
- }, {
- .key = "\x5e\xcb\xb6\x1e\x25\x62\xe4\x0c"
- "\x2d\x25\xe9\x18\xaa\xc6\xd5\xd8"
- "\x1b\x50\x25\x5d\x89\x68\xfc\x6d"
- "\xc3\x89\x67\xcb\xa4\x49\x9d\x71",
- .klen = 32,
- .iv = "\x6d\x94\x44\x86\x69\x00\x8f\x93"
- "\x4d\x5b\x15\x3c\xa8\x8f\x06\x5a"
- "\xe6\x01\xa8\x7e\xca\x10\xdc\x73"
- "\xf4\x94\x9f\xc1\x5a\x61\x85\x27",
- .assoc = "\x7c\x5d\xd3\xee\xad\x9f\x39\x1a"
- "\x6d\x92\x42\x61\xa7\x58\x37",
- .alen = 15,
- .ptext = "\x8b\x26\x61\x55\xf1\x3e\xe3\xa1"
- "\x8d\xc8\x6e\x85\xa5\x21\x67",
- .plen = 15,
- .ctext = "\x1f\x7f\xca\x3c\x2b\xe7\x27\xba"
- "\x7e\x98\x83\x02\x34\x23\xf7\x94"
- "\xde\x35\xe6\x1d\x14\x18\xe5\x38"
- "\x14\x80\x6a\xa7\x1b\xae\x1d",
- .clen = 31,
- }, {
- .key = "\x9b\xef\xf0\xbd\x35\xdd\x8d\x28"
- "\xad\xff\x9b\xa9\xa4\xeb\x98\xdf"
- "\x46\x13\x31\xe1\x8e\x08\x7e\x87"
- "\x85\xb6\x46\xa3\x7e\xa8\x3c\x48",
- .klen = 32,
- .iv = "\xaa\xb8\x7e\x25\x79\x7c\x37\xaf"
- "\xce\x36\xc7\xce\xa2\xb4\xc9\x60"
- "\x10\xc3\xb3\x02\xcf\xb0\x5e\x8d"
- "\xb5\xc2\x7e\x9a\x35\xc0\x24\xfd",
- .assoc = "\xb9\x82\x0c\x8d\xbd\x1b\xe2\x36"
- "\xee\x6c\xf4\xf2\xa1\x7d\xf9\xe2",
- .alen = 16,
- .ptext = "\xc8\x4b\x9b\xf5\x01\xba\x8c\xbd"
- "\x0e\xa3\x21\x16\x9f\x46\x2a\x63",
- .plen = 16,
- .ctext = "\x05\x86\x9e\xd7\x2b\xa3\x97\x01"
- "\xbe\x28\x98\x10\x6f\xe9\x61\x32"
- "\x96\xbb\xb1\x2e\x8f\x0c\x44\xb9"
- "\x46\x2d\x55\xe3\x42\x67\xf2\xaf",
- .clen = 32,
- }, {
- .key = "\xd7\x14\x29\x5d\x45\x59\x36\x44"
- "\x2e\xd9\x4d\x3b\x9e\x0f\x5b\xe5"
- "\x70\xd5\x3c\x65\x93\xa8\x00\xa0"
- "\x46\xe4\x25\x7c\x58\x08\xdb\x1e",
- .klen = 32,
- .iv = "\xe6\xdd\xb8\xc4\x89\xf8\xe0\xca"
- "\x4f\x10\x7a\x5f\x9c\xd8\x8b\x66"
- "\x3b\x86\xbf\x86\xd4\x50\xe0\xa7"
- "\x76\xef\x5c\x72\x0f\x1f\xc3\xd4",
- .assoc = "\xf5\xa6\x46\x2c\xce\x97\x8a\x51"
- "\x6f\x46\xa6\x83\x9b\xa1\xbc\xe8"
- "\x05",
- .alen = 17,
- .ptext = "\x05\x70\xd5\x94\x12\x36\x35\xd8"
- "\x8f\x7d\xd3\xa8\x99\x6a\xed\x69"
- "\xd0",
- .plen = 17,
- .ctext = "\x9c\xe0\x06\x7b\x86\xcf\x2e\xd8"
- "\x45\x65\x1b\x72\x9b\xaa\xa3\x1e"
- "\x87\x9d\x26\xdf\xff\x81\x11\xd2"
- "\x47\x41\xb9\x24\xc1\x8a\xa3\x8b"
- "\x55",
- .clen = 33,
- }, {
- .key = "\x14\x39\x63\xfc\x56\xd5\xdf\x5f"
- "\xaf\xb3\xff\xcc\x98\x33\x1d\xeb"
- "\x9a\x97\x48\xe9\x98\x48\x82\xba"
- "\x07\x11\x04\x54\x32\x67\x7b\xf5",
- .klen = 32,
- .iv = "\x23\x02\xf1\x64\x9a\x73\x89\xe6"
- "\xd0\xea\x2c\xf1\x96\xfc\x4e\x6d"
- "\x65\x48\xcb\x0a\xda\xf0\x62\xc0"
- "\x38\x1d\x3b\x4a\xe9\x7e\x62\xaa",
- .assoc = "\x32\xcb\x80\xcc\xde\x12\x33\x6d"
- "\xf0\x20\x58\x15\x95\xc6\x7f\xee"
- "\x2f\xf9\x4e\x2c\x1b\x98\x43\xc7"
- "\x68\x28\x73\x40\x9f\x96\x4a",
- .alen = 31,
- .ptext = "\x41\x94\x0e\x33\x22\xb1\xdd\xf4"
- "\x10\x57\x85\x39\x93\x8f\xaf\x70"
- "\xfa\xa9\xd0\x4d\x5c\x40\x23\xcd"
- "\x98\x34\xab\x37\x56\xae\x32",
- .plen = 31,
- .ctext = "\xa0\xc8\xde\x83\x0d\xc3\x4e\xd5"
- "\x69\x7f\x7a\xdd\x8c\x46\xda\xba"
- "\x0a\x5c\x0e\x7f\xac\xee\x02\xd2"
- "\xe5\x4b\x0a\xba\xb8\xa4\x7b\x66"
- "\xde\xae\xdb\xc2\xc0\x0b\xf7\x2b"
- "\xdf\xb8\xea\xd8\xa9\x38\xed",
- .clen = 47,
- }, {
- .key = "\x50\x5d\x9d\x9b\x66\x50\x88\x7b"
- "\x30\x8e\xb1\x5e\x92\x58\xe0\xf1"
- "\xc5\x5a\x53\x6e\x9d\xe8\x04\xd4"
- "\xc9\x3f\xe2\x2d\x0c\xc6\x1a\xcb",
- .klen = 32,
- .iv = "\x5f\x27\x2b\x03\xaa\xef\x32\x02"
- "\x50\xc4\xde\x82\x90\x21\x11\x73"
- "\x8f\x0a\xd6\x8f\xdf\x90\xe4\xda"
- "\xf9\x4a\x1a\x23\xc3\xdd\x02\x81",
- .assoc = "\x6e\xf0\xba\x6b\xee\x8e\xdc\x89"
- "\x71\xfb\x0a\xa6\x8f\xea\x41\xf4"
- "\x5a\xbb\x59\xb0\x20\x38\xc5\xe0"
- "\x29\x56\x52\x19\x79\xf5\xe9\x37",
- .alen = 32,
- .ptext = "\x7e\xb9\x48\xd3\x32\x2d\x86\x10"
- "\x91\x31\x37\xcb\x8d\xb3\x72\x76"
- "\x24\x6b\xdc\xd1\x61\xe0\xa5\xe7"
- "\x5a\x61\x8a\x0f\x30\x0d\xd1\xec",
- .plen = 32,
- .ctext = "\xd3\x68\x14\x70\x3c\x01\x43\x86"
- "\x02\xab\xbe\x75\xaa\xe7\xf5\x53"
- "\x5c\x05\xbd\x9b\x19\xbb\x2a\x61"
- "\x8f\x69\x05\x75\x8e\xca\x60\x0c"
- "\x5b\xa2\x48\x61\x32\x74\x11\x2b"
- "\xf6\xcf\x06\x78\x6f\x78\x1a\x4a",
- .clen = 48,
- }, {
- .key = "\x8d\x82\xd6\x3b\x76\xcc\x30\x97"
- "\xb1\x68\x63\xef\x8c\x7c\xa3\xf7"
- "\xef\x1c\x5f\xf2\xa3\x88\x86\xed"
- "\x8a\x6d\xc1\x05\xe7\x25\xb9\xa2",
- .klen = 32,
- .iv = "\x9c\x4b\x65\xa2\xba\x6b\xdb\x1e"
- "\xd1\x9e\x90\x13\x8a\x45\xd3\x79"
- "\xba\xcd\xe2\x13\xe4\x30\x66\xf4"
- "\xba\x78\xf9\xfb\x9d\x3c\xa1\x58",
- .assoc = "\xab\x14\xf3\x0a\xfe\x0a\x85\xa5"
- "\xf2\xd5\xbc\x38\x89\x0e\x04\xfb"
- "\x84\x7d\x65\x34\x25\xd8\x47\xfa"
- "\xeb\x83\x31\xf1\x54\x54\x89\x0d"
- "\x9d",
- .alen = 33,
- .ptext = "\xba\xde\x82\x72\x42\xa9\x2f\x2c"
- "\x12\x0b\xe9\x5c\x87\xd7\x35\x7c"
- "\x4f\x2e\xe8\x55\x66\x80\x27\x00"
- "\x1b\x8f\x68\xe7\x0a\x6c\x71\xc3"
- "\x21\x78\x55\x9d\x9c\x65\x7b\xcd"
- "\x0a\x34\x97\xff\x47\x37\xb0\x2a"
- "\x80\x0d\x19\x98\x33\xa9\x7a\xe3"
- "\x2e\x4c\xc6\xf3\x8c\x88\x42\x01"
- "\xbd",
- .plen = 65,
- .ctext = "\x07\x0a\x35\xb0\x82\x03\x5a\xd2"
- "\x15\x3a\x6c\x72\x83\x9b\xb1\x75"
- "\xea\xf2\xfc\xff\xc6\xf1\x13\xa4"
- "\x1a\x93\x33\x79\x97\x82\x81\xc0"
- "\x96\xc2\x00\xab\x39\xae\xa1\x62"
- "\x53\xa3\x86\xc9\x07\x8c\xaf\x22"
- "\x47\x31\x29\xca\x4a\x95\xf5\xd5"
- "\x20\x63\x5a\x54\x80\x2c\x4a\x63"
- "\xfb\x18\x73\x31\x4f\x08\x21\x5d"
- "\x20\xe9\xc3\x7e\xea\x25\x77\x3a"
- "\x65",
- .clen = 81,
- }, {
- .key = "\xc9\xa7\x10\xda\x86\x48\xd9\xb3"
- "\x32\x42\x15\x80\x85\xa1\x65\xfe"
- "\x19\xde\x6b\x76\xa8\x28\x08\x07"
- "\x4b\x9a\xa0\xdd\xc1\x84\x58\x79",
- .klen = 32,
- .iv = "\xd8\x70\x9f\x42\xca\xe6\x83\x3a"
- "\x52\x79\x42\xa5\x84\x6a\x96\x7f"
- "\xe4\x8f\xed\x97\xe9\xd0\xe8\x0d"
- "\x7c\xa6\xd8\xd4\x77\x9b\x40\x2e",
- .assoc = "\xe8\x39\x2d\xaa\x0e\x85\x2d\xc1"
- "\x72\xaf\x6e\xc9\x82\x33\xc7\x01"
- "\xaf\x40\x70\xb8\x2a\x78\xc9\x14"
- "\xac\xb1\x10\xca\x2e\xb3\x28\xe4"
- "\xac\xfa\x58\x7f\xe5\x73\x09\x8c"
- "\x1d\x40\x87\x8c\xd9\x75\xc0\x55"
- "\xa2\xda\x07\xd1\xc2\xa9\xd1\xbb"
- "\x09\x4f\x77\x62\x88\x2d\xf2\x68"
- "\x54",
- .alen = 65,
- .ptext = "\xf7\x02\xbb\x11\x52\x24\xd8\x48"
- "\x93\xe6\x9b\xee\x81\xfc\xf7\x82"
- "\x79\xf0\xf3\xd9\x6c\x20\xa9\x1a"
- "\xdc\xbc\x47\xc0\xe4\xcb\x10\x99"
- "\x2f",
- .plen = 33,
- .ctext = "\x33\xc1\xda\xfa\x15\x21\x07\x8e"
- "\x93\x68\xea\x64\x7b\x3d\x4b\x6b"
- "\x71\x5e\x5e\x6b\x92\xaa\x65\xc2"
- "\x7a\x2a\xc1\xa9\x0a\xa1\x24\x81"
- "\x26\x3a\x5a\x09\xe8\xce\x73\x72"
- "\xde\x7b\x58\x9e\x85\xb9\xa4\x28"
- "\xda",
- .clen = 49,
- }, {
- .key = "\x06\xcc\x4a\x79\x96\xc3\x82\xcf"
- "\xb3\x1c\xc7\x12\x7f\xc5\x28\x04"
- "\x44\xa1\x76\xfb\xad\xc8\x8a\x21"
- "\x0d\xc8\x7f\xb6\x9b\xe3\xf8\x4f",
- .klen = 32,
- .iv = "\x15\x95\xd8\xe1\xda\x62\x2c\x56"
- "\xd3\x53\xf4\x36\x7e\x8e\x59\x85"
- "\x0e\x51\xf9\x1c\xee\x70\x6a\x27"
- "\x3d\xd3\xb7\xac\x51\xfa\xdf\x05",
- .assoc = "\x24\x5e\x67\x49\x1e\x01\xd6\xdd"
- "\xf3\x89\x20\x5b\x7c\x57\x89\x07",
- .alen = 16,
- .ptext = "\x33\x27\xf5\xb1\x62\xa0\x80\x63"
- "\x14\xc0\x4d\x7f\x7b\x20\xba\x89",
- .plen = 16,
- .ctext = "\x3e\xf8\x86\x3d\x39\xf8\x96\x02"
- "\x0f\xdf\xc9\x6e\x37\x1e\x57\x99"
- "\x07\x2a\x1a\xac\xd1\xda\xfd\x3b"
- "\xc7\xff\xbd\xbc\x85\x09\x0b",
- .clen = 31,
- }, {
- .key = "\x42\xf0\x84\x19\xa6\x3f\x2b\xea"
- "\x34\xf6\x79\xa3\x79\xe9\xeb\x0a"
- "\x6e\x63\x82\x7f\xb2\x68\x0c\x3a"
- "\xce\xf5\x5e\x8e\x75\x42\x97\x26",
- .klen = 32,
- .iv = "\x51\xb9\x12\x80\xea\xde\xd5\x71"
- "\x54\x2d\xa6\xc8\x78\xb2\x1b\x8c"
- "\x39\x14\x05\xa0\xf3\x10\xec\x41"
- "\xff\x01\x95\x84\x2b\x59\x7f\xdb",
- .assoc = "\x61\x83\xa0\xe8\x2e\x7d\x7f\xf8"
- "\x74\x63\xd2\xec\x76\x7c\x4c\x0d",
- .alen = 16,
- .ptext = "\x70\x4c\x2f\x50\x72\x1c\x29\x7f"
- "\x95\x9a\xff\x10\x75\x45\x7d\x8f",
- .plen = 16,
- .ctext = "\x2f\xc4\xd8\x0d\xa6\x07\xef\x2e"
- "\x6c\xd9\x84\x63\x70\x97\x61\x37"
- "\x08\x2f\x16\x90\x9e\x62\x30\x0d"
- "\x62\xd5\xc8\xf0\x46\x1a",
- .clen = 30,
- }, {
- .key = "\x7f\x15\xbd\xb8\xb6\xba\xd3\x06"
- "\xb5\xd1\x2b\x35\x73\x0e\xad\x10"
- "\x98\x25\x8d\x03\xb7\x08\x8e\x54"
- "\x90\x23\x3d\x67\x4f\xa1\x36\xfc",
- .klen = 32,
- .iv = "\x8e\xde\x4c\x20\xfa\x59\x7e\x8d"
- "\xd5\x07\x58\x59\x72\xd7\xde\x92"
- "\x63\xd6\x10\x24\xf8\xb0\x6e\x5a"
- "\xc0\x2e\x74\x5d\x06\xb8\x1e\xb2",
- .assoc = "\x9d\xa7\xda\x88\x3e\xf8\x28\x14"
- "\xf5\x3e\x85\x7d\x70\xa0\x0f\x13",
- .alen = 16,
- .ptext = "\xac\x70\x69\xef\x82\x97\xd2\x9b"
- "\x15\x74\xb1\xa2\x6f\x69\x3f\x95",
- .plen = 16,
- .ctext = "\xce\xf3\x17\x87\x49\xc2\x00\x46"
- "\xc6\x12\x5c\x8f\x81\x38\xaa\x55"
- "\xf8\x67\x75\xf1\x75\xe3\x2a\x24",
- .clen = 24,
- },
-};
-
-/*
- * MORUS-640 test vectors - generated via reference implementation from
- * SUPERCOP (https://bench.cr.yp.to/supercop.html):
- *
- * https://bench.cr.yp.to/supercop/supercop-20170228.tar.xz
- * (see crypto_aead/morus640128v2/)
- */
-static const struct aead_testvec morus640_tv_template[] = {
- {
- .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
- "\x00\x00\x00\x00\x00\x00\x00\x00",
- .klen = 16,
- .iv = "\x0f\xc9\x8e\x67\x44\x9e\xaa\x86"
- "\x20\x36\x2c\x24\xfe\xc9\x30\x81",
- .assoc = "",
- .alen = 0,
- .ptext = "",
- .plen = 0,
- .ctext = "\x89\x62\x7d\xf3\x07\x9d\x52\x05"
- "\x53\xc3\x04\x60\x93\xb4\x37\x9a",
- .clen = 16,
- }, {
- .key = "\x3c\x24\x39\x9f\x10\x7b\xa8\x1b"
- "\x80\xda\xb2\x91\xf9\x24\xc2\x06",
- .klen = 16,
- .iv = "\x4b\xed\xc8\x07\x54\x1a\x52\xa2"
- "\xa1\x10\xde\xb5\xf8\xed\xf3\x87",
- .assoc = "",
- .alen = 0,
- .ptext = "\x69",
- .plen = 1,
- .ctext = "\xa8\x8d\xe4\x90\xb5\x50\x8f\x78"
- "\xb6\x10\x9a\x59\x5f\x61\x37\x70"
- "\x09",
- .clen = 17,
- }, {
- .key = "\x79\x49\x73\x3e\x20\xf7\x51\x37"
- "\x01\xb4\x64\x22\xf3\x48\x85\x0c",
- .klen = 16,
- .iv = "\x88\x12\x01\xa6\x64\x96\xfb\xbe"
- "\x22\xea\x90\x47\xf2\x11\xb5\x8e",
- .assoc = "",
- .alen = 0,
- .ptext = "\xa6\xa4\x1e\x76\xec\xd4\x50\xcc"
- "\x62\x58\xe9\x8f\xef\xa4\x17",
- .plen = 15,
- .ctext = "\x76\xdd\xb9\x05\x3d\xce\x61\x38"
- "\xf3\xef\xf7\xe5\xd7\xfd\x70\xa5"
- "\xcf\x9d\x64\xb8\x0a\x9f\xfd\x8b"
- "\xd4\x6e\xfe\xd9\xc8\x63\x4b",
- .clen = 31,
- }, {
- .key = "\xb5\x6e\xad\xdd\x30\x72\xfa\x53"
- "\x82\x8e\x16\xb4\xed\x6d\x47\x12",
- .klen = 16,
- .iv = "\xc4\x37\x3b\x45\x74\x11\xa4\xda"
- "\xa2\xc5\x42\xd8\xec\x36\x78\x94",
- .assoc = "",
- .alen = 0,
- .ptext = "\xe2\xc9\x58\x15\xfc\x4f\xf8\xe8"
- "\xe3\x32\x9b\x21\xe9\xc8\xd9\x97",
- .plen = 16,
- .ctext = "\xdc\x72\xe8\x14\xfb\x63\xad\x72"
- "\x1f\x57\x9a\x1f\x88\x81\xdb\xd6"
- "\xc1\x91\x9d\xb9\x25\xc4\x99\x4c"
- "\x97\xcd\x8a\x0c\x9d\x68\x00\x1c",
- .clen = 32,
- }, {
- .key = "\xf2\x92\xe6\x7d\x40\xee\xa3\x6f"
- "\x03\x68\xc8\x45\xe7\x91\x0a\x18",
- .klen = 16,
- .iv = "\x01\x5c\x75\xe5\x84\x8d\x4d\xf6"
- "\x23\x9f\xf4\x6a\xe6\x5a\x3b\x9a",
- .assoc = "",
- .alen = 0,
- .ptext = "\x1f\xee\x92\xb4\x0c\xcb\xa1\x04"
- "\x64\x0c\x4d\xb2\xe3\xec\x9c\x9d"
- "\x09",
- .plen = 17,
- .ctext = "\x6b\x4f\x3b\x90\x9a\xa2\xb3\x82"
- "\x0a\xb8\x55\xee\xeb\x73\x4d\x7f"
- "\x54\x11\x3a\x8a\x31\xa3\xb5\xf2"
- "\xcd\x49\xdb\xf3\xee\x26\xbd\xa2"
- "\x0d",
- .clen = 33,
- }, {
- .key = "\x2e\xb7\x20\x1c\x50\x6a\x4b\x8b"
- "\x84\x42\x7a\xd7\xe1\xb5\xcd\x1f",
- .klen = 16,
- .iv = "\x3d\x80\xae\x84\x94\x09\xf6\x12"
- "\xa4\x79\xa6\xfb\xe0\x7f\xfd\xa0",
- .assoc = "",
- .alen = 0,
- .ptext = "\x5c\x13\xcb\x54\x1c\x47\x4a\x1f"
- "\xe5\xe6\xff\x44\xdd\x11\x5f\xa3"
- "\x33\xdd\xc2\xf8\xdd\x18\x2b\x93"
- "\x57\x05\x01\x1c\x66\x22\xd3",
- .plen = 31,
- .ctext = "\x59\xd1\x0f\x6b\xee\x27\x84\x92"
- "\xb7\xa9\xb5\xdd\x02\xa4\x12\xa5"
- "\x50\x32\xb4\x9a\x2e\x35\x83\x55"
- "\x36\x12\x12\xed\xa3\x31\xc5\x30"
- "\xa7\xe2\x4a\x6d\x05\x59\x43\x91"
- "\x75\xfa\x6c\x17\xc6\x73\xca",
- .clen = 47,
- }, {
- .key = "\x6b\xdc\x5a\xbb\x60\xe5\xf4\xa6"
- "\x05\x1d\x2c\x68\xdb\xda\x8f\x25",
- .klen = 16,
- .iv = "\x7a\xa5\xe8\x23\xa4\x84\x9e\x2d"
- "\x25\x53\x58\x8c\xda\xa3\xc0\xa6",
- .assoc = "",
- .alen = 0,
- .ptext = "\x98\x37\x05\xf3\x2c\xc2\xf3\x3b"
- "\x66\xc0\xb1\xd5\xd7\x35\x21\xaa"
- "\x5d\x9f\xce\x7c\xe2\xb8\xad\xad"
- "\x19\x33\xe0\xf4\x40\x81\x72\x28",
- .plen = 32,
- .ctext = "\xdb\x49\x68\x0f\x91\x5b\x21\xb1"
- "\xcf\x50\xb2\x4c\x32\xe1\xa6\x69"
- "\xc0\xfb\x44\x1f\xa0\x9a\xeb\x39"
- "\x1b\xde\x68\x38\xcc\x27\x52\xc5"
- "\xf6\x3e\x74\xea\x66\x5b\x5f\x0c"
- "\x65\x9e\x58\xe6\x52\xa2\xfe\x59",
- .clen = 48,
- }, {
- .key = "\xa7\x00\x93\x5b\x70\x61\x9d\xc2"
- "\x86\xf7\xde\xfa\xd5\xfe\x52\x2b",
- .klen = 16,
- .iv = "\xb6\xca\x22\xc3\xb4\x00\x47\x49"
- "\xa6\x2d\x0a\x1e\xd4\xc7\x83\xad",
- .assoc = "\xc5",
- .alen = 1,
- .ptext = "",
- .plen = 0,
- .ctext = "\x56\xe7\x24\x52\xdd\x95\x60\x5b"
- "\x09\x48\x39\x69\x9c\xb3\x62\x46",
- .clen = 16,
- }, {
- .key = "\xe4\x25\xcd\xfa\x80\xdd\x46\xde"
- "\x07\xd1\x90\x8b\xcf\x23\x15\x31",
- .klen = 16,
- .iv = "\xf3\xee\x5c\x62\xc4\x7c\xf0\x65"
- "\x27\x08\xbd\xaf\xce\xec\x45\xb3",
- .assoc = "\x02\xb8\xea\xca\x09\x1b\x9a\xec"
- "\x47\x3e\xe9\xd4\xcc\xb5\x76",
- .alen = 15,
- .ptext = "",
- .plen = 0,
- .ctext = "\xdd\xfa\x6c\x1f\x5d\x86\x87\x01"
- "\x13\xe5\x73\x46\x46\xf2\x5c\xe1",
- .clen = 16,
- }, {
- .key = "\x20\x4a\x07\x99\x91\x58\xee\xfa"
- "\x88\xab\x42\x1c\xc9\x47\xd7\x38",
- .klen = 16,
- .iv = "\x2f\x13\x95\x01\xd5\xf7\x99\x81"
- "\xa8\xe2\x6f\x41\xc8\x10\x08\xb9",
- .assoc = "\x3f\xdc\x24\x69\x19\x96\x43\x08"
- "\xc8\x18\x9b\x65\xc6\xd9\x39\x3b",
- .alen = 16,
- .ptext = "",
- .plen = 0,
- .ctext = "\xa6\x1b\xb9\xd7\x5e\x3c\xcf\xac"
- "\xa9\x21\x45\x0b\x16\x52\xf7\xe1",
- .clen = 16,
- }, {
- .key = "\x5d\x6f\x41\x39\xa1\xd4\x97\x16"
- "\x09\x85\xf4\xae\xc3\x6b\x9a\x3e",
- .klen = 16,
- .iv = "\x6c\x38\xcf\xa1\xe5\x73\x41\x9d"
- "\x29\xbc\x21\xd2\xc2\x35\xcb\xbf",
- .assoc = "\x7b\x01\x5d\x08\x29\x12\xec\x24"
- "\x49\xf3\x4d\xf7\xc0\xfe\xfb\x41"
- "\x3c",
- .alen = 17,
- .ptext = "",
- .plen = 0,
- .ctext = "\x15\xff\xde\x3b\x34\xfc\xf6\xf9"
- "\xbb\xa8\x62\xad\x0a\xf5\x48\x60",
- .clen = 16,
- }, {
- .key = "\x99\x93\x7a\xd8\xb1\x50\x40\x31"
- "\x8a\x60\xa6\x3f\xbd\x90\x5d\x44",
- .klen = 16,
- .iv = "\xa8\x5c\x09\x40\xf5\xef\xea\xb8"
- "\xaa\x96\xd3\x64\xbc\x59\x8d\xc6",
- .assoc = "\xb8\x26\x97\xa8\x39\x8e\x94\x3f"
- "\xca\xcd\xff\x88\xba\x22\xbe\x47"
- "\x67\xba\x85\xf1\xbb\x30\x56\x26"
- "\xaf\x0b\x02\x38\xcc\x44\xa7",
- .alen = 31,
- .ptext = "",
- .plen = 0,
- .ctext = "\xd2\x9d\xf8\x3b\xd7\x84\xe9\x2d"
- "\x4b\xef\x75\x16\x0a\x99\xae\x6b",
- .clen = 16,
- }, {
- .key = "\xd6\xb8\xb4\x77\xc1\xcb\xe9\x4d"
- "\x0a\x3a\x58\xd1\xb7\xb4\x1f\x4a",
- .klen = 16,
- .iv = "\xe5\x81\x42\xdf\x05\x6a\x93\xd4"
- "\x2b\x70\x85\xf5\xb6\x7d\x50\xcc",
- .assoc = "\xf4\x4a\xd1\x47\x49\x09\x3d\x5b"
- "\x4b\xa7\xb1\x19\xb4\x46\x81\x4d"
- "\x91\x7c\x91\x75\xc0\xd0\xd8\x40"
- "\x71\x39\xe1\x10\xa6\xa3\x46\x7a",
- .alen = 32,
- .ptext = "",
- .plen = 0,
- .ctext = "\xe4\x8d\xa7\xa7\x45\xc1\x31\x4f"
- "\xce\xfb\xaf\xd6\xc2\xe6\xee\xc0",
- .clen = 16,
- }, {
- .key = "\x12\xdd\xee\x17\xd1\x47\x92\x69"
- "\x8b\x14\x0a\x62\xb1\xd9\xe2\x50",
- .klen = 16,
- .iv = "\x22\xa6\x7c\x7f\x15\xe6\x3c\xf0"
- "\xac\x4b\x37\x86\xb0\xa2\x13\xd2",
- .assoc = "\x31",
- .alen = 1,
- .ptext = "\x40",
- .plen = 1,
- .ctext = "\xe2\x67\x38\x4f\xb9\xad\x7d\x38"
- "\x01\xfe\x84\x14\x85\xf8\xd1\xe3"
- "\x22",
- .clen = 17,
- }, {
- .key = "\x4f\x01\x27\xb6\xe1\xc3\x3a\x85"
- "\x0c\xee\xbc\xf4\xab\xfd\xa5\x57",
- .klen = 16,
- .iv = "\x5e\xcb\xb6\x1e\x25\x62\xe4\x0c"
- "\x2d\x25\xe9\x18\xaa\xc6\xd5\xd8",
- .assoc = "\x6d\x94\x44\x86\x69\x00\x8f\x93"
- "\x4d\x5b\x15\x3c\xa8\x8f\x06",
- .alen = 15,
- .ptext = "\x7c\x5d\xd3\xee\xad\x9f\x39\x1a"
- "\x6d\x92\x42\x61\xa7\x58\x37",
- .plen = 15,
- .ctext = "\x77\x32\x61\xeb\xb4\x33\x29\x92"
- "\x29\x95\xc5\x8e\x85\x76\xab\xfc"
- "\x07\x95\xa7\x44\x74\xf7\x22\xff"
- "\xd8\xd8\x36\x3d\x8a\x7f\x9e",
- .clen = 31,
- }, {
- .key = "\x8b\x26\x61\x55\xf1\x3e\xe3\xa1"
- "\x8d\xc8\x6e\x85\xa5\x21\x67\x5d",
- .klen = 16,
- .iv = "\x9b\xef\xf0\xbd\x35\xdd\x8d\x28"
- "\xad\xff\x9b\xa9\xa4\xeb\x98\xdf",
- .assoc = "\xaa\xb8\x7e\x25\x79\x7c\x37\xaf"
- "\xce\x36\xc7\xce\xa2\xb4\xc9\x60",
- .alen = 16,
- .ptext = "\xb9\x82\x0c\x8d\xbd\x1b\xe2\x36"
- "\xee\x6c\xf4\xf2\xa1\x7d\xf9\xe2",
- .plen = 16,
- .ctext = "\xd8\xfd\x44\x45\xf6\x42\x12\x38"
- "\xf2\x0b\xea\x4f\x9e\x11\x61\x07"
- "\x48\x67\x98\x18\x9b\xd0\x0c\x59"
- "\x67\xa4\x11\xb3\x2b\xd6\xc1\x70",
- .clen = 32,
- }, {
- .key = "\xc8\x4b\x9b\xf5\x01\xba\x8c\xbd"
- "\x0e\xa3\x21\x16\x9f\x46\x2a\x63",
- .klen = 16,
- .iv = "\xd7\x14\x29\x5d\x45\x59\x36\x44"
- "\x2e\xd9\x4d\x3b\x9e\x0f\x5b\xe5",
- .assoc = "\xe6\xdd\xb8\xc4\x89\xf8\xe0\xca"
- "\x4f\x10\x7a\x5f\x9c\xd8\x8b\x66"
- "\x3b",
- .alen = 17,
- .ptext = "\xf5\xa6\x46\x2c\xce\x97\x8a\x51"
- "\x6f\x46\xa6\x83\x9b\xa1\xbc\xe8"
- "\x05",
- .plen = 17,
- .ctext = "\xb1\xab\x53\x4e\xc7\x40\x16\xb6"
- "\x71\x3a\x00\x9f\x41\x88\xb0\xb2"
- "\x71\x83\x85\x5f\xc8\x79\x0a\x99"
- "\x99\xdc\x89\x1c\x88\xd2\x3e\xf9"
- "\x83",
- .clen = 33,
- }, {
- .key = "\x05\x70\xd5\x94\x12\x36\x35\xd8"
- "\x8f\x7d\xd3\xa8\x99\x6a\xed\x69",
- .klen = 16,
- .iv = "\x14\x39\x63\xfc\x56\xd5\xdf\x5f"
- "\xaf\xb3\xff\xcc\x98\x33\x1d\xeb",
- .assoc = "\x23\x02\xf1\x64\x9a\x73\x89\xe6"
- "\xd0\xea\x2c\xf1\x96\xfc\x4e\x6d"
- "\x65\x48\xcb\x0a\xda\xf0\x62\xc0"
- "\x38\x1d\x3b\x4a\xe9\x7e\x62",
- .alen = 31,
- .ptext = "\x32\xcb\x80\xcc\xde\x12\x33\x6d"
- "\xf0\x20\x58\x15\x95\xc6\x7f\xee"
- "\x2f\xf9\x4e\x2c\x1b\x98\x43\xc7"
- "\x68\x28\x73\x40\x9f\x96\x4a",
- .plen = 31,
- .ctext = "\x29\xc4\xf0\x03\xc1\x86\xdf\x06"
- "\x5c\x7b\xef\x64\x87\x00\xd1\x37"
- "\xa7\x08\xbc\x7f\x8f\x41\x54\xd0"
- "\x3e\xf1\xc3\xa2\x96\x84\xdd\x2a"
- "\x2d\x21\x30\xf9\x02\xdb\x06\x0c"
- "\xf1\x5a\x66\x69\xe0\xca\x83",
- .clen = 47,
- }, {
- .key = "\x41\x94\x0e\x33\x22\xb1\xdd\xf4"
- "\x10\x57\x85\x39\x93\x8f\xaf\x70",
- .klen = 16,
- .iv = "\x50\x5d\x9d\x9b\x66\x50\x88\x7b"
- "\x30\x8e\xb1\x5e\x92\x58\xe0\xf1",
- .assoc = "\x5f\x27\x2b\x03\xaa\xef\x32\x02"
- "\x50\xc4\xde\x82\x90\x21\x11\x73"
- "\x8f\x0a\xd6\x8f\xdf\x90\xe4\xda"
- "\xf9\x4a\x1a\x23\xc3\xdd\x02\x81",
- .alen = 32,
- .ptext = "\x6e\xf0\xba\x6b\xee\x8e\xdc\x89"
- "\x71\xfb\x0a\xa6\x8f\xea\x41\xf4"
- "\x5a\xbb\x59\xb0\x20\x38\xc5\xe0"
- "\x29\x56\x52\x19\x79\xf5\xe9\x37",
- .plen = 32,
- .ctext = "\xe2\x2e\x44\xdf\xd3\x60\x6d\xb2"
- "\x70\x57\x37\xc5\xc2\x4f\x8d\x14"
- "\xc6\xbf\x8b\xec\xf5\x62\x67\xf2"
- "\x2f\xa1\xe6\xd6\xa7\xb1\x8c\x54"
- "\xe5\x6b\x49\xf9\x6e\x90\xc3\xaa"
- "\x7a\x00\x2e\x4d\x7f\x31\x2e\x81",
- .clen = 48,
- }, {
- .key = "\x7e\xb9\x48\xd3\x32\x2d\x86\x10"
- "\x91\x31\x37\xcb\x8d\xb3\x72\x76",
- .klen = 16,
- .iv = "\x8d\x82\xd6\x3b\x76\xcc\x30\x97"
- "\xb1\x68\x63\xef\x8c\x7c\xa3\xf7",
- .assoc = "\x9c\x4b\x65\xa2\xba\x6b\xdb\x1e"
- "\xd1\x9e\x90\x13\x8a\x45\xd3\x79"
- "\xba\xcd\xe2\x13\xe4\x30\x66\xf4"
- "\xba\x78\xf9\xfb\x9d\x3c\xa1\x58"
- "\x1a",
- .alen = 33,
- .ptext = "\xab\x14\xf3\x0a\xfe\x0a\x85\xa5"
- "\xf2\xd5\xbc\x38\x89\x0e\x04\xfb"
- "\x84\x7d\x65\x34\x25\xd8\x47\xfa"
- "\xeb\x83\x31\xf1\x54\x54\x89\x0d"
- "\x9d\x4d\x54\x51\x84\x61\xf6\x8e"
- "\x03\x31\xf2\x25\x16\xcc\xaa\xc6"
- "\x75\x73\x20\x30\x59\x54\xb2\xf0"
- "\x3a\x4b\xe0\x23\x8e\xa6\x08\x35"
- "\x8a",
- .plen = 65,
- .ctext = "\xc7\xca\x26\x61\x57\xee\xa2\xb9"
- "\xb1\x37\xde\x95\x06\x90\x11\x08"
- "\x4d\x30\x9f\x24\xc0\x56\xb7\xe1"
- "\x0b\x9f\xd2\x57\xe9\xd2\xb1\x76"
- "\x56\x9a\xb4\x58\xc5\x08\xfc\xb5"
- "\xf2\x31\x9b\xc9\xcd\xb3\x64\xdb"
- "\x6f\x50\xbf\xf4\x73\x9d\xfb\x6b"
- "\xef\x35\x25\x48\xed\xcf\x29\xa8"
- "\xac\xc3\xb9\xcb\x61\x8f\x73\x92"
- "\x2c\x7a\x6f\xda\xf9\x09\x6f\xe1"
- "\xc4",
- .clen = 81,
- }, {
- .key = "\xba\xde\x82\x72\x42\xa9\x2f\x2c"
- "\x12\x0b\xe9\x5c\x87\xd7\x35\x7c",
- .klen = 16,
- .iv = "\xc9\xa7\x10\xda\x86\x48\xd9\xb3"
- "\x32\x42\x15\x80\x85\xa1\x65\xfe",
- .assoc = "\xd8\x70\x9f\x42\xca\xe6\x83\x3a"
- "\x52\x79\x42\xa5\x84\x6a\x96\x7f"
- "\xe4\x8f\xed\x97\xe9\xd0\xe8\x0d"
- "\x7c\xa6\xd8\xd4\x77\x9b\x40\x2e"
- "\x28\xce\x57\x34\xcd\x6e\x84\x4c"
- "\x17\x3c\xe1\xb2\xa8\x0b\xbb\xf1"
- "\x96\x41\x0d\x69\xe8\x54\x0a\xc8"
- "\x15\x4e\x91\x92\x89\x4b\xb7\x9b"
- "\x21",
- .alen = 65,
- .ptext = "\xe8\x39\x2d\xaa\x0e\x85\x2d\xc1"
- "\x72\xaf\x6e\xc9\x82\x33\xc7\x01"
- "\xaf\x40\x70\xb8\x2a\x78\xc9\x14"
- "\xac\xb1\x10\xca\x2e\xb3\x28\xe4"
- "\xac",
- .plen = 33,
- .ctext = "\x57\xcd\x3d\x46\xc5\xf9\x68\x3b"
- "\x2c\x0f\xb4\x7e\x7b\x64\x3e\x40"
- "\xf3\x78\x63\x34\x89\x79\x39\x6b"
- "\x61\x64\x4a\x9a\xfa\x70\xa4\xd3"
- "\x54\x0b\xea\x05\xa6\x95\x64\xed"
- "\x3d\x69\xa2\x0c\x27\x56\x2f\x34"
- "\x66",
- .clen = 49,
- }, {
- .key = "\xf7\x02\xbb\x11\x52\x24\xd8\x48"
- "\x93\xe6\x9b\xee\x81\xfc\xf7\x82",
- .klen = 16,
- .iv = "\x06\xcc\x4a\x79\x96\xc3\x82\xcf"
- "\xb3\x1c\xc7\x12\x7f\xc5\x28\x04",
- .assoc = "\x15\x95\xd8\xe1\xda\x62\x2c\x56"
- "\xd3\x53\xf4\x36\x7e\x8e\x59\x85",
- .alen = 16,
- .ptext = "\x24\x5e\x67\x49\x1e\x01\xd6\xdd"
- "\xf3\x89\x20\x5b\x7c\x57\x89\x07",
- .plen = 16,
- .ctext = "\xfc\x85\x06\x28\x8f\xe8\x23\x1f"
- "\x33\x98\x87\xde\x08\xb6\xb6\xae"
- "\x3e\xa4\xf8\x19\xf1\x92\x60\x39"
- "\xb9\x6b\x3f\xdf\xc8\xcb\x30",
- .clen = 31,
- }, {
- .key = "\x33\x27\xf5\xb1\x62\xa0\x80\x63"
- "\x14\xc0\x4d\x7f\x7b\x20\xba\x89",
- .klen = 16,
- .iv = "\x42\xf0\x84\x19\xa6\x3f\x2b\xea"
- "\x34\xf6\x79\xa3\x79\xe9\xeb\x0a",
- .assoc = "\x51\xb9\x12\x80\xea\xde\xd5\x71"
- "\x54\x2d\xa6\xc8\x78\xb2\x1b\x8c",
- .alen = 16,
- .ptext = "\x61\x83\xa0\xe8\x2e\x7d\x7f\xf8"
- "\x74\x63\xd2\xec\x76\x7c\x4c\x0d",
- .plen = 16,
- .ctext = "\x74\x7d\x70\x07\xe9\xba\x01\xee"
- "\x6c\xc6\x6f\x50\x25\x33\xbe\x50"
- "\x17\xb8\x17\x62\xed\x80\xa2\xf5"
- "\x03\xde\x85\x71\x5d\x34",
- .clen = 30,
- }, {
- .key = "\x70\x4c\x2f\x50\x72\x1c\x29\x7f"
- "\x95\x9a\xff\x10\x75\x45\x7d\x8f",
- .klen = 16,
- .iv = "\x7f\x15\xbd\xb8\xb6\xba\xd3\x06"
- "\xb5\xd1\x2b\x35\x73\x0e\xad\x10",
- .assoc = "\x8e\xde\x4c\x20\xfa\x59\x7e\x8d"
- "\xd5\x07\x58\x59\x72\xd7\xde\x92",
- .alen = 16,
- .ptext = "\x9d\xa7\xda\x88\x3e\xf8\x28\x14"
- "\xf5\x3e\x85\x7d\x70\xa0\x0f\x13",
- .plen = 16,
- .ctext = "\xf4\xb3\x85\xf9\xac\xde\xb1\x38"
- "\x29\xfd\x6c\x7c\x49\xe5\x1d\xaf"
- "\xba\xea\xd4\xfa\x3f\x11\x33\x98",
- .clen = 24,
- }, {
- .key = "\xac\x70\x69\xef\x82\x97\xd2\x9b"
- "\x15\x74\xb1\xa2\x6f\x69\x3f\x95",
- .klen = 16,
- .iv = "\xbb\x3a\xf7\x57\xc6\x36\x7c\x22"
- "\x36\xab\xde\xc6\x6d\x32\x70\x17",
- .assoc = "\xcb\x03\x85\xbf\x0a\xd5\x26\xa9"
- "\x56\xe1\x0a\xeb\x6c\xfb\xa1\x98",
- .alen = 16,
- .ptext = "\xda\xcc\x14\x27\x4e\x74\xd1\x30"
- "\x76\x18\x37\x0f\x6a\xc4\xd1\x1a",
- .plen = 16,
- .ctext = "\xe6\x5c\x49\x4f\x78\xf3\x62\x86"
- "\xe1\xb7\xa5\xc3\x32\x88\x3c\x8c"
- "\x6e",
- .clen = 17,
- },
-};
-
-/*
- * MORUS-1280 test vectors - generated via the reference implementation from
- * SUPERCOP (https://bench.cr.yp.to/supercop.html):
- *
- *   https://bench.cr.yp.to/supercop/supercop-20170228.tar.xz
- *   (see crypto_aead/morus1280128v2/ and crypto_aead/morus1280256v2/)
- */
-static const struct aead_testvec morus1280_tv_template[] = {
- {
- .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
- "\x00\x00\x00\x00\x00\x00\x00\x00",
- .klen = 16,
- .iv = "\x0f\xc9\x8e\x67\x44\x9e\xaa\x86"
- "\x20\x36\x2c\x24\xfe\xc9\x30\x81",
- .assoc = "",
- .alen = 0,
- .ptext = "",
- .plen = 0,
- .ctext = "\x91\x85\x0f\xf5\x52\x9e\xce\xce"
- "\x65\x99\xc7\xbf\xd3\x76\xe8\x98",
- .clen = 16,
- }, {
- .key = "\x3c\x24\x39\x9f\x10\x7b\xa8\x1b"
- "\x80\xda\xb2\x91\xf9\x24\xc2\x06",
- .klen = 16,
- .iv = "\x4b\xed\xc8\x07\x54\x1a\x52\xa2"
- "\xa1\x10\xde\xb5\xf8\xed\xf3\x87",
- .assoc = "",
- .alen = 0,
- .ptext = "\x69",
- .plen = 1,
- .ctext = "\x88\xc3\x4c\xf0\x2f\x43\x76\x13"
- "\x96\xda\x76\x34\x33\x4e\xd5\x39"
- "\x73",
- .clen = 17,
- }, {
- .key = "\x79\x49\x73\x3e\x20\xf7\x51\x37"
- "\x01\xb4\x64\x22\xf3\x48\x85\x0c",
- .klen = 16,
- .iv = "\x88\x12\x01\xa6\x64\x96\xfb\xbe"
- "\x22\xea\x90\x47\xf2\x11\xb5\x8e",
- .assoc = "",
- .alen = 0,
- .ptext = "\xa6\xa4\x1e\x76\xec\xd4\x50\xcc"
- "\x62\x58\xe9\x8f\xef\xa4\x17\x91"
- "\xb4\x96\x9f\x6b\xce\x38\xa5\x46"
- "\x13\x7d\x64\x93\xd7\x05\xf5",
- .plen = 31,
- .ctext = "\x3e\x5c\x3b\x58\x3b\x7d\x2a\x22"
- "\x75\x0b\x24\xa6\x0e\xc3\xde\x52"
- "\x97\x0b\x64\xd4\xce\x90\x52\xf7"
- "\xef\xdb\x6a\x38\xd2\xa8\xa1\x0d"
- "\xe0\x61\x33\x24\xc6\x4d\x51\xbc"
- "\xa4\x21\x74\xcf\x19\x16\x59",
- .clen = 47,
- }, {
- .key = "\xb5\x6e\xad\xdd\x30\x72\xfa\x53"
- "\x82\x8e\x16\xb4\xed\x6d\x47\x12",
- .klen = 16,
- .iv = "\xc4\x37\x3b\x45\x74\x11\xa4\xda"
- "\xa2\xc5\x42\xd8\xec\x36\x78\x94",
- .assoc = "",
- .alen = 0,
- .ptext = "\xe2\xc9\x58\x15\xfc\x4f\xf8\xe8"
- "\xe3\x32\x9b\x21\xe9\xc8\xd9\x97"
- "\xde\x58\xab\xf0\xd3\xd8\x27\x60"
- "\xd5\xaa\x43\x6b\xb1\x64\x95\xa4",
- .plen = 32,
- .ctext = "\x30\x82\x9c\x2b\x67\xcb\xf9\x1f"
- "\xde\x9f\x77\xb2\xda\x92\x61\x5c"
- "\x09\x0b\x2d\x9a\x26\xaa\x1c\x06"
- "\xab\x74\xb7\x2b\x95\x5f\x9f\xa1"
- "\x9a\xff\x50\xa0\xa2\xff\xc5\xad"
- "\x21\x8e\x84\x5c\x12\x61\xb2\xae",
- .clen = 48,
- }, {
- .key = "\xf2\x92\xe6\x7d\x40\xee\xa3\x6f"
- "\x03\x68\xc8\x45\xe7\x91\x0a\x18",
- .klen = 16,
- .iv = "\x01\x5c\x75\xe5\x84\x8d\x4d\xf6"
- "\x23\x9f\xf4\x6a\xe6\x5a\x3b\x9a",
- .assoc = "",
- .alen = 0,
- .ptext = "\x1f\xee\x92\xb4\x0c\xcb\xa1\x04"
- "\x64\x0c\x4d\xb2\xe3\xec\x9c\x9d"
- "\x09\x1a\xb7\x74\xd8\x78\xa9\x79"
- "\x96\xd8\x22\x43\x8c\xc3\x34\x7b"
- "\xc4",
- .plen = 33,
- .ctext = "\x67\x5d\x8e\x45\xc8\x39\xf5\x17"
- "\xc1\x1d\x2a\xdd\x88\x67\xda\x1f"
- "\x6d\xe8\x37\x28\x5a\xc1\x5e\x9f"
- "\xa6\xec\xc6\x92\x05\x4b\xc0\xa3"
- "\x63\xef\x88\xa4\x9b\x0a\x5c\xed"
- "\x2b\x6a\xac\x63\x52\xaa\x10\x94"
- "\xd0",
- .clen = 49,
- }, {
- .key = "\x2e\xb7\x20\x1c\x50\x6a\x4b\x8b"
- "\x84\x42\x7a\xd7\xe1\xb5\xcd\x1f",
- .klen = 16,
- .iv = "\x3d\x80\xae\x84\x94\x09\xf6\x12"
- "\xa4\x79\xa6\xfb\xe0\x7f\xfd\xa0",
- .assoc = "",
- .alen = 0,
- .ptext = "\x5c\x13\xcb\x54\x1c\x47\x4a\x1f"
- "\xe5\xe6\xff\x44\xdd\x11\x5f\xa3"
- "\x33\xdd\xc2\xf8\xdd\x18\x2b\x93"
- "\x57\x05\x01\x1c\x66\x22\xd3\x51"
- "\xd3\xdf\x18\xc9\x30\x66\xed\xb1"
- "\x96\x58\xd5\x8c\x64\x8c\x7c\xf5"
- "\x01\xd0\x74\x5f\x9b\xaa\xf6\xd1"
- "\xe6\x16\xa2\xac\xde\x47\x40",
- .plen = 63,
- .ctext = "\x7d\x61\x1a\x35\x20\xcc\x07\x88"
- "\x03\x98\x87\xcf\xc0\x6e\x4d\x19"
- "\xe3\xd4\x0b\xfb\x29\x8f\x49\x1a"
- "\x3a\x06\x77\xce\x71\x2c\xcd\xdd"
- "\xed\xf6\xc9\xbe\xa6\x3b\xb8\xfc"
- "\x6c\xbe\x77\xed\x74\x0e\x20\x85"
- "\xd0\x65\xde\x24\x6f\xe3\x25\xc5"
- "\xdf\x5b\x0f\xbd\x8a\x88\x78\xc9"
- "\xe5\x81\x37\xde\x84\x7a\xf6\x84"
- "\x99\x7a\x72\x9c\x54\x31\xa1",
- .clen = 79,
- }, {
- .key = "\x6b\xdc\x5a\xbb\x60\xe5\xf4\xa6"
- "\x05\x1d\x2c\x68\xdb\xda\x8f\x25",
- .klen = 16,
- .iv = "\x7a\xa5\xe8\x23\xa4\x84\x9e\x2d"
- "\x25\x53\x58\x8c\xda\xa3\xc0\xa6",
- .assoc = "",
- .alen = 0,
- .ptext = "\x98\x37\x05\xf3\x2c\xc2\xf3\x3b"
- "\x66\xc0\xb1\xd5\xd7\x35\x21\xaa"
- "\x5d\x9f\xce\x7c\xe2\xb8\xad\xad"
- "\x19\x33\xe0\xf4\x40\x81\x72\x28"
- "\xe1\x8b\x1c\xf8\x91\x78\xff\xaf"
- "\xb0\x68\x69\xf2\x27\x35\x91\x84"
- "\x2e\x37\x5b\x00\x04\xff\x16\x9c"
- "\xb5\x19\x39\xeb\xd9\xcd\x29\x9a",
- .plen = 64,
- .ctext = "\x05\xc5\xb1\xf9\x1b\xb9\xab\x2c"
- "\xa5\x07\x12\xa7\x12\x39\x60\x66"
- "\x30\x81\x4a\x03\x78\x28\x45\x52"
- "\xd2\x2b\x24\xfd\x8b\xa5\xb7\x66"
- "\x6f\x45\xd7\x3b\x67\x6f\x51\xb9"
- "\xc0\x3d\x6c\xca\x1e\xae\xff\xb6"
- "\x79\xa9\xe4\x82\x5d\x4c\x2d\xdf"
- "\xeb\x71\x40\xc9\x2c\x40\x45\x6d"
- "\x73\x77\x01\xf3\x4f\xf3\x9d\x2a"
- "\x5d\x57\xa8\xa1\x18\xa2\xad\xcb",
- .clen = 80,
- }, {
- .key = "\xa7\x00\x93\x5b\x70\x61\x9d\xc2"
- "\x86\xf7\xde\xfa\xd5\xfe\x52\x2b",
- .klen = 16,
- .iv = "\xb6\xca\x22\xc3\xb4\x00\x47\x49"
- "\xa6\x2d\x0a\x1e\xd4\xc7\x83\xad",
- .assoc = "\xc5",
- .alen = 1,
- .ptext = "",
- .plen = 0,
- .ctext = "\x4d\xbf\x11\xac\x7f\x97\x0b\x2e"
- "\x89\x3b\x9d\x0f\x83\x1c\x08\xc3",
- .clen = 16,
- }, {
- .key = "\xe4\x25\xcd\xfa\x80\xdd\x46\xde"
- "\x07\xd1\x90\x8b\xcf\x23\x15\x31",
- .klen = 16,
- .iv = "\xf3\xee\x5c\x62\xc4\x7c\xf0\x65"
- "\x27\x08\xbd\xaf\xce\xec\x45\xb3",
- .assoc = "\x02\xb8\xea\xca\x09\x1b\x9a\xec"
- "\x47\x3e\xe9\xd4\xcc\xb5\x76\x34"
- "\xe8\x73\x62\x64\xab\x50\xd0\xda"
- "\x6b\x83\x66\xaf\x3e\x27\xc9",
- .alen = 31,
- .ptext = "",
- .plen = 0,
- .ctext = "\x5b\xc0\x8d\x54\xe4\xec\xbe\x38"
- "\x03\x12\xf9\xcc\x9e\x46\x42\x92",
- .clen = 16,
- }, {
- .key = "\x20\x4a\x07\x99\x91\x58\xee\xfa"
- "\x88\xab\x42\x1c\xc9\x47\xd7\x38",
- .klen = 16,
- .iv = "\x2f\x13\x95\x01\xd5\xf7\x99\x81"
- "\xa8\xe2\x6f\x41\xc8\x10\x08\xb9",
- .assoc = "\x3f\xdc\x24\x69\x19\x96\x43\x08"
- "\xc8\x18\x9b\x65\xc6\xd9\x39\x3b"
- "\x12\x35\x6e\xe8\xb0\xf0\x52\xf3"
- "\x2d\xb0\x45\x87\x18\x86\x68\xf6",
- .alen = 32,
- .ptext = "",
- .plen = 0,
- .ctext = "\x48\xc5\xc3\x4c\x40\x2e\x2f\xc2"
- "\x6d\x65\xe0\x67\x9c\x1d\xa0\xf0",
- .clen = 16,
- }, {
- .key = "\x5d\x6f\x41\x39\xa1\xd4\x97\x16"
- "\x09\x85\xf4\xae\xc3\x6b\x9a\x3e",
- .klen = 16,
- .iv = "\x6c\x38\xcf\xa1\xe5\x73\x41\x9d"
- "\x29\xbc\x21\xd2\xc2\x35\xcb\xbf",
- .assoc = "\x7b\x01\x5d\x08\x29\x12\xec\x24"
- "\x49\xf3\x4d\xf7\xc0\xfe\xfb\x41"
- "\x3c\xf8\x79\x6c\xb6\x90\xd4\x0d"
- "\xee\xde\x23\x60\xf2\xe5\x08\xcc"
- "\x97",
- .alen = 33,
- .ptext = "",
- .plen = 0,
- .ctext = "\x28\x64\x78\x51\x55\xd8\x56\x4a"
- "\x58\x3e\xf7\xbe\xee\x21\xfe\x94",
- .clen = 16,
- }, {
- .key = "\x99\x93\x7a\xd8\xb1\x50\x40\x31"
- "\x8a\x60\xa6\x3f\xbd\x90\x5d\x44",
- .klen = 16,
- .iv = "\xa8\x5c\x09\x40\xf5\xef\xea\xb8"
- "\xaa\x96\xd3\x64\xbc\x59\x8d\xc6",
- .assoc = "\xb8\x26\x97\xa8\x39\x8e\x94\x3f"
- "\xca\xcd\xff\x88\xba\x22\xbe\x47"
- "\x67\xba\x85\xf1\xbb\x30\x56\x26"
- "\xaf\x0b\x02\x38\xcc\x44\xa7\xa3"
- "\xa6\xbf\x31\x93\x60\xcd\xda\x63"
- "\x2c\xb1\xaa\x19\xc8\x19\xf8\xeb"
- "\x03\xa1\xe8\xbe\x37\x54\xec\xa2"
- "\xcd\x2c\x45\x58\xbd\x8e\x80",
- .alen = 63,
- .ptext = "",
- .plen = 0,
- .ctext = "\xb3\xa6\x00\x4e\x09\x20\xac\x21"
- "\x77\x72\x69\x76\x2d\x36\xe5\xc8",
- .clen = 16,
- }, {
- .key = "\xd6\xb8\xb4\x77\xc1\xcb\xe9\x4d"
- "\x0a\x3a\x58\xd1\xb7\xb4\x1f\x4a",
- .klen = 16,
- .iv = "\xe5\x81\x42\xdf\x05\x6a\x93\xd4"
- "\x2b\x70\x85\xf5\xb6\x7d\x50\xcc",
- .assoc = "\xf4\x4a\xd1\x47\x49\x09\x3d\x5b"
- "\x4b\xa7\xb1\x19\xb4\x46\x81\x4d"
- "\x91\x7c\x91\x75\xc0\xd0\xd8\x40"
- "\x71\x39\xe1\x10\xa6\xa3\x46\x7a"
- "\xb4\x6b\x35\xc2\xc1\xdf\xed\x60"
- "\x46\xc1\x3e\x7f\x8c\xc2\x0e\x7a"
- "\x30\x08\xd0\x5f\xa0\xaa\x0c\x6d"
- "\x9c\x2f\xdb\x97\xb8\x15\x69\x01",
- .alen = 64,
- .ptext = "",
- .plen = 0,
- .ctext = "\x65\x33\x7b\xa1\x63\xf4\x20\xdd"
- "\xe4\xb9\x4a\xaa\x9a\x21\xaa\x14",
- .clen = 16,
- }, {
- .key = "\x12\xdd\xee\x17\xd1\x47\x92\x69"
- "\x8b\x14\x0a\x62\xb1\xd9\xe2\x50",
- .klen = 16,
- .iv = "\x22\xa6\x7c\x7f\x15\xe6\x3c\xf0"
- "\xac\x4b\x37\x86\xb0\xa2\x13\xd2",
- .assoc = "\x31",
- .alen = 1,
- .ptext = "\x40",
- .plen = 1,
- .ctext = "\x1d\x47\x17\x34\x86\xf5\x54\x1a"
- "\x6d\x28\xb8\x5d\x6c\xcf\xa0\xb9"
- "\xbf",
- .clen = 17,
- }, {
- .key = "\x4f\x01\x27\xb6\xe1\xc3\x3a\x85"
- "\x0c\xee\xbc\xf4\xab\xfd\xa5\x57",
- .klen = 16,
- .iv = "\x5e\xcb\xb6\x1e\x25\x62\xe4\x0c"
- "\x2d\x25\xe9\x18\xaa\xc6\xd5\xd8",
- .assoc = "\x6d\x94\x44\x86\x69\x00\x8f\x93"
- "\x4d\x5b\x15\x3c\xa8\x8f\x06\x5a"
- "\xe6\x01\xa8\x7e\xca\x10\xdc\x73"
- "\xf4\x94\x9f\xc1\x5a\x61\x85",
- .alen = 31,
- .ptext = "\x7c\x5d\xd3\xee\xad\x9f\x39\x1a"
- "\x6d\x92\x42\x61\xa7\x58\x37\xdb"
- "\xb0\xb2\x2b\x9f\x0b\xb8\xbd\x7a"
- "\x24\xa0\xd6\xb7\x11\x79\x6c",
- .plen = 31,
- .ctext = "\x78\x90\x52\xae\x0f\xf7\x2e\xef"
- "\x63\x09\x08\x58\xb5\x56\xbd\x72"
- "\x6e\x42\xcf\x27\x04\x7c\xdb\x92"
- "\x18\xe9\xa4\x33\x90\xba\x62\xb5"
- "\x70\xd3\x88\x9b\x4f\x05\xa7\x51"
- "\x85\x87\x17\x09\x42\xed\x4e",
- .clen = 47,
- }, {
- .key = "\x8b\x26\x61\x55\xf1\x3e\xe3\xa1"
- "\x8d\xc8\x6e\x85\xa5\x21\x67\x5d",
- .klen = 16,
- .iv = "\x9b\xef\xf0\xbd\x35\xdd\x8d\x28"
- "\xad\xff\x9b\xa9\xa4\xeb\x98\xdf",
- .assoc = "\xaa\xb8\x7e\x25\x79\x7c\x37\xaf"
- "\xce\x36\xc7\xce\xa2\xb4\xc9\x60"
- "\x10\xc3\xb3\x02\xcf\xb0\x5e\x8d"
- "\xb5\xc2\x7e\x9a\x35\xc0\x24\xfd",
- .alen = 32,
- .ptext = "\xb9\x82\x0c\x8d\xbd\x1b\xe2\x36"
- "\xee\x6c\xf4\xf2\xa1\x7d\xf9\xe2"
- "\xdb\x74\x36\x23\x11\x58\x3f\x93"
- "\xe5\xcd\xb5\x90\xeb\xd8\x0c\xb3",
- .plen = 32,
- .ctext = "\x1d\x2c\x57\xe0\x50\x38\x3d\x41"
- "\x2e\x71\xc8\x3b\x92\x43\x58\xaf"
- "\x5a\xfb\xad\x8f\xd9\xd5\x8a\x5e"
- "\xdb\xf3\xcd\x3a\x2b\xe1\x2c\x1a"
- "\xb0\xed\xe3\x0c\x6e\xf9\xf2\xd6"
- "\x90\xe6\xb1\x0e\xa5\x8a\xac\xb7",
- .clen = 48,
- }, {
- .key = "\xc8\x4b\x9b\xf5\x01\xba\x8c\xbd"
- "\x0e\xa3\x21\x16\x9f\x46\x2a\x63",
- .klen = 16,
- .iv = "\xd7\x14\x29\x5d\x45\x59\x36\x44"
- "\x2e\xd9\x4d\x3b\x9e\x0f\x5b\xe5",
- .assoc = "\xe6\xdd\xb8\xc4\x89\xf8\xe0\xca"
- "\x4f\x10\x7a\x5f\x9c\xd8\x8b\x66"
- "\x3b\x86\xbf\x86\xd4\x50\xe0\xa7"
- "\x76\xef\x5c\x72\x0f\x1f\xc3\xd4"
- "\xee",
- .alen = 33,
- .ptext = "\xf5\xa6\x46\x2c\xce\x97\x8a\x51"
- "\x6f\x46\xa6\x83\x9b\xa1\xbc\xe8"
- "\x05\x36\x42\xa7\x16\xf8\xc1\xad"
- "\xa7\xfb\x94\x68\xc5\x37\xab\x8a"
- "\x72",
- .plen = 33,
- .ctext = "\x59\x10\x84\x1c\x83\x4c\x8b\xfc"
- "\xfd\x2e\x4b\x46\x84\xff\x78\x4e"
- "\x50\xda\x5c\xb9\x61\x1d\xf5\xb9"
- "\xfe\xbb\x7f\xae\x8c\xc1\x24\xbd"
- "\x8c\x6f\x1f\x9b\xce\xc6\xc1\x37"
- "\x08\x06\x5a\xe5\x96\x10\x95\xc2"
- "\x5e",
- .clen = 49,
- }, {
- .key = "\x05\x70\xd5\x94\x12\x36\x35\xd8"
- "\x8f\x7d\xd3\xa8\x99\x6a\xed\x69",
- .klen = 16,
- .iv = "\x14\x39\x63\xfc\x56\xd5\xdf\x5f"
- "\xaf\xb3\xff\xcc\x98\x33\x1d\xeb",
- .assoc = "\x23\x02\xf1\x64\x9a\x73\x89\xe6"
- "\xd0\xea\x2c\xf1\x96\xfc\x4e\x6d"
- "\x65\x48\xcb\x0a\xda\xf0\x62\xc0"
- "\x38\x1d\x3b\x4a\xe9\x7e\x62\xaa"
- "\xfd\xc9\x4a\xa9\xa9\x39\x4b\x54"
- "\xc8\x0e\x24\x7f\x5e\x10\x7a\x45"
- "\x10\x0b\x56\x85\xad\x54\xaa\x66"
- "\xa8\x43\xcd\xd4\x9b\xb7\xfa",
- .alen = 63,
- .ptext = "\x32\xcb\x80\xcc\xde\x12\x33\x6d"
- "\xf0\x20\x58\x15\x95\xc6\x7f\xee"
- "\x2f\xf9\x4e\x2c\x1b\x98\x43\xc7"
- "\x68\x28\x73\x40\x9f\x96\x4a\x60"
- "\x80\xf4\x4b\xf4\xc1\x3d\xd0\x93"
- "\xcf\x12\xc9\x59\x8f\x7a\x7f\xa8"
- "\x1b\xa5\x50\xed\x87\xa9\x72\x59"
- "\x9c\x44\xb2\xa4\x99\x98\x34",
- .plen = 63,
- .ctext = "\x9a\x12\xbc\xdf\x72\xa8\x56\x22"
- "\x49\x2d\x07\x92\xfc\x3d\x6d\x5f"
- "\xef\x36\x19\xae\x91\xfa\xd6\x63"
- "\x46\xea\x8a\x39\x14\x21\xa6\x37"
- "\x18\xfc\x97\x3e\x16\xa5\x4d\x39"
- "\x45\x2e\x69\xcc\x9c\x5f\xdf\x6d"
- "\x5e\xa2\xbf\xac\x83\x32\x72\x52"
- "\x58\x58\x23\x40\xfd\xa5\xc2\xe6"
- "\xe9\x5a\x50\x98\x00\x58\xc9\x86"
- "\x4f\x20\x37\xdb\x7b\x22\xa3",
- .clen = 79,
- }, {
- .key = "\x41\x94\x0e\x33\x22\xb1\xdd\xf4"
- "\x10\x57\x85\x39\x93\x8f\xaf\x70",
- .klen = 16,
- .iv = "\x50\x5d\x9d\x9b\x66\x50\x88\x7b"
- "\x30\x8e\xb1\x5e\x92\x58\xe0\xf1",
- .assoc = "\x5f\x27\x2b\x03\xaa\xef\x32\x02"
- "\x50\xc4\xde\x82\x90\x21\x11\x73"
- "\x8f\x0a\xd6\x8f\xdf\x90\xe4\xda"
- "\xf9\x4a\x1a\x23\xc3\xdd\x02\x81"
- "\x0b\x76\x4f\xd7\x0a\x4b\x5e\x51"
- "\xe3\x1d\xb9\xe5\x21\xb9\x8f\xd4"
- "\x3d\x72\x3e\x26\x16\xa9\xca\x32"
- "\x77\x47\x63\x14\x95\x3d\xe4\x34",
- .alen = 64,
- .ptext = "\x6e\xf0\xba\x6b\xee\x8e\xdc\x89"
- "\x71\xfb\x0a\xa6\x8f\xea\x41\xf4"
- "\x5a\xbb\x59\xb0\x20\x38\xc5\xe0"
- "\x29\x56\x52\x19\x79\xf5\xe9\x37"
- "\x8f\xa1\x50\x23\x22\x4f\xe3\x91"
- "\xe9\x21\x5e\xbf\x52\x23\x95\x37"
- "\x48\x0c\x38\x8f\xf0\xff\x92\x24"
- "\x6b\x47\x49\xe3\x94\x1f\x1e\x01",
- .plen = 64,
- .ctext = "\xe6\xeb\x92\x5a\x5b\xf0\x2d\xbb"
- "\x23\xec\x35\xe3\xae\xc9\xfb\x0b"
- "\x90\x14\x46\xeb\xa8\x8d\xb0\x9b"
- "\x39\xda\x8b\x48\xec\xb2\x00\x4e"
- "\x80\x6f\x46\x4f\x9b\x1e\xbb\x35"
- "\xea\x5a\xbc\xa2\x36\xa5\x89\x45"
- "\xc2\xd6\xd7\x15\x0b\xf6\x6c\x56"
- "\xec\x99\x7d\x61\xb3\x15\x93\xed"
- "\x83\x1e\xd9\x48\x84\x0b\x37\xfe"
- "\x95\x74\x44\xd5\x54\xa6\x27\x06",
- .clen = 80,
- }, {
- .key = "\x7e\xb9\x48\xd3\x32\x2d\x86\x10"
- "\x91\x31\x37\xcb\x8d\xb3\x72\x76",
- .klen = 16,
- .iv = "\x8d\x82\xd6\x3b\x76\xcc\x30\x97"
- "\xb1\x68\x63\xef\x8c\x7c\xa3\xf7",
- .assoc = "\x9c\x4b\x65\xa2\xba\x6b\xdb\x1e"
- "\xd1\x9e\x90\x13\x8a\x45\xd3\x79"
- "\xba\xcd\xe2\x13\xe4\x30\x66\xf4"
- "\xba\x78\xf9\xfb\x9d\x3c\xa1\x58"
- "\x1a\x22\x53\x05\x6b\x5c\x71\x4f"
- "\xfd\x2d\x4d\x4c\xe5\x62\xa5\x63"
- "\x6a\xda\x26\xc8\x7f\xff\xea\xfd"
- "\x46\x4a\xfa\x53\x8f\xc4\xcd\x68"
- "\x58",
- .alen = 65,
- .ptext = "\xab\x14\xf3\x0a\xfe\x0a\x85\xa5"
- "\xf2\xd5\xbc\x38\x89\x0e\x04\xfb"
- "\x84\x7d\x65\x34\x25\xd8\x47\xfa"
- "\xeb\x83\x31\xf1\x54\x54\x89\x0d"
- "\x9d\x4d\x54\x51\x84\x61\xf6\x8e"
- "\x03\x31\xf2\x25\x16\xcc\xaa\xc6"
- "\x75\x73\x20\x30\x59\x54\xb2\xf0"
- "\x3a\x4b\xe0\x23\x8e\xa6\x08\x35"
- "\x8a\xdf\x27\xa0\xe4\x60\x99\xae"
- "\x8e\x43\xd9\x39\x7b\x10\x40\x67"
- "\x5c\x7e\xc9\x70\x63\x34\xca\x59"
- "\xfe\x86\xbc\xb7\x9c\x39\xf3\x6d"
- "\x6a\x41\x64\x6f\x16\x7f\x65\x7e"
- "\x89\x84\x68\xeb\xb0\x51\xbe\x55"
- "\x33\x16\x59\x6c\x3b\xef\x88\xad"
- "\x2f\xab\xbc\x25\x76\x87\x41\x2f"
- "\x36",
- .plen = 129,
- .ctext = "\x89\x24\x27\x86\xdc\xd7\x6b\xd9"
- "\xd1\xcd\xdc\x16\xdd\x2c\xc1\xfb"
- "\x52\xb5\xb3\xab\x50\x99\x3f\xa0"
- "\x38\xa4\x74\xa5\x04\x15\x63\x05"
- "\x8f\x54\x81\x06\x5a\x6b\xa4\x63"
- "\x6d\xa7\x21\xcb\xff\x42\x30\x8e"
- "\x3b\xd1\xca\x3f\x4b\x1a\xb8\xc3"
- "\x42\x01\xe6\xbc\x75\x15\x87\xee"
- "\xc9\x8e\x65\x01\xd9\xd8\xb5\x9f"
- "\x48\x86\xa6\x5f\x2c\xc7\xb5\xb0"
- "\xed\x5d\x14\x7c\x3f\x40\xb1\x0b"
- "\x72\xef\x94\x8d\x7a\x85\x56\xe5"
- "\x56\x08\x15\x56\xba\xaf\xbd\xf0"
- "\x20\xef\xa0\xf6\xa9\xad\xa2\xc9"
- "\x1c\x3b\x28\x51\x7e\x77\xb2\x18"
- "\x4f\x61\x64\x37\x22\x36\x6d\x78"
- "\xed\xed\x35\xe8\x83\xa5\xec\x25"
- "\x6b\xff\x5f\x1a\x09\x96\x3d\xdc"
- "\x20",
- .clen = 145,
- }, {
- .key = "\xba\xde\x82\x72\x42\xa9\x2f\x2c"
- "\x12\x0b\xe9\x5c\x87\xd7\x35\x7c",
- .klen = 16,
- .iv = "\xc9\xa7\x10\xda\x86\x48\xd9\xb3"
- "\x32\x42\x15\x80\x85\xa1\x65\xfe",
- .assoc = "\xd8\x70\x9f\x42\xca\xe6\x83\x3a"
- "\x52\x79\x42\xa5\x84\x6a\x96\x7f"
- "\xe4\x8f\xed\x97\xe9\xd0\xe8\x0d"
- "\x7c\xa6\xd8\xd4\x77\x9b\x40\x2e"
- "\x28\xce\x57\x34\xcd\x6e\x84\x4c"
- "\x17\x3c\xe1\xb2\xa8\x0b\xbb\xf1"
- "\x96\x41\x0d\x69\xe8\x54\x0a\xc8"
- "\x15\x4e\x91\x92\x89\x4b\xb7\x9b"
- "\x21\xf7\x42\x89\xac\x12\x2a\x54"
- "\x69\xee\x18\xc7\x8d\xed\xe8\xfd"
- "\xbb\x04\x28\xe6\x8a\x3c\x98\xc1"
- "\x04\x2d\xa9\xa1\x24\x83\xff\xe9"
- "\x55\x7a\xf0\xd1\xf6\x63\x05\xe1"
- "\xd9\x1e\x75\x72\xc1\x9f\xae\x32"
- "\xe1\x6b\xcd\x9e\x61\x19\x23\x86"
- "\xd9\xd2\xaf\x8e\xd5\xd3\xa8\xa9"
- "\x51",
- .alen = 129,
- .ptext = "\xe8\x39\x2d\xaa\x0e\x85\x2d\xc1"
- "\x72\xaf\x6e\xc9\x82\x33\xc7\x01"
- "\xaf\x40\x70\xb8\x2a\x78\xc9\x14"
- "\xac\xb1\x10\xca\x2e\xb3\x28\xe4"
- "\xac\xfa\x58\x7f\xe5\x73\x09\x8c"
- "\x1d\x40\x87\x8c\xd9\x75\xc0\x55"
- "\xa2\xda\x07\xd1\xc2\xa9\xd1\xbb"
- "\x09\x4f\x77\x62\x88\x2d\xf2\x68"
- "\x54",
- .plen = 65,
- .ctext = "\x36\x78\xb9\x22\xde\x62\x35\x55"
- "\x1a\x7a\xf5\x45\xbc\xd7\x15\x82"
- "\x01\xe9\x5a\x07\xea\x46\xaf\x91"
- "\xcb\x73\xa5\xee\xe1\xb4\xbf\xc2"
- "\xdb\xd2\x9d\x59\xde\xfc\x83\x00"
- "\xf5\x46\xac\x97\xd5\x57\xa9\xb9"
- "\x1f\x8c\xe8\xca\x68\x8b\x91\x0c"
- "\x01\xbe\x0a\xaf\x7c\xf6\x67\xa4"
- "\xbf\xbc\x88\x3f\x5d\xd1\xf9\x19"
- "\x0f\x9d\xb2\xaf\xb9\x6e\x17\xdf"
- "\xa2",
- .clen = 81,
- }, {
- .key = "\xf7\x02\xbb\x11\x52\x24\xd8\x48"
- "\x93\xe6\x9b\xee\x81\xfc\xf7\x82",
- .klen = 16,
- .iv = "\x06\xcc\x4a\x79\x96\xc3\x82\xcf"
- "\xb3\x1c\xc7\x12\x7f\xc5\x28\x04",
- .assoc = "\x15\x95\xd8\xe1\xda\x62\x2c\x56"
- "\xd3\x53\xf4\x36\x7e\x8e\x59\x85"
- "\x0e\x51\xf9\x1c\xee\x70\x6a\x27"
- "\x3d\xd3\xb7\xac\x51\xfa\xdf\x05",
- .alen = 32,
- .ptext = "\x24\x5e\x67\x49\x1e\x01\xd6\xdd"
- "\xf3\x89\x20\x5b\x7c\x57\x89\x07"
- "\xd9\x02\x7c\x3d\x2f\x18\x4b\x2d"
- "\x6e\xde\xee\xa2\x08\x12\xc7\xba",
- .plen = 32,
- .ctext = "\x08\x1b\x95\x0e\x41\x95\x02\x4b"
- "\x9c\xbb\xa8\xd0\x7c\xd3\x44\x6e"
- "\x89\x14\x33\x70\x0a\xbc\xea\x39"
- "\x88\xaa\x2b\xd5\x73\x11\x55\xf5"
- "\x33\x33\x9c\xd7\x42\x34\x49\x8e"
- "\x2f\x03\x30\x05\x47\xaf\x34",
- .clen = 47,
- }, {
- .key = "\x33\x27\xf5\xb1\x62\xa0\x80\x63"
- "\x14\xc0\x4d\x7f\x7b\x20\xba\x89",
- .klen = 16,
- .iv = "\x42\xf0\x84\x19\xa6\x3f\x2b\xea"
- "\x34\xf6\x79\xa3\x79\xe9\xeb\x0a",
- .assoc = "\x51\xb9\x12\x80\xea\xde\xd5\x71"
- "\x54\x2d\xa6\xc8\x78\xb2\x1b\x8c"
- "\x39\x14\x05\xa0\xf3\x10\xec\x41"
- "\xff\x01\x95\x84\x2b\x59\x7f\xdb",
- .alen = 32,
- .ptext = "\x61\x83\xa0\xe8\x2e\x7d\x7f\xf8"
- "\x74\x63\xd2\xec\x76\x7c\x4c\x0d"
- "\x03\xc4\x88\xc1\x35\xb8\xcd\x47"
- "\x2f\x0c\xcd\x7a\xe2\x71\x66\x91",
- .plen = 32,
- .ctext = "\x97\xca\xf4\xe0\x8d\x89\xbf\x68"
- "\x0c\x60\xb9\x27\xdf\xaa\x41\xc6"
- "\x25\xd8\xf7\x1f\x10\x15\x48\x61"
- "\x4c\x95\x00\xdf\x51\x9b\x7f\xe6"
- "\x24\x40\x9e\xbe\x3b\xeb\x1b\x98"
- "\xb9\x9c\xe5\xef\xf2\x05",
- .clen = 46,
- }, {
- .key = "\x70\x4c\x2f\x50\x72\x1c\x29\x7f"
- "\x95\x9a\xff\x10\x75\x45\x7d\x8f",
- .klen = 16,
- .iv = "\x7f\x15\xbd\xb8\xb6\xba\xd3\x06"
- "\xb5\xd1\x2b\x35\x73\x0e\xad\x10",
- .assoc = "\x8e\xde\x4c\x20\xfa\x59\x7e\x8d"
- "\xd5\x07\x58\x59\x72\xd7\xde\x92"
- "\x63\xd6\x10\x24\xf8\xb0\x6e\x5a"
- "\xc0\x2e\x74\x5d\x06\xb8\x1e\xb2",
- .alen = 32,
- .ptext = "\x9d\xa7\xda\x88\x3e\xf8\x28\x14"
- "\xf5\x3e\x85\x7d\x70\xa0\x0f\x13"
- "\x2e\x86\x93\x45\x3a\x58\x4f\x61"
- "\xf0\x3a\xac\x53\xbc\xd0\x06\x68",
- .plen = 32,
- .ctext = "\x63\x4c\x2a\x8e\xb4\x6b\x63\x0d"
- "\xb5\xec\x9b\x4e\x12\x23\xa3\xcf"
- "\x1a\x5a\x70\x15\x5a\x10\x40\x51"
- "\xca\x47\x4c\x9d\xc9\x97\xf4\x77"
- "\xdb\xc8\x10\x2d\xdc\x65\x20\x3f",
- .clen = 40,
- }, {
- .key = "\xac\x70\x69\xef\x82\x97\xd2\x9b"
- "\x15\x74\xb1\xa2\x6f\x69\x3f\x95",
- .klen = 16,
- .iv = "\xbb\x3a\xf7\x57\xc6\x36\x7c\x22"
- "\x36\xab\xde\xc6\x6d\x32\x70\x17",
- .assoc = "\xcb\x03\x85\xbf\x0a\xd5\x26\xa9"
- "\x56\xe1\x0a\xeb\x6c\xfb\xa1\x98"
- "\x8d\x98\x1c\xa8\xfe\x50\xf0\x74"
- "\x81\x5c\x53\x35\xe0\x17\xbd\x88",
- .alen = 32,
- .ptext = "\xda\xcc\x14\x27\x4e\x74\xd1\x30"
- "\x76\x18\x37\x0f\x6a\xc4\xd1\x1a"
- "\x58\x49\x9f\xc9\x3f\xf8\xd1\x7a"
- "\xb2\x67\x8b\x2b\x96\x2f\xa5\x3e",
- .plen = 32,
- .ctext = "\xf1\x62\x44\xc7\x5f\x19\xca\x43"
- "\x47\x2c\xaf\x68\x82\xbd\x51\xef"
- "\x3d\x65\xd8\x45\x2d\x06\x07\x78"
- "\x08\x2e\xb3\x23\xcd\x81\x12\x55"
- "\x1a",
- .clen = 33,
- }, {
- .key = "\xe9\x95\xa2\x8f\x93\x13\x7b\xb7"
- "\x96\x4e\x63\x33\x69\x8d\x02\x9b"
- "\x23\xf9\x22\xeb\x80\xa0\xb1\x81"
- "\xe2\x73\xc3\x21\x4d\x47\x8d\xf4",
- .klen = 32,
- .iv = "\xf8\x5e\x31\xf7\xd7\xb2\x25\x3e"
- "\xb7\x85\x90\x58\x67\x57\x33\x1d",
- .assoc = "",
- .alen = 0,
- .ptext = "",
- .plen = 0,
- .ctext = "\xdf\x2f\x83\xc0\x45\x4a\x2c\xcf"
- "\xb9\xd2\x41\xf6\x80\xa1\x52\x70",
- .clen = 16,
- }, {
- .key = "\x25\xba\xdc\x2e\xa3\x8f\x24\xd3"
- "\x17\x29\x15\xc5\x63\xb2\xc5\xa1"
- "\x4d\xbc\x2d\x6f\x85\x40\x33\x9a"
- "\xa3\xa0\xa1\xfa\x27\xa6\x2c\xca",
- .klen = 32,
- .iv = "\x34\x83\x6a\x96\xe7\x2d\xce\x5a"
- "\x38\x5f\x42\xe9\x61\x7b\xf5\x23",
- .assoc = "",
- .alen = 0,
- .ptext = "\x53",
- .plen = 1,
- .ctext = "\x01\xd8\x55\x3c\xc0\x5a\x4b\xc7"
- "\x01\xf4\x08\xe3\x0d\xf7\xf0\x78"
- "\x53",
- .clen = 17,
- }, {
- .key = "\x62\xdf\x16\xcd\xb3\x0a\xcc\xef"
- "\x98\x03\xc7\x56\x5d\xd6\x87\xa8"
- "\x77\x7e\x39\xf3\x8a\xe0\xb5\xb4"
- "\x65\xce\x80\xd2\x01\x05\xcb\xa1",
- .klen = 32,
- .iv = "\x71\xa8\xa4\x35\xf7\xa9\x76\x75"
- "\xb8\x39\xf4\x7a\x5b\x9f\xb8\x29",
- .assoc = "",
- .alen = 0,
- .ptext = "\x8f\x3a\xc1\x05\x7f\xe7\xcb\x83"
- "\xf9\xa6\x4d\xc3\x58\x31\x19\x2c"
- "\xd7\x90\xc2\x56\x4e\xd8\x57\xc7"
- "\xf6\xf0\x27\xb4\x25\x4c\x83",
- .plen = 31,
- .ctext = "\xc2\x4b\x41\x0f\x2d\xb9\x62\x07"
- "\xff\x8e\x74\xf8\xa1\xa6\xd5\x37"
- "\xa5\x64\x31\x5c\xca\x73\x9b\x43"
- "\xe6\x70\x63\x46\x95\xcb\xf7\xb5"
- "\x20\x8c\x75\x7a\x2a\x17\x2f\xa9"
- "\xb8\x4d\x11\x42\xd1\xf8\xf1",
- .clen = 47,
- }, {
- .key = "\x9e\x03\x4f\x6d\xc3\x86\x75\x0a"
- "\x19\xdd\x79\xe8\x57\xfb\x4a\xae"
- "\xa2\x40\x45\x77\x90\x80\x37\xce"
- "\x26\xfb\x5f\xaa\xdb\x64\x6b\x77",
- .klen = 32,
- .iv = "\xae\xcc\xde\xd5\x07\x25\x1f\x91"
- "\x39\x14\xa6\x0c\x55\xc4\x7b\x30",
- .assoc = "",
- .alen = 0,
- .ptext = "\xcc\x5f\xfb\xa4\x8f\x63\x74\x9f"
- "\x7a\x81\xff\x55\x52\x56\xdc\x33"
- "\x01\x52\xcd\xdb\x53\x78\xd9\xe1"
- "\xb7\x1d\x06\x8d\xff\xab\x22\x98",
- .plen = 32,
- .ctext = "\xbb\x01\x7c\xd1\x2c\x33\x7b\x37"
- "\x0a\xee\xc4\x30\x19\xd7\x3a\x6f"
- "\xf8\x2b\x67\xf5\x3b\x84\x87\x2a"
- "\xfb\x07\x7a\x82\xb5\xe4\x85\x26"
- "\x1e\xa8\xe5\x04\x54\xce\xe5\x5f"
- "\xb5\x3f\xc1\xd5\x7f\xbd\xd2\xa6",
- .clen = 48,
- }, {
- .key = "\xdb\x28\x89\x0c\xd3\x01\x1e\x26"
- "\x9a\xb7\x2b\x79\x51\x1f\x0d\xb4"
- "\xcc\x03\x50\xfc\x95\x20\xb9\xe7"
- "\xe8\x29\x3e\x83\xb5\xc3\x0a\x4e",
- .klen = 32,
- .iv = "\xea\xf1\x18\x74\x17\xa0\xc8\xad"
- "\xba\xee\x58\x9d\x4f\xe8\x3d\x36",
- .assoc = "",
- .alen = 0,
- .ptext = "\x08\x84\x34\x44\x9f\xde\x1c\xbb"
- "\xfb\x5b\xb1\xe6\x4c\x7a\x9f\x39"
- "\x2c\x14\xd9\x5f\x59\x18\x5b\xfb"
- "\x79\x4b\xe5\x65\xd9\x0a\xc1\x6f"
- "\x2e",
- .plen = 33,
- .ctext = "\xc2\xf4\x40\x55\xf9\x59\xff\x73"
- "\x08\xf5\x98\x92\x0c\x7b\x35\x9a"
- "\xa8\xf4\x42\x7e\x6f\x93\xca\x22"
- "\x23\x06\x1e\xf8\x89\x22\xf4\x46"
- "\x7c\x7c\x67\x75\xab\xe5\x75\xaa"
- "\x15\xd7\x83\x19\xfd\x31\x59\x5b"
- "\x32",
- .clen = 49,
- }, {
- .key = "\x17\x4d\xc3\xab\xe3\x7d\xc7\x42"
- "\x1b\x91\xdd\x0a\x4b\x43\xcf\xba"
- "\xf6\xc5\x5c\x80\x9a\xc0\x3b\x01"
- "\xa9\x56\x1d\x5b\x8f\x22\xa9\x25",
- .klen = 32,
- .iv = "\x27\x16\x51\x13\x27\x1c\x71\xc9"
- "\x3b\xc8\x0a\x2f\x49\x0c\x00\x3c",
- .assoc = "",
- .alen = 0,
- .ptext = "\x45\xa8\x6e\xe3\xaf\x5a\xc5\xd7"
- "\x7c\x35\x63\x77\x46\x9f\x61\x3f"
- "\x56\xd7\xe4\xe3\x5e\xb8\xdc\x14"
- "\x3a\x79\xc4\x3e\xb3\x69\x61\x46"
- "\x3c\xb6\x83\x4e\xb4\x26\xc7\x73"
- "\x22\xda\x52\x8b\x7d\x11\x98\xea"
- "\x62\xe1\x14\x1e\xdc\xfe\x0f\xad"
- "\x20\x76\x5a\xdc\x4e\x71\x13",
- .plen = 63,
- .ctext = "\xc9\x82\x3b\x4b\x87\x84\xa5\xdb"
- "\xa0\x8c\xd3\x3e\x7f\x8d\xe8\x28"
- "\x2a\xdc\xfa\x01\x84\x87\x9a\x70"
- "\x81\x75\x37\x0a\xd2\x75\xa9\xb6"
- "\x21\x72\xee\x7e\x65\x95\xe5\xcc"
- "\x01\xb7\x39\xa6\x51\x15\xca\xff"
- "\x61\xdc\x97\x38\xcc\xf4\xca\xc7"
- "\x83\x9b\x05\x11\x72\x60\xf0\xb4"
- "\x7e\x06\xab\x0a\xc0\xbb\x59\x23"
- "\xaa\x2d\xfc\x4e\x35\x05\x59",
- .clen = 79,
- }, {
- .key = "\x54\x71\xfd\x4b\xf3\xf9\x6f\x5e"
- "\x9c\x6c\x8f\x9c\x45\x68\x92\xc1"
- "\x21\x87\x67\x04\x9f\x60\xbd\x1b"
- "\x6a\x84\xfc\x34\x6a\x81\x48\xfb",
- .klen = 32,
- .iv = "\x63\x3b\x8b\xb3\x37\x98\x1a\xe5"
- "\xbc\xa2\xbc\xc0\x43\x31\xc2\x42",
- .assoc = "",
- .alen = 0,
- .ptext = "\x81\xcd\xa8\x82\xbf\xd6\x6e\xf3"
- "\xfd\x0f\x15\x09\x40\xc3\x24\x45"
- "\x81\x99\xf0\x67\x63\x58\x5e\x2e"
- "\xfb\xa6\xa3\x16\x8d\xc8\x00\x1c"
- "\x4b\x62\x87\x7c\x15\x38\xda\x70"
- "\x3d\xea\xe7\xf2\x40\xba\xae\x79"
- "\x8f\x48\xfc\xbf\x45\x53\x2e\x78"
- "\xef\x79\xf0\x1b\x49\xf7\xfd\x9c",
- .plen = 64,
- .ctext = "\x11\x7c\x7d\xef\xce\x29\x95\xec"
- "\x7e\x9f\x42\xa6\x26\x07\xa1\x75"
- "\x2f\x4e\x09\x9a\xf6\x6b\xc2\xfa"
- "\x0d\xd0\x17\xdc\x25\x1e\x9b\xdc"
- "\x5f\x8c\x1c\x60\x15\x4f\x9b\x20"
- "\x7b\xff\xcd\x82\x60\x84\xf4\xa5"
- "\x20\x9a\x05\x19\x5b\x02\x0a\x72"
- "\x43\x11\x26\x58\xcf\xc5\x41\xcf"
- "\x13\xcc\xde\x32\x92\xfa\x86\xf2"
- "\xaf\x16\xe8\x8f\xca\xb6\xfd\x54",
- .clen = 80,
- }, {
- .key = "\x90\x96\x36\xea\x03\x74\x18\x7a"
- "\x1d\x46\x42\x2d\x3f\x8c\x54\xc7"
- "\x4b\x4a\x73\x89\xa4\x00\x3f\x34"
- "\x2c\xb1\xdb\x0c\x44\xe0\xe8\xd2",
- .klen = 32,
- .iv = "\xa0\x5f\xc5\x52\x47\x13\xc2\x01"
- "\x3d\x7c\x6e\x52\x3d\x55\x85\x48",
- .assoc = "\xaf",
- .alen = 1,
- .ptext = "",
- .plen = 0,
- .ctext = "\x9b\xc5\x3b\x20\x0a\x88\x56\xbe"
- "\x69\xdf\xc4\xc4\x02\x46\x3a\xf0",
- .clen = 16,
- }, {
- .key = "\xcd\xbb\x70\x89\x13\xf0\xc1\x95"
- "\x9e\x20\xf4\xbf\x39\xb1\x17\xcd"
- "\x76\x0c\x7f\x0d\xa9\xa0\xc1\x4e"
- "\xed\xdf\xb9\xe4\x1e\x3f\x87\xa8",
- .klen = 32,
- .iv = "\xdc\x84\xfe\xf1\x58\x8f\x6b\x1c"
- "\xbe\x57\x20\xe3\x37\x7a\x48\x4f",
- .assoc = "\xeb\x4d\x8d\x59\x9c\x2e\x15\xa3"
- "\xde\x8d\x4d\x07\x36\x43\x78\xd0"
- "\x0b\x6d\x84\x4f\x2c\xf0\x82\x5b"
- "\x4e\xf6\x29\xd1\x8b\x6f\x56",
- .alen = 31,
- .ptext = "",
- .plen = 0,
- .ctext = "\xe0\x6d\xa1\x07\x98\x2f\x40\x2d"
- "\x2e\x9a\xd6\x61\x43\xc0\x74\x69",
- .clen = 16,
- }, {
- .key = "\x0a\xe0\xaa\x29\x24\x6c\x6a\xb1"
- "\x1f\xfa\xa6\x50\x33\xd5\xda\xd3"
- "\xa0\xce\x8a\x91\xae\x40\x43\x68"
- "\xae\x0d\x98\xbd\xf8\x9e\x26\x7f",
- .klen = 32,
- .iv = "\x19\xa9\x38\x91\x68\x0b\x14\x38"
- "\x3f\x31\xd2\x74\x31\x9e\x0a\x55",
- .assoc = "\x28\x72\xc7\xf8\xac\xaa\xbe\xbf"
- "\x5f\x67\xff\x99\x30\x67\x3b\xd6"
- "\x35\x2f\x90\xd3\x31\x90\x04\x74"
- "\x0f\x23\x08\xa9\x65\xce\xf6\xea",
- .alen = 32,
- .ptext = "",
- .plen = 0,
- .ctext = "\xb9\x57\x13\x3e\x82\x31\x61\x65"
- "\x0d\x7f\x6c\x96\x93\x5c\x50\xe2",
- .clen = 16,
- }, {
- .key = "\x46\x04\xe3\xc8\x34\xe7\x12\xcd"
- "\xa0\xd4\x58\xe2\x2d\xf9\x9c\xda"
- "\xca\x91\x96\x15\xb4\xe0\xc5\x81"
- "\x70\x3a\x77\x95\xd2\xfd\xc5\x55",
- .klen = 32,
- .iv = "\x55\xcd\x72\x30\x78\x86\xbd\x54"
- "\xc0\x0b\x84\x06\x2b\xc2\xcd\x5b",
- .assoc = "\x64\x97\x00\x98\xbc\x25\x67\xdb"
- "\xe0\x41\xb1\x2a\x2a\x8c\xfe\xdd"
- "\x5f\xf2\x9c\x58\x36\x30\x86\x8e"
- "\xd1\x51\xe6\x81\x3f\x2d\x95\xc1"
- "\x01",
- .alen = 33,
- .ptext = "",
- .plen = 0,
- .ctext = "\x81\x96\x34\xde\xbb\x36\xdd\x3e"
- "\x4e\x5e\xcb\x44\x21\xb8\x3f\xf1",
- .clen = 16,
- }, {
- .key = "\x83\x29\x1d\x67\x44\x63\xbb\xe9"
- "\x20\xaf\x0a\x73\x27\x1e\x5f\xe0"
- "\xf5\x53\xa1\x9a\xb9\x80\x47\x9b"
- "\x31\x68\x56\x6e\xac\x5c\x65\x2c",
- .klen = 32,
- .iv = "\x92\xf2\xac\xcf\x88\x02\x65\x70"
- "\x41\xe5\x36\x97\x25\xe7\x90\x61",
- .assoc = "\xa1\xbb\x3a\x37\xcc\xa1\x10\xf7"
- "\x61\x1c\x63\xbc\x24\xb0\xc0\xe3"
- "\x8a\xb4\xa7\xdc\x3b\xd0\x08\xa8"
- "\x92\x7f\xc5\x5a\x19\x8c\x34\x97"
- "\x0f\x95\x9b\x18\xe4\x8d\xb4\x24"
- "\xb9\x33\x28\x18\xe1\x9d\x14\xe0"
- "\x64\xb2\x89\x7d\x78\xa8\x05\x7e"
- "\x07\x8c\xfc\x88\x2d\xb8\x53",
- .alen = 63,
- .ptext = "",
- .plen = 0,
- .ctext = "\x2e\x99\xb6\x79\x57\x56\x80\x36"
- "\x8e\xc4\x1c\x12\x7d\x71\x36\x0c",
- .clen = 16,
- }, {
- .key = "\xbf\x4e\x57\x07\x54\xdf\x64\x05"
- "\xa1\x89\xbc\x04\x21\x42\x22\xe6"
- "\x1f\x15\xad\x1e\xbe\x20\xc9\xb4"
- "\xf3\x95\x35\x46\x86\xbb\x04\x03",
- .klen = 32,
- .iv = "\xce\x17\xe5\x6f\x98\x7e\x0e\x8c"
- "\xc2\xbf\xe8\x29\x1f\x0b\x52\x68",
- .assoc = "\xdd\xe0\x74\xd6\xdc\x1d\xb8\x13"
- "\xe2\xf6\x15\x4d\x1e\xd4\x83\xe9"
- "\xb4\x76\xb3\x60\x40\x70\x8a\xc1"
- "\x53\xac\xa4\x32\xf3\xeb\xd3\x6e"
- "\x1e\x42\xa0\x46\x45\x9f\xc7\x22"
- "\xd3\x43\xbc\x7e\xa5\x47\x2a\x6f"
- "\x91\x19\x70\x1e\xe1\xfe\x25\x49"
- "\xd6\x8f\x93\xc7\x28\x3f\x3d\x03",
- .alen = 64,
- .ptext = "",
- .plen = 0,
- .ctext = "\x7b\x25\x3d\x47\xd4\xa7\x08\xce"
- "\x3b\x89\x40\x36\xba\x6d\x0e\xa2",
- .clen = 16,
- }, {
- .key = "\xfc\x72\x90\xa6\x64\x5a\x0d\x21"
- "\x22\x63\x6e\x96\x1b\x67\xe4\xec"
- "\x49\xd7\xb9\xa2\xc3\xc0\x4b\xce"
- "\xb4\xc3\x14\x1e\x61\x1a\xa3\xd9",
- .klen = 32,
- .iv = "\x0b\x3c\x1f\x0e\xa8\xf9\xb7\xa7"
- "\x42\x9a\x9a\xba\x19\x30\x15\x6e",
- .assoc = "\x1a",
- .alen = 1,
- .ptext = "\x29",
- .plen = 1,
- .ctext = "\xe6\x09\x6f\x95\x9a\x18\xc8\xf6"
- "\x17\x75\x81\x16\xdf\x26\xff\x67"
- "\x92",
- .clen = 17,
- }, {
- .key = "\x38\x97\xca\x45\x74\xd6\xb6\x3c"
- "\xa3\x3d\x20\x27\x15\x8b\xa7\xf2"
- "\x74\x9a\xc4\x27\xc8\x60\xcd\xe8"
- "\x75\xf0\xf2\xf7\x3b\x79\x42\xb0",
- .klen = 32,
- .iv = "\x47\x60\x59\xad\xb8\x75\x60\xc3"
- "\xc3\x74\x4c\x4c\x13\x54\xd8\x74",
- .assoc = "\x56\x29\xe7\x15\xfc\x14\x0a\x4a"
- "\xe4\xaa\x79\x70\x12\x1d\x08\xf6"
- "\x09\xfb\xca\x69\x4b\xb0\x8e\xf5"
- "\xd6\x07\x62\xe3\xa8\xa9\x12",
- .alen = 31,
- .ptext = "\x66\xf3\x75\x7d\x40\xb3\xb4\xd1"
- "\x04\xe1\xa6\x94\x10\xe6\x39\x77"
- "\xd3\xac\x4d\x8a\x8c\x58\x6e\xfb"
- "\x06\x13\x9a\xd9\x5e\xc0\xfa",
- .plen = 31,
- .ctext = "\x82\xc0\x56\xf0\xd7\xc4\xc9\xfd"
- "\x3c\xd1\x2a\xd4\x15\x86\x9d\xda"
- "\xea\x6c\x6f\xa1\x33\xb0\x7a\x01"
- "\x57\xe7\xf3\x7b\x73\xe7\x54\x10"
- "\xc6\x91\xe2\xc6\xa0\x69\xe7\xe6"
- "\x76\xc3\xf5\x3a\x76\xfd\x4a",
- .clen = 47,
- }, {
- .key = "\x75\xbc\x04\xe5\x84\x52\x5e\x58"
- "\x24\x17\xd2\xb9\x0e\xaf\x6a\xf9"
- "\x9e\x5c\xd0\xab\xcd\x00\x4f\x01"
- "\x37\x1e\xd1\xcf\x15\xd8\xe2\x86",
- .klen = 32,
- .iv = "\x84\x85\x92\x4d\xc8\xf1\x08\xdf"
- "\x44\x4e\xff\xdd\x0d\x78\x9a\x7a",
- .assoc = "\x93\x4e\x21\xb4\x0c\x90\xb3\x66"
- "\x65\x84\x2b\x01\x0b\x42\xcb\xfc"
- "\x33\xbd\xd6\xed\x50\x50\x10\x0e"
- "\x97\x35\x41\xbb\x82\x08\xb1\xf2",
- .alen = 32,
- .ptext = "\xa2\x17\xaf\x1c\x50\x2e\x5d\xed"
- "\x85\xbb\x58\x26\x0a\x0b\xfc\x7d"
- "\xfe\x6e\x59\x0e\x91\xf8\xf0\x15"
- "\xc8\x40\x78\xb1\x38\x1f\x99\xa7",
- .plen = 32,
- .ctext = "\x01\x47\x8e\x6c\xf6\x64\x89\x3a"
- "\x71\xce\xe4\xaa\x45\x70\xe6\x84"
- "\x62\x48\x08\x64\x86\x6a\xdf\xec"
- "\xb4\xa0\xfb\x34\x03\x0c\x19\xf4"
- "\x2b\x7b\x36\x73\xec\x54\xa9\x1e"
- "\x30\x85\xdb\xe4\xac\xe9\x2c\xca",
- .clen = 48,
- }, {
- .key = "\xb1\xe1\x3e\x84\x94\xcd\x07\x74"
- "\xa5\xf2\x84\x4a\x08\xd4\x2c\xff"
- "\xc8\x1e\xdb\x2f\xd2\xa0\xd1\x1b"
- "\xf8\x4c\xb0\xa8\xef\x37\x81\x5d",
- .klen = 32,
- .iv = "\xc0\xaa\xcc\xec\xd8\x6c\xb1\xfb"
- "\xc5\x28\xb1\x6e\x07\x9d\x5d\x81",
- .assoc = "\xd0\x73\x5a\x54\x1d\x0b\x5b\x82"
- "\xe5\x5f\xdd\x93\x05\x66\x8e\x02"
- "\x5e\x80\xe1\x71\x55\xf0\x92\x28"
- "\x59\x62\x20\x94\x5c\x67\x50\xc8"
- "\x58",
- .alen = 33,
- .ptext = "\xdf\x3c\xe9\xbc\x61\xaa\x06\x09"
- "\x06\x95\x0a\xb7\x04\x2f\xbe\x84"
- "\x28\x30\x64\x92\x96\x98\x72\x2e"
- "\x89\x6e\x57\x8a\x13\x7e\x38\x7e"
- "\xdb",
- .plen = 33,
- .ctext = "\x85\xe0\xf8\x0f\x8e\x49\xe3\x60"
- "\xcb\x4a\x54\x94\xcf\xf5\x7e\x34"
- "\xe9\xf8\x80\x65\x53\xd0\x72\x70"
- "\x4f\x7d\x9d\xd1\x15\x6f\xb9\x2c"
- "\xfa\xe8\xdd\xac\x2e\xe1\x3f\x67"
- "\x63\x0f\x1a\x59\xb7\x89\xdb\xf4"
- "\xc3",
- .clen = 49,
- }, {
- .key = "\xee\x05\x77\x23\xa5\x49\xb0\x90"
- "\x26\xcc\x36\xdc\x02\xf8\xef\x05"
- "\xf3\xe1\xe7\xb3\xd8\x40\x53\x35"
- "\xb9\x79\x8f\x80\xc9\x96\x20\x33",
- .klen = 32,
- .iv = "\xfd\xce\x06\x8b\xe9\xe8\x5a\x17"
- "\x46\x02\x63\x00\x01\xc1\x20\x87",
- .assoc = "\x0c\x98\x94\xf3\x2d\x87\x04\x9e"
- "\x66\x39\x8f\x24\xff\x8a\x50\x08"
- "\x88\x42\xed\xf6\x5a\x90\x14\x42"
- "\x1a\x90\xfe\x6c\x36\xc6\xf0\x9f"
- "\x66\xa0\xb5\x2d\x2c\xf8\x25\x15"
- "\x55\x90\xa2\x7e\x77\x94\x96\x3a"
- "\x71\x1c\xf7\x44\xee\xa8\xc3\x42"
- "\xe2\xa3\x84\x04\x0b\xe1\xce",
- .alen = 63,
- .ptext = "\x1b\x61\x23\x5b\x71\x26\xae\x25"
- "\x87\x6f\xbc\x49\xfe\x53\x81\x8a"
- "\x53\xf2\x70\x17\x9b\x38\xf4\x48"
- "\x4b\x9b\x36\x62\xed\xdd\xd8\x54"
- "\xea\xcb\xb6\x79\x45\xfc\xaa\x54"
- "\x5c\x94\x47\x58\xa7\xff\x9c\x9e"
- "\x7c\xb6\xf1\xac\xc8\xfd\x8b\x35"
- "\xd5\xa4\x6a\xd4\x09\xc2\x08",
- .plen = 63,
- .ctext = "\x00\xe5\x5b\x87\x5c\x20\x22\x8a"
- "\xda\x1f\xd3\xff\xbb\xb2\xb0\xf8"
- "\xef\xe9\xeb\x9e\x7c\x80\xf4\x2b"
- "\x59\xc0\x79\xbc\x17\xa0\x15\x01"
- "\xf5\x72\xfb\x5a\xe7\xaf\x07\xe3"
- "\x1b\x49\x21\x34\x23\x63\x55\x5e"
- "\xee\x4f\x34\x17\xfa\xfe\xa5\x0c"
- "\xed\x0b\x23\xea\x9b\xda\x57\x2f"
- "\xf6\xa9\xae\x0d\x4e\x40\x96\x45"
- "\x7f\xfa\xf0\xbf\xc4\x98\x78",
- .clen = 79,
- }, {
- .key = "\x2a\x2a\xb1\xc3\xb5\xc5\x59\xac"
- "\xa7\xa6\xe8\x6d\xfc\x1d\xb2\x0b"
- "\x1d\xa3\xf3\x38\xdd\xe0\xd5\x4e"
- "\x7b\xa7\x6e\x58\xa3\xf5\xbf\x0a",
- .klen = 32,
- .iv = "\x39\xf3\x3f\x2b\xf9\x64\x03\x33"
- "\xc7\xdd\x15\x91\xfb\xe6\xe2\x8d",
- .assoc = "\x49\xbc\xce\x92\x3d\x02\xad\xba"
- "\xe7\x13\x41\xb6\xf9\xaf\x13\x0f"
- "\xb2\x04\xf8\x7a\x5f\x30\x96\x5b"
- "\xdc\xbd\xdd\x44\x10\x25\x8f\x75"
- "\x75\x4d\xb9\x5b\x8e\x0a\x38\x13"
- "\x6f\x9f\x36\xe4\x3a\x3e\xac\xc9"
- "\x9d\x83\xde\xe5\x57\xfd\xe3\x0e"
- "\xb1\xa7\x1b\x44\x05\x67\xb7\x37",
- .alen = 64,
- .ptext = "\x58\x85\x5c\xfa\x81\xa1\x57\x40"
- "\x08\x4a\x6e\xda\xf8\x78\x44\x90"
- "\x7d\xb5\x7b\x9b\xa1\xd8\x76\x62"
- "\x0c\xc9\x15\x3b\xc7\x3c\x77\x2b"
- "\xf8\x78\xba\xa7\xa6\x0e\xbd\x52"
- "\x76\xa3\xdc\xbe\x6b\xa8\xb1\x2d"
- "\xa9\x1d\xd8\x4e\x31\x53\xab\x00"
- "\xa5\xa7\x01\x13\x04\x49\xf2\x04",
- .plen = 64,
- .ctext = "\x28\xdd\xb9\x4a\x12\xc7\x0a\xe1"
- "\x58\x06\x1a\x9b\x8c\x67\xdf\xeb"
- "\x35\x35\x60\x9d\x06\x40\x65\xc1"
- "\x93\xe8\xb3\x82\x50\x29\xdd\xb5"
- "\x2b\xcb\xde\x18\x78\x6b\x42\xbe"
- "\x6d\x24\xd0\xb2\x7d\xd7\x08\x8f"
- "\x4a\x18\x98\xad\x8c\xf2\x97\xb4"
- "\xf4\x77\xe4\xbf\x41\x3b\xc4\x06"
- "\xce\x9e\x34\x81\xf0\x89\x11\x13"
- "\x02\x65\xa1\x7c\xdf\x07\x33\x06",
- .clen = 80,
- }, {
- .key = "\x67\x4f\xeb\x62\xc5\x40\x01\xc7"
- "\x28\x80\x9a\xfe\xf6\x41\x74\x12"
- "\x48\x65\xfe\xbc\xe2\x80\x57\x68"
- "\x3c\xd4\x4d\x31\x7d\x54\x5f\xe1",
- .klen = 32,
- .iv = "\x76\x18\x79\xca\x09\xdf\xac\x4e"
- "\x48\xb7\xc7\x23\xf5\x0a\xa5\x93",
- .assoc = "\x85\xe1\x08\x32\x4d\x7e\x56\xd5"
- "\x68\xed\xf3\x47\xf3\xd3\xd6\x15"
- "\xdd\xc7\x04\xfe\x64\xd0\x18\x75"
- "\x9d\xeb\xbc\x1d\xea\x84\x2e\x4c"
- "\x83\xf9\xbe\x8a\xef\x1c\x4b\x10"
- "\x89\xaf\xcb\x4b\xfe\xe7\xc1\x58"
- "\xca\xea\xc6\x87\xc0\x53\x03\xd9"
- "\x80\xaa\xb2\x83\xff\xee\xa1\x6a"
- "\x04",
- .alen = 65,
- .ptext = "\x94\xaa\x96\x9a\x91\x1d\x00\x5c"
- "\x88\x24\x20\x6b\xf2\x9c\x06\x96"
- "\xa7\x77\x87\x1f\xa6\x78\xf8\x7b"
- "\xcd\xf6\xf4\x13\xa1\x9b\x16\x02"
- "\x07\x24\xbf\xd5\x08\x20\xd0\x4f"
- "\x90\xb3\x70\x24\x2f\x51\xc7\xbb"
- "\xd6\x84\xc0\xef\x9a\xa8\xca\xcc"
- "\x74\xab\x97\x53\xfe\xd0\xdb\x37"
- "\x37\x6a\x0e\x9f\x3f\xa3\x2a\xe3"
- "\x1b\x34\x6d\x51\x72\x2b\x17\xe7"
- "\x4d\xaa\x2c\x18\xda\xa3\x33\x89"
- "\x2a\x9f\xf4\xd2\xed\x76\x3d\x3f"
- "\x3c\x15\x9d\x8e\x4f\x3c\x27\xb0"
- "\x42\x3f\x2f\x8a\xd4\xc2\x10\xb2"
- "\x27\x7f\xe3\x34\x80\x02\x49\x4b"
- "\x07\x68\x22\x2a\x88\x25\x53\xb2"
- "\x2f",
- .plen = 129,
- .ctext = "\x85\x39\x69\x35\xfb\xf9\xb0\xa6"
- "\x85\x43\x88\xd0\xd7\x78\x60\x19"
- "\x3e\x1f\xb1\xa4\xd6\xc5\x96\xec"
- "\xf7\x84\x85\xc7\x27\x0f\x74\x57"
- "\x28\x9e\xdd\x90\x3c\x43\x12\xc5"
- "\x51\x3d\x39\x8f\xa5\xf4\xe0\x0b"
- "\x57\x04\xf1\x6d\xfe\x9b\x84\x27"
- "\xe8\xeb\x4d\xda\x02\x0a\xc5\x49"
- "\x1a\x55\x5e\x50\x56\x4d\x94\xda"
- "\x20\xf8\x12\x54\x50\xb3\x11\xda"
- "\xed\x44\x27\x67\xd5\xd1\x8b\x4b"
- "\x38\x67\x56\x65\x59\xda\xe6\x97"
- "\x81\xae\x2f\x92\x3b\xae\x22\x1c"
- "\x91\x59\x38\x18\x00\xe8\xba\x92"
- "\x04\x19\x56\xdf\xb0\x82\xeb\x6f"
- "\x2e\xdb\x54\x3c\x4b\xbb\x60\x90"
- "\x4c\x50\x10\x62\xba\x7a\xb1\x68"
- "\x37\xd7\x87\x4e\xe4\x66\x09\x1f"
- "\xa5",
- .clen = 145,
- }, {
- .key = "\xa3\x73\x24\x01\xd5\xbc\xaa\xe3"
- "\xa9\x5a\x4c\x90\xf0\x65\x37\x18"
- "\x72\x28\x0a\x40\xe7\x20\xd9\x82"
- "\xfe\x02\x2b\x09\x57\xb3\xfe\xb7",
- .klen = 32,
- .iv = "\xb3\x3d\xb3\x69\x19\x5b\x54\x6a"
- "\xc9\x91\x79\xb4\xef\x2e\x68\x99",
- .assoc = "\xc2\x06\x41\xd1\x5d\xfa\xff\xf1"
- "\xe9\xc7\xa5\xd9\xed\xf8\x98\x1b"
- "\x07\x89\x10\x82\x6a\x70\x9a\x8f"
- "\x5e\x19\x9b\xf5\xc5\xe3\xcd\x22"
- "\x92\xa5\xc2\xb8\x51\x2e\x5e\x0e"
- "\xa4\xbe\x5f\xb1\xc1\x90\xd7\xe7"
- "\xf7\x52\xae\x28\x29\xa8\x22\xa4"
- "\x4f\xae\x48\xc2\xfa\x75\x8b\x9e"
- "\xce\x83\x2a\x88\x07\x55\xbb\x89"
- "\xf6\xdf\xac\xdf\x83\x08\xbf\x7d"
- "\xac\x30\x8b\x8e\x02\xac\x00\xf1"
- "\x30\x46\xe1\xbc\x75\xbf\x49\xbb"
- "\x26\x4e\x29\xf0\x2f\x21\xc6\x13"
- "\x92\xd9\x3d\x11\xe4\x10\x00\x8e"
- "\xd4\xd4\x58\x65\xa6\x2b\xe3\x25"
- "\xb1\x8f\x15\x93\xe7\x71\xb9\x2c"
- "\x4b",
- .alen = 129,
- .ptext = "\xd1\xcf\xd0\x39\xa1\x99\xa9\x78"
- "\x09\xfe\xd2\xfd\xec\xc1\xc9\x9d"
- "\xd2\x39\x93\xa3\xab\x18\x7a\x95"
- "\x8f\x24\xd3\xeb\x7b\xfa\xb5\xd8"
- "\x15\xd1\xc3\x04\x69\x32\xe3\x4d"
- "\xaa\xc2\x04\x8b\xf2\xfa\xdc\x4a"
- "\x02\xeb\xa8\x90\x03\xfd\xea\x97"
- "\x43\xaf\x2e\x92\xf8\x57\xc5\x6a"
- "\x00",
- .plen = 65,
- .ctext = "\x7d\xde\x53\x22\xe4\x23\x3b\x30"
- "\x78\xde\x35\x90\x7a\xd9\x0b\x93"
- "\xf6\x0e\x0b\xed\x40\xee\x10\x9c"
- "\x96\x3a\xd3\x34\xb2\xd0\x67\xcf"
- "\x63\x7f\x2d\x0c\xcf\x96\xec\x64"
- "\x1a\x87\xcc\x7d\x2c\x5e\x81\x4b"
- "\xd2\x8f\x4c\x7c\x00\xb1\xb4\xe0"
- "\x87\x4d\xb1\xbc\xd8\x78\x2c\x17"
- "\xf2\x3b\xd8\x28\x40\xe2\x76\xf6"
- "\x20\x13\x83\x46\xaf\xff\xe3\x0f"
- "\x72",
- .clen = 81,
- }, {
- .key = "\xe0\x98\x5e\xa1\xe5\x38\x53\xff"
- "\x2a\x35\xfe\x21\xea\x8a\xfa\x1e"
- "\x9c\xea\x15\xc5\xec\xc0\x5b\x9b"
- "\xbf\x2f\x0a\xe1\x32\x12\x9d\x8e",
- .klen = 32,
- .iv = "\xef\x61\xed\x08\x29\xd7\xfd\x86"
- "\x4a\x6b\x2b\x46\xe9\x53\x2a\xa0",
- .assoc = "\xfe\x2a\x7b\x70\x6d\x75\xa7\x0d"
- "\x6a\xa2\x57\x6a\xe7\x1c\x5b\x21"
- "\x31\x4b\x1b\x07\x6f\x10\x1c\xa8"
- "\x20\x46\x7a\xce\x9f\x42\x6d\xf9",
- .alen = 32,
- .ptext = "\x0d\xf4\x09\xd8\xb1\x14\x51\x94"
- "\x8a\xd8\x84\x8e\xe6\xe5\x8c\xa3"
- "\xfc\xfc\x9e\x28\xb0\xb8\xfc\xaf"
- "\x50\x52\xb1\xc4\x55\x59\x55\xaf",
- .plen = 32,
- .ctext = "\x5a\xcd\x8c\x57\xf2\x6a\xb6\xbe"
- "\x53\xc7\xaa\x9a\x60\x74\x9c\xc4"
- "\xa2\xc2\xd0\x6d\xe1\x03\x63\xdc"
- "\xbb\x51\x7e\x9c\x89\x73\xde\x4e"
- "\x24\xf8\x52\x7c\x15\x41\x0e\xba"
- "\x69\x0e\x36\x5f\x2f\x22\x8c",
- .clen = 47,
- }, {
- .key = "\x1c\xbd\x98\x40\xf5\xb3\xfc\x1b"
- "\xaa\x0f\xb0\xb3\xe4\xae\xbc\x24"
- "\xc7\xac\x21\x49\xf1\x60\xdd\xb5"
- "\x80\x5d\xe9\xba\x0c\x71\x3c\x64",
- .klen = 32,
- .iv = "\x2c\x86\x26\xa8\x39\x52\xa6\xa2"
- "\xcb\x45\xdd\xd7\xe3\x77\xed\xa6",
- .assoc = "\x3b\x4f\xb5\x10\x7d\xf1\x50\x29"
- "\xeb\x7c\x0a\xfb\xe1\x40\x1e\x27"
- "\x5c\x0d\x27\x8b\x74\xb0\x9e\xc2"
- "\xe1\x74\x59\xa6\x79\xa1\x0c\xd0",
- .alen = 32,
- .ptext = "\x4a\x18\x43\x77\xc1\x90\xfa\xb0"
- "\x0b\xb2\x36\x20\xe0\x09\x4e\xa9"
- "\x26\xbe\xaa\xac\xb5\x58\x7e\xc8"
- "\x11\x7f\x90\x9c\x2f\xb8\xf4\x85",
- .plen = 32,
- .ctext = "\x47\xd6\xce\x78\xd6\xbf\x4a\x51"
- "\xb8\xda\x92\x3c\xfd\xda\xac\x8e"
- "\x8d\x88\xd7\x4d\x90\xe5\xeb\xa1"
- "\xab\xd6\x7c\x76\xad\xea\x7d\x76"
- "\x53\xee\xb0\xcd\xd0\x02\xbb\x70"
- "\x5b\x6f\x7b\xe2\x8c\xe8",
- .clen = 46,
- }, {
- .key = "\x59\xe1\xd2\xdf\x05\x2f\xa4\x37"
- "\x2b\xe9\x63\x44\xde\xd3\x7f\x2b"
- "\xf1\x6f\x2d\xcd\xf6\x00\x5f\xcf"
- "\x42\x8a\xc8\x92\xe6\xd0\xdc\x3b",
- .klen = 32,
- .iv = "\x68\xab\x60\x47\x49\xce\x4f\xbe"
- "\x4c\x20\x8f\x68\xdd\x9c\xb0\xac",
- .assoc = "\x77\x74\xee\xaf\x8d\x6d\xf9\x45"
- "\x6c\x56\xbc\x8d\xdb\x65\xe0\x2e"
- "\x86\xd0\x32\x0f\x79\x50\x20\xdb"
- "\xa2\xa1\x37\x7e\x53\x00\xab\xa6",
- .alen = 32,
- .ptext = "\x86\x3d\x7d\x17\xd1\x0c\xa3\xcc"
- "\x8c\x8d\xe8\xb1\xda\x2e\x11\xaf"
- "\x51\x80\xb5\x30\xba\xf8\x00\xe2"
- "\xd3\xad\x6f\x75\x09\x18\x93\x5c",
- .plen = 32,
- .ctext = "\x9f\xa9\x2b\xa4\x8f\x00\x05\x2b"
- "\xe7\x68\x81\x51\xbb\xfb\xdf\x60"
- "\xbb\xac\xe8\xc1\xdc\x68\xae\x68"
- "\x3a\xcd\x7a\x06\x49\xfe\x80\x11"
- "\xe6\x61\x99\xe2\xdd\xbe\x2c\xbf",
- .clen = 40,
- }, {
- .key = "\x96\x06\x0b\x7f\x15\xab\x4d\x53"
- "\xac\xc3\x15\xd6\xd8\xf7\x42\x31"
- "\x1b\x31\x38\x51\xfc\xa0\xe1\xe8"
- "\x03\xb8\xa7\x6b\xc0\x2f\x7b\x11",
- .klen = 32,
- .iv = "\xa5\xcf\x9a\xe6\x59\x4a\xf7\xd9"
- "\xcd\xfa\x41\xfa\xd7\xc0\x72\xb2",
- .assoc = "\xb4\x99\x28\x4e\x9d\xe8\xa2\x60"
- "\xed\x30\x6e\x1e\xd5\x89\xa3\x34"
- "\xb1\x92\x3e\x93\x7e\xf0\xa2\xf5"
- "\x64\xcf\x16\x57\x2d\x5f\x4a\x7d",
- .alen = 32,
- .ptext = "\xc3\x62\xb7\xb6\xe2\x87\x4c\xe7"
- "\x0d\x67\x9a\x43\xd4\x52\xd4\xb5"
- "\x7b\x43\xc1\xb5\xbf\x98\x82\xfc"
- "\x94\xda\x4e\x4d\xe4\x77\x32\x32",
- .plen = 32,
- .ctext = "\xe2\x34\xfa\x25\xfd\xfb\x89\x5e"
- "\x5b\x4e\x0b\x15\x6e\x39\xfb\x0c"
- "\x73\xc7\xd9\x6b\xbe\xce\x9b\x70"
- "\xc7\x4f\x96\x16\x03\xfc\xea\xfb"
- "\x56",
- .clen = 33,
- },
-};
-
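For orientation while reading these hex dumps: each aead_testvec entry pairs a key and nonce with associated data (authenticated but not encrypted), a plaintext, and the expected output, where .ctext holds the ciphertext followed by the authentication tag. testmgr derives the tag size to test from each vector, so clen - plen gives the tag length a given vector exercises; the 16- and 32-byte keys correspond to the MORUS-1280-128 and MORUS-1280-256 parameter sets named in the comment above. A compile-only sketch of the field layout, abridged from the authoritative struct aead_testvec defined earlier in testmgr.h:

/* Abridged sketch; field names match the entries above. */
struct aead_testvec_sketch {
	const char *key;	/* AEAD key, .klen bytes */
	const char *iv;		/* nonce */
	const char *assoc;	/* associated data, .alen bytes */
	const char *ptext;	/* plaintext, .plen bytes */
	const char *ctext;	/* ciphertext followed by tag, .clen bytes */
	unsigned int klen, alen, plen, clen;
};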
-/*
* All key wrapping test vectors taken from
* http://csrc.nist.gov/groups/STM/cavp/documents/mac/kwtestvectors.zip
*
@@ -32454,6 +31369,86 @@ static const struct comp_testvec lzo_decomp_tv_template[] = {
},
};
 
+static const struct comp_testvec lzorle_comp_tv_template[] = {
+ {
+ .inlen = 70,
+ .outlen = 59,
+ .input = "Join us now and share the software "
+ "Join us now and share the software ",
+ .output = "\x11\x01\x00\x0d\x4a\x6f\x69\x6e"
+ "\x20\x75\x73\x20\x6e\x6f\x77\x20"
+ "\x61\x6e\x64\x20\x73\x68\x61\x72"
+ "\x65\x20\x74\x68\x65\x20\x73\x6f"
+ "\x66\x74\x77\x70\x01\x32\x88\x00"
+ "\x0c\x65\x20\x74\x68\x65\x20\x73"
+ "\x6f\x66\x74\x77\x61\x72\x65\x20"
+ "\x11\x00\x00",
+ }, {
+ .inlen = 159,
+ .outlen = 133,
+ .input = "This document describes a compression method based on the LZO "
+ "compression algorithm. This document defines the application of "
+ "the LZO algorithm used in UBIFS.",
+ .output = "\x11\x01\x00\x2c\x54\x68\x69\x73"
+ "\x20\x64\x6f\x63\x75\x6d\x65\x6e"
+ "\x74\x20\x64\x65\x73\x63\x72\x69"
+ "\x62\x65\x73\x20\x61\x20\x63\x6f"
+ "\x6d\x70\x72\x65\x73\x73\x69\x6f"
+ "\x6e\x20\x6d\x65\x74\x68\x6f\x64"
+ "\x20\x62\x61\x73\x65\x64\x20\x6f"
+ "\x6e\x20\x74\x68\x65\x20\x4c\x5a"
+ "\x4f\x20\x2a\x8c\x00\x09\x61\x6c"
+ "\x67\x6f\x72\x69\x74\x68\x6d\x2e"
+ "\x20\x20\x2e\x54\x01\x03\x66\x69"
+ "\x6e\x65\x73\x20\x74\x06\x05\x61"
+ "\x70\x70\x6c\x69\x63\x61\x74\x76"
+ "\x0a\x6f\x66\x88\x02\x60\x09\x27"
+ "\xf0\x00\x0c\x20\x75\x73\x65\x64"
+ "\x20\x69\x6e\x20\x55\x42\x49\x46"
+ "\x53\x2e\x11\x00\x00",
+ },
+};
+
+static const struct comp_testvec lzorle_decomp_tv_template[] = {
+ {
+ .inlen = 133,
+ .outlen = 159,
+ .input = "\x00\x2b\x54\x68\x69\x73\x20\x64"
+ "\x6f\x63\x75\x6d\x65\x6e\x74\x20"
+ "\x64\x65\x73\x63\x72\x69\x62\x65"
+ "\x73\x20\x61\x20\x63\x6f\x6d\x70"
+ "\x72\x65\x73\x73\x69\x6f\x6e\x20"
+ "\x6d\x65\x74\x68\x6f\x64\x20\x62"
+ "\x61\x73\x65\x64\x20\x6f\x6e\x20"
+ "\x74\x68\x65\x20\x4c\x5a\x4f\x2b"
+ "\x8c\x00\x0d\x61\x6c\x67\x6f\x72"
+ "\x69\x74\x68\x6d\x2e\x20\x20\x54"
+ "\x68\x69\x73\x2a\x54\x01\x02\x66"
+ "\x69\x6e\x65\x73\x94\x06\x05\x61"
+ "\x70\x70\x6c\x69\x63\x61\x74\x76"
+ "\x0a\x6f\x66\x88\x02\x60\x09\x27"
+ "\xf0\x00\x0c\x20\x75\x73\x65\x64"
+ "\x20\x69\x6e\x20\x55\x42\x49\x46"
+ "\x53\x2e\x11\x00\x00",
+ .output = "This document describes a compression method based on the LZO "
+ "compression algorithm. This document defines the application of "
+ "the LZO algorithm used in UBIFS.",
+ }, {
+ .inlen = 59,
+ .outlen = 70,
+ .input = "\x11\x01\x00\x0d\x4a\x6f\x69\x6e"
+ "\x20\x75\x73\x20\x6e\x6f\x77\x20"
+ "\x61\x6e\x64\x20\x73\x68\x61\x72"
+ "\x65\x20\x74\x68\x65\x20\x73\x6f"
+ "\x66\x74\x77\x70\x01\x32\x88\x00"
+ "\x0c\x65\x20\x74\x68\x65\x20\x73"
+ "\x6f\x66\x74\x77\x61\x72\x65\x20"
+ "\x11\x00\x00",
+ .output = "Join us now and share the software "
+ "Join us now and share the software ",
+ },
+};
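The lzorle templates above round-trip data through the "lzo-rle" algorithm, a run-length-encoding variant of LZO. Below is a minimal sketch, assuming the crypto_comp single-shot API of this kernel era and a 512-byte output buffer mirroring testmgr.c's COMP_BUF_SIZE, of how one decompression vector could be checked; check_lzorle_decomp() is a hypothetical helper, and the real test_comp() in testmgr.c also drives the compression direction and reports mismatches in detail.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/string.h>

/* struct comp_testvec is the type of the template entries above. */
static int check_lzorle_decomp(const struct comp_testvec *vec)
{
	struct crypto_comp *tfm = crypto_alloc_comp("lzo-rle", 0, 0);
	u8 out[512];			/* mirrors testmgr.c's COMP_BUF_SIZE */
	unsigned int dlen = sizeof(out);
	int err;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	err = crypto_comp_decompress(tfm, (const u8 *)vec->input,
				     vec->inlen, out, &dlen);
	if (!err && (dlen != vec->outlen || memcmp(out, vec->output, dlen)))
		err = -EINVAL;	/* wrong length or wrong bytes */
	crypto_free_comp(tfm);
	return err;
}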
+
/*
* Michael MIC test vectors from IEEE 802.11i
*/
@@ -33681,4 +32676,1025 @@ static const struct comp_testvec zstd_decomp_tv_template[] = {
"functions.",
},
};
+
+/* based on aes_cbc_tv_template */
+static const struct cipher_testvec essiv_aes_cbc_tv_template[] = {
+ {
+ .key = "\x06\xa9\x21\x40\x36\xb8\xa1\x5b"
+ "\x51\x2e\x03\xd5\x34\x12\x00\x06",
+ .klen = 16,
+ .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ptext = "Single block msg",
+ .ctext = "\xfa\x59\xe7\x5f\x41\x56\x65\xc3"
+ "\x36\xca\x6b\x72\x10\x9f\x8c\xd4",
+ .len = 16,
+ }, {
+ .key = "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0"
+ "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a",
+ .klen = 16,
+ .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ptext = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
+ .ctext = "\xc8\x59\x9a\xfe\x79\xe6\x7b\x20"
+ "\x06\x7d\x55\x0a\x5e\xc7\xb5\xa7"
+ "\x0b\x9c\x80\xd2\x15\xa1\xb8\x6d"
+ "\xc6\xab\x7b\x65\xd9\xfd\x88\xeb",
+ .len = 32,
+ }, {
+ .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 24,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ptext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ctext = "\x96\x6d\xa9\x7a\x42\xe6\x01\xc7"
+ "\x17\xfc\xa7\x41\xd3\x38\x0b\xe5"
+ "\x51\x48\xf7\x7e\x5e\x26\xa9\xfe"
+ "\x45\x72\x1c\xd9\xde\xab\xf3\x4d"
+ "\x39\x47\xc5\x4f\x97\x3a\x55\x63"
+ "\x80\x29\x64\x4c\x33\xe8\x21\x8a"
+ "\x6a\xef\x6b\x6a\x8f\x43\xc0\xcb"
+ "\xf0\xf3\x6e\x74\x54\x44\x92\x44",
+ .len = 64,
+ }, {
+ .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 32,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ptext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ctext = "\x24\x52\xf1\x48\x74\xd0\xa7\x93"
+ "\x75\x9b\x63\x46\xc0\x1c\x1e\x17"
+ "\x4d\xdc\x5b\x3a\x27\x93\x2a\x63"
+ "\xf7\xf1\xc7\xb3\x54\x56\x5b\x50"
+ "\xa3\x31\xa5\x8b\xd6\xfd\xb6\x3c"
+ "\x8b\xf6\xf2\x45\x05\x0c\xc8\xbb"
+ "\x32\x0b\x26\x1c\xe9\x8b\x02\xc0"
+ "\xb2\x6f\x37\xa7\x5b\xa8\xa9\x42",
+ .len = 64,
+ }, {
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ptext = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .ctext = "\x97\x7f\x69\x0f\x0f\x34\xa6\x33"
+ "\x66\x49\x7e\xd0\x4d\x1b\xc9\x64"
+ "\xf9\x61\x95\x98\x11\x00\x88\xf8"
+ "\x2e\x88\x01\x0f\x2b\xe1\xae\x3e"
+ "\xfe\xd6\x47\x30\x11\x68\x7d\x99"
+ "\xad\x69\x6a\xe8\x41\x5f\x1e\x16"
+ "\x00\x3a\x47\xdf\x8e\x7d\x23\x1c"
+ "\x19\x5b\x32\x76\x60\x03\x05\xc1"
+ "\xa0\xff\xcf\xcc\x74\x39\x46\x63"
+ "\xfe\x5f\xa6\x35\xa7\xb4\xc1\xf9"
+ "\x4b\x5e\x38\xcc\x8c\xc1\xa2\xcf"
+ "\x9a\xc3\xae\x55\x42\x46\x93\xd9"
+ "\xbd\x22\xd3\x8a\x19\x96\xc3\xb3"
+ "\x7d\x03\x18\xf9\x45\x09\x9c\xc8"
+ "\x90\xf3\x22\xb3\x25\x83\x9a\x75"
+ "\xbb\x04\x48\x97\x3a\x63\x08\x04"
+ "\xa0\x69\xf6\x52\xd4\x89\x93\x69"
+ "\xb4\x33\xa2\x16\x58\xec\x4b\x26"
+ "\x76\x54\x10\x0b\x6e\x53\x1e\xbc"
+ "\x16\x18\x42\xb1\xb1\xd3\x4b\xda"
+ "\x06\x9f\x8b\x77\xf7\xab\xd6\xed"
+ "\xa3\x1d\x90\xda\x49\x38\x20\xb8"
+ "\x6c\xee\xae\x3e\xae\x6c\x03\xb8"
+ "\x0b\xed\xc8\xaa\x0e\xc5\x1f\x90"
+ "\x60\xe2\xec\x1b\x76\xd0\xcf\xda"
+ "\x29\x1b\xb8\x5a\xbc\xf4\xba\x13"
+ "\x91\xa6\xcb\x83\x3f\xeb\xe9\x7b"
+ "\x03\xba\x40\x9e\xe6\x7a\xb2\x4a"
+ "\x73\x49\xfc\xed\xfb\x55\xa4\x24"
+ "\xc7\xa4\xd7\x4b\xf5\xf7\x16\x62"
+ "\x80\xd3\x19\x31\x52\x25\xa8\x69"
+ "\xda\x9a\x87\xf5\xf2\xee\x5d\x61"
+ "\xc1\x12\x72\x3e\x52\x26\x45\x3a"
+ "\xd8\x9d\x57\xfa\x14\xe2\x9b\x2f"
+ "\xd4\xaa\x5e\x31\xf4\x84\x89\xa4"
+ "\xe3\x0e\xb0\x58\x41\x75\x6a\xcb"
+ "\x30\x01\x98\x90\x15\x80\xf5\x27"
+ "\x92\x13\x81\xf0\x1c\x1e\xfc\xb1"
+ "\x33\xf7\x63\xb0\x67\xec\x2e\x5c"
+ "\x85\xe3\x5b\xd0\x43\x8a\xb8\x5f"
+ "\x44\x9f\xec\x19\xc9\x8f\xde\xdf"
+ "\x79\xef\xf8\xee\x14\x87\xb3\x34"
+ "\x76\x00\x3a\x9b\xc7\xed\xb1\x3d"
+ "\xef\x07\xb0\xe4\xfd\x68\x9e\xeb"
+ "\xc2\xb4\x1a\x85\x9a\x7d\x11\x88"
+ "\xf8\xab\x43\x55\x2b\x8a\x4f\x60"
+ "\x85\x9a\xf4\xba\xae\x48\x81\xeb"
+ "\x93\x07\x97\x9e\xde\x2a\xfc\x4e"
+ "\x31\xde\xaa\x44\xf7\x2a\xc3\xee"
+ "\x60\xa2\x98\x2c\x0a\x88\x50\xc5"
+ "\x6d\x89\xd3\xe4\xb6\xa7\xf4\xb0"
+ "\xcf\x0e\x89\xe3\x5e\x8f\x82\xf4"
+ "\x9d\xd1\xa9\x51\x50\x8a\xd2\x18"
+ "\x07\xb2\xaa\x3b\x7f\x58\x9b\xf4"
+ "\xb7\x24\x39\xd3\x66\x2f\x1e\xc0"
+ "\x11\xa3\x56\x56\x2a\x10\x73\xbc"
+ "\xe1\x23\xbf\xa9\x37\x07\x9c\xc3"
+ "\xb2\xc9\xa8\x1c\x5b\x5c\x58\xa4"
+ "\x77\x02\x26\xad\xc3\x40\x11\x53"
+ "\x93\x68\x72\xde\x05\x8b\x10\xbc"
+ "\xa6\xd4\x1b\xd9\x27\xd8\x16\x12"
+ "\x61\x2b\x31\x2a\x44\x87\x96\x58",
+ .len = 496,
+ },
+};
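These vectors reuse key/plaintext/ciphertext material from aes_cbc_tv_template, as the comment notes, but each .iv carries only an 8-byte value zero-padded to the AES block size. The essiv(cbc(aes),sha256) template (crypto/essiv.c) turns that into the IV actually fed to CBC by hashing the cipher key with SHA-256 and encrypting the supplied block with AES keyed by the digest. A userspace sketch of that derivation, using OpenSSL's legacy one-shot helpers purely for illustration:

#include <stddef.h>
#include <openssl/aes.h>
#include <openssl/sha.h>

/* iv_out = AES-256_{SHA256(key)}(iv_in); build with -lcrypto. */
static void essiv_derive_iv(const unsigned char *key, size_t klen,
			    const unsigned char iv_in[16],
			    unsigned char iv_out[16])
{
	unsigned char salt[SHA256_DIGEST_LENGTH];
	AES_KEY essiv_key;

	SHA256(key, klen, salt);		/* salt = H(key) */
	AES_set_encrypt_key(salt, 8 * sizeof(salt), &essiv_key);
	AES_encrypt(iv_in, iv_out, &essiv_key);	/* IV' = E_salt(IV) */
}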
+
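The .key blobs in the following template are not raw keys: as with the kernel's authenc() templates (cf. crypto_authenc_extractkeys()), each packs a native-endian rtattr header (length 8, type 1 = CRYPTO_AUTHENC_KEYA_PARAM, hence the __LITTLE_ENDIAN conditionals), a big-endian 32-bit encryption-key length, the 32-byte HMAC-SHA256 key, and finally the 16-byte AES key, which is why every .klen below reads 8 + 32 + 16. A userspace sketch of assembling such a blob; build_authenc_key() is a hypothetical helper used only to illustrate the layout.

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>	/* htonl() */

static size_t build_authenc_key(uint8_t *buf,
				const uint8_t *authkey, uint32_t authkeylen,
				const uint8_t *enckey, uint32_t enckeylen)
{
	uint16_t rta_len = 8;		/* rtattr header + __be32 payload */
	uint16_t rta_type = 1;		/* CRYPTO_AUTHENC_KEYA_PARAM */
	uint32_t be_enckeylen = htonl(enckeylen);
	uint8_t *p = buf;

	memcpy(p, &rta_len, sizeof(rta_len));	/* native endianness */
	p += sizeof(rta_len);
	memcpy(p, &rta_type, sizeof(rta_type));
	p += sizeof(rta_type);
	memcpy(p, &be_enckeylen, sizeof(be_enckeylen));
	p += sizeof(be_enckeylen);
	memcpy(p, authkey, authkeylen);
	p += authkeylen;
	memcpy(p, enckey, enckeylen);
	p += enckeylen;
	return (size_t)(p - buf);
}

For the first vector below, authkeylen = 32 (an all-zero HMAC key) and enckeylen = 16, so the "\x00\x00\x00\x10" field is the big-endian length 16 and the helper reproduces the 56-byte blob shown.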
+/* based on hmac_sha256_aes_cbc_tv_temp */
+static const struct aead_testvec essiv_hmac_sha256_aes_cbc_tv_temp[] = {
+ {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x06\xa9\x21\x40\x36\xb8\xa1\x5b"
+ "\x51\x2e\x03\xd5\x34\x12\x00\x06",
+ .klen = 8 + 32 + 16,
+ .iv = "\xb3\x0c\x5a\x11\x41\xad\xc1\x04"
+ "\xbc\x1e\x7e\x35\xb0\x5d\x78\x29",
+ .assoc = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30"
+ "\xb4\x22\xda\x80\x2c\x9f\xac\x41",
+ .alen = 16,
+ .ptext = "Single block msg",
+ .plen = 16,
+ .ctext = "\xe3\x53\x77\x9c\x10\x79\xae\xb8"
+ "\x27\x08\x94\x2d\xbe\x77\x18\x1a"
+ "\xcc\xde\x2d\x6a\xae\xf1\x0b\xcc"
+ "\x38\x06\x38\x51\xb4\xb8\xf3\x5b"
+ "\x5c\x34\xa6\xa3\x6e\x0b\x05\xe5"
+ "\x6a\x6d\x44\xaa\x26\xa8\x44\xa5",
+ .clen = 16 + 32,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0"
+ "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a",
+ .klen = 8 + 32 + 16,
+ .iv = "\x56\xe8\x14\xa5\x74\x18\x75\x13"
+ "\x2f\x79\xe7\xc8\x65\xe3\x48\x45",
+ .assoc = "\x56\x2e\x17\x99\x6d\x09\x3d\x28"
+ "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58",
+ .alen = 16,
+ .ptext = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
+ .plen = 32,
+ .ctext = "\xd2\x96\xcd\x94\xc2\xcc\xcf\x8a"
+ "\x3a\x86\x30\x28\xb5\xe1\xdc\x0a"
+ "\x75\x86\x60\x2d\x25\x3c\xff\xf9"
+ "\x1b\x82\x66\xbe\xa6\xd6\x1a\xb1"
+ "\xf5\x33\x53\xf3\x68\x85\x2a\x99"
+ "\x0e\x06\x58\x8f\xba\xf6\x06\xda"
+ "\x49\x69\x0d\x5b\xd4\x36\x06\x62"
+ "\x35\x5e\x54\x58\x53\x4d\xdf\xbf",
+ .clen = 32 + 32,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x6c\x3e\xa0\x47\x76\x30\xce\x21"
+ "\xa2\xce\x33\x4a\xa7\x46\xc2\xcd",
+ .klen = 8 + 32 + 16,
+ .iv = "\x1f\x6b\xfb\xd6\x6b\x72\x2f\xc9"
+ "\xb6\x9f\x8c\x10\xa8\x96\x15\x64",
+ .assoc = "\xc7\x82\xdc\x4c\x09\x8c\x66\xcb"
+ "\xd9\xcd\x27\xd8\x25\x68\x2c\x81",
+ .alen = 16,
+ .ptext = "This is a 48-byte message (exactly 3 AES blocks)",
+ .plen = 48,
+ .ctext = "\xd0\xa0\x2b\x38\x36\x45\x17\x53"
+ "\xd4\x93\x66\x5d\x33\xf0\xe8\x86"
+ "\x2d\xea\x54\xcd\xb2\x93\xab\xc7"
+ "\x50\x69\x39\x27\x67\x72\xf8\xd5"
+ "\x02\x1c\x19\x21\x6b\xad\x52\x5c"
+ "\x85\x79\x69\x5d\x83\xba\x26\x84"
+ "\x68\xb9\x3e\x90\x38\xa0\x88\x01"
+ "\xe7\xc6\xce\x10\x31\x2f\x9b\x1d"
+ "\x24\x78\xfb\xbe\x02\xe0\x4f\x40"
+ "\x10\xbd\xaa\xc6\xa7\x79\xe0\x1a",
+ .clen = 48 + 32,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x56\xe4\x7a\x38\xc5\x59\x89\x74"
+ "\xbc\x46\x90\x3d\xba\x29\x03\x49",
+ .klen = 8 + 32 + 16,
+ .iv = "\x13\xe5\xf2\xef\x61\x97\x59\x35"
+ "\x9b\x36\x84\x46\x4e\x63\xd1\x41",
+ .assoc = "\x8c\xe8\x2e\xef\xbe\xa0\xda\x3c"
+ "\x44\x69\x9e\xd7\xdb\x51\xb7\xd9",
+ .alen = 16,
+ .ptext = "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf",
+ .plen = 64,
+ .ctext = "\xc3\x0e\x32\xff\xed\xc0\x77\x4e"
+ "\x6a\xff\x6a\xf0\x86\x9f\x71\xaa"
+ "\x0f\x3a\xf0\x7a\x9a\x31\xa9\xc6"
+ "\x84\xdb\x20\x7e\xb0\xef\x8e\x4e"
+ "\x35\x90\x7a\xa6\x32\xc3\xff\xdf"
+ "\x86\x8b\xb7\xb2\x9d\x3d\x46\xad"
+ "\x83\xce\x9f\x9a\x10\x2e\xe9\x9d"
+ "\x49\xa5\x3e\x87\xf4\xc3\xda\x55"
+ "\x7a\x1b\xd4\x3c\xdb\x17\x95\xe2"
+ "\xe0\x93\xec\xc9\x9f\xf7\xce\xd8"
+ "\x3f\x54\xe2\x49\x39\xe3\x71\x25"
+ "\x2b\x6c\xe9\x5d\xec\xec\x2b\x64",
+ .clen = 64 + 32,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x90\xd3\x82\xb4\x10\xee\xba\x7a"
+ "\xd9\x38\xc4\x6c\xec\x1a\x82\xbf",
+ .klen = 8 + 32 + 16,
+ .iv = "\xe4\x13\xa1\x15\xe9\x6b\xb8\x23"
+ "\x81\x7a\x94\x29\xab\xfd\xd2\x2c",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01"
+ "\xe9\x6e\x8c\x08\xab\x46\x57\x63"
+ "\xfd\x09\x8d\x45\xdd\x3f\xf8\x93",
+ .alen = 24,
+ .ptext = "\x08\x00\x0e\xbd\xa7\x0a\x00\x00"
+ "\x8e\x9c\x08\x3d\xb9\x5b\x07\x00"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0e\x01",
+ .plen = 80,
+ .ctext = "\xf6\x63\xc2\x5d\x32\x5c\x18\xc6"
+ "\xa9\x45\x3e\x19\x4e\x12\x08\x49"
+ "\xa4\x87\x0b\x66\xcc\x6b\x99\x65"
+ "\x33\x00\x13\xb4\x89\x8d\xc8\x56"
+ "\xa4\x69\x9e\x52\x3a\x55\xdb\x08"
+ "\x0b\x59\xec\x3a\x8e\x4b\x7e\x52"
+ "\x77\x5b\x07\xd1\xdb\x34\xed\x9c"
+ "\x53\x8a\xb5\x0c\x55\x1b\x87\x4a"
+ "\xa2\x69\xad\xd0\x47\xad\x2d\x59"
+ "\x13\xac\x19\xb7\xcf\xba\xd4\xa6"
+ "\xbb\xd4\x0f\xbe\xa3\x3b\x4c\xb8"
+ "\x3a\xd2\xe1\x03\x86\xa5\x59\xb7"
+ "\x73\xc3\x46\x20\x2c\xb1\xef\x68"
+ "\xbb\x8a\x32\x7e\x12\x8c\x69\xcf",
+ .clen = 80 + 32,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 8 + 32 + 24,
+ .iv = "\x49\xca\x41\xc9\x6b\xbf\x6c\x98"
+ "\x38\x2f\xa7\x3d\x4d\x80\x49\xb0",
+ .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .alen = 16,
+ .ptext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .plen = 64,
+ .ctext = "\x4f\x02\x1d\xb2\x43\xbc\x63\x3d"
+ "\x71\x78\x18\x3a\x9f\xa0\x71\xe8"
+ "\xb4\xd9\xad\xa9\xad\x7d\xed\xf4"
+ "\xe5\xe7\x38\x76\x3f\x69\x14\x5a"
+ "\x57\x1b\x24\x20\x12\xfb\x7a\xe0"
+ "\x7f\xa9\xba\xac\x3d\xf1\x02\xe0"
+ "\x08\xb0\xe2\x79\x88\x59\x88\x81"
+ "\xd9\x20\xa9\xe6\x4f\x56\x15\xcd"
+ "\x2f\xee\x5f\xdb\x66\xfe\x79\x09"
+ "\x61\x81\x31\xea\x5b\x3d\x8e\xfb"
+ "\xca\x71\x85\x93\xf7\x85\x55\x8b"
+ "\x7a\xe4\x94\xca\x8b\xba\x19\x33",
+ .clen = 64 + 32,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x20" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 8 + 32 + 32,
+ .iv = "\xdf\xab\xf2\x7c\xdc\xe0\x33\x4c"
+ "\xf9\x75\xaf\xf9\x2f\x60\x3a\x9b",
+ .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .alen = 16,
+ .ptext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .plen = 64,
+ .ctext = "\xf5\x8c\x4c\x04\xd6\xe5\xf1\xba"
+ "\x77\x9e\xab\xfb\x5f\x7b\xfb\xd6"
+ "\x9c\xfc\x4e\x96\x7e\xdb\x80\x8d"
+ "\x67\x9f\x77\x7b\xc6\x70\x2c\x7d"
+ "\x39\xf2\x33\x69\xa9\xd9\xba\xcf"
+ "\xa5\x30\xe2\x63\x04\x23\x14\x61"
+ "\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc"
+ "\xda\x6c\x19\x07\x8c\x6a\x9d\x1b"
+ "\x24\x29\xed\xc2\x31\x49\xdb\xb1"
+ "\x8f\x74\xbd\x17\x92\x03\xbe\x8f"
+ "\xf3\x61\xde\x1c\xe9\xdb\xcd\xd0"
+ "\xcc\xce\xe9\x85\x57\xcf\x6f\x5f",
+ .clen = 64 + 32,
+ },
+};
+
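All of the essiv AEAD vectors above share one layout for .key: an authenc()-style blob made of a 4-byte rtattr header (stored in host endianness, hence the #ifdef), a big-endian 32-bit encryption-key length, the 32-byte HMAC-SHA256 key, and finally the AES key — which is why every .klen reads 8 + 32 + enckeylen. A hedged userspace sketch of building such a blob (the helper name is ours; the field layout mirrors the kernel's crypto_authenc_key_param, and a little-endian host is assumed for the header, matching the first #ifdef branch):

#include <stdint.h>
#include <string.h>

/* Build an authenc-style key blob: rtattr header, be32 enckeylen,
 * auth key, enc key. Returns the total length (the vectors' .klen).
 * Hypothetical helper; layout matches the test vectors above. */
static size_t build_authenc_key(uint8_t *out,
				const uint8_t *auth_key, size_t auth_len,
				const uint8_t *enc_key, size_t enc_len)
{
	size_t off = 0;

	out[off++] = 0x08; out[off++] = 0x00;	/* rta_len = 8 (LE host) */
	out[off++] = 0x01; out[off++] = 0x00;	/* rta_type = 1 (KEYA_PARAM) */
	out[off++] = (uint8_t)(enc_len >> 24);	/* enckeylen, big-endian */
	out[off++] = (uint8_t)(enc_len >> 16);
	out[off++] = (uint8_t)(enc_len >> 8);
	out[off++] = (uint8_t)enc_len;
	memcpy(out + off, auth_key, auth_len); off += auth_len;
	memcpy(out + off, enc_key, enc_len); off += enc_len;
	return off;
}

For the first vector above, auth_len is 32 (an all-zero HMAC key) and enc_len is 16, giving .klen = 56; the last two vectors switch to 24- and 32-byte AES keys, which is the only thing that changes in the header.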
+static const char blake2_ordered_sequence[] =
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff";
+
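blake2_ordered_sequence is nothing more than the 256 possible byte values in ascending order; the templates below hash prefixes of it (psize 1, 7, 15, 64, 247, 256) and, for keyed tests, borrow its first 16, 32, or 64 bytes as the key. It is equivalent to this trivial sketch:

uint8_t seq[256];

for (int i = 0; i < 256; i++)
	seq[i] = (uint8_t)i;	/* 0x00, 0x01, ..., 0xff */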
+static const struct hash_testvec blake2b_160_tv_template[] = {{
+ .digest = (u8[]){ 0x33, 0x45, 0x52, 0x4a, 0xbf, 0x6b, 0xbe, 0x18,
+ 0x09, 0x44, 0x92, 0x24, 0xb5, 0x97, 0x2c, 0x41,
+ 0x79, 0x0b, 0x6c, 0xf2, },
+}, {
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0x11, 0xcc, 0x66, 0x61, 0xe9, 0x22, 0xb0, 0xe4,
+ 0x07, 0xe0, 0xa5, 0x72, 0x49, 0xc3, 0x8d, 0x4f,
+ 0xf7, 0x6d, 0x8e, 0xc8, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0x31, 0xe3, 0xd9, 0xd5, 0x4e, 0x72, 0xd8, 0x0b,
+ 0x2b, 0x3b, 0xd7, 0x6b, 0x82, 0x7a, 0x1d, 0xfb,
+ 0x56, 0x2f, 0x79, 0x4c, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0x28, 0x20, 0xd1, 0xbe, 0x7f, 0xcc, 0xc1, 0x62,
+ 0xd9, 0x0d, 0x9a, 0x4b, 0x47, 0xd1, 0x5e, 0x04,
+ 0x74, 0x2a, 0x53, 0x17, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0x45, 0xe9, 0x95, 0xb6, 0xc4, 0xe8, 0x22, 0xea,
+ 0xfe, 0xd2, 0x37, 0xdb, 0x46, 0xbf, 0xf1, 0x25,
+ 0xd5, 0x03, 0x1d, 0x81, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0x7e, 0xb9, 0xf2, 0x9b, 0x2f, 0xc2, 0x01, 0xd4,
+ 0xb0, 0x4f, 0x08, 0x2b, 0x8e, 0xbd, 0x06, 0xef,
+ 0x1c, 0xc4, 0x25, 0x95, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0x6e, 0x35, 0x01, 0x70, 0xbf, 0xb6, 0xc4, 0xba,
+ 0x33, 0x1b, 0xa6, 0xd3, 0xc2, 0x5d, 0xb4, 0x03,
+ 0x95, 0xaf, 0x29, 0x16, },
+}};
+
+static const struct hash_testvec blake2b_256_tv_template[] = {{
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0x9d, 0xf1, 0x4b, 0x72, 0x48, 0x76, 0x4a, 0x86,
+ 0x91, 0x97, 0xc3, 0x5e, 0x39, 0x2d, 0x2a, 0x6d,
+ 0x6f, 0xdc, 0x5b, 0x79, 0xd5, 0x97, 0x29, 0x79,
+ 0x20, 0xfd, 0x3f, 0x14, 0x91, 0xb4, 0x42, 0xd2, },
+}, {
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0x39, 0xa7, 0xeb, 0x9f, 0xed, 0xc1, 0x9a, 0xab,
+ 0xc8, 0x34, 0x25, 0xc6, 0x75, 0x5d, 0xd9, 0x0e,
+ 0x6f, 0x9d, 0x0c, 0x80, 0x49, 0x64, 0xa1, 0xf4,
+ 0xaa, 0xee, 0xa3, 0xb9, 0xfb, 0x59, 0x98, 0x35, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .digest = (u8[]){ 0xc3, 0x08, 0xb1, 0xbf, 0xe4, 0xf9, 0xbc, 0xb4,
+ 0x75, 0xaf, 0x3f, 0x59, 0x6e, 0xae, 0xde, 0x6a,
+ 0xa3, 0x8e, 0xb5, 0x94, 0xad, 0x30, 0xf0, 0x17,
+ 0x1c, 0xfb, 0xd8, 0x3e, 0x8a, 0xbe, 0xed, 0x9c, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0x34, 0x75, 0x8b, 0x64, 0x71, 0x35, 0x62, 0x82,
+ 0x97, 0xfb, 0x09, 0xc7, 0x93, 0x0c, 0xd0, 0x4e,
+ 0x95, 0x28, 0xe5, 0x66, 0x91, 0x12, 0xf5, 0xb1,
+ 0x31, 0x84, 0x93, 0xe1, 0x4d, 0xe7, 0x7e, 0x55, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0xce, 0x74, 0xa9, 0x2e, 0xe9, 0x40, 0x3d, 0xa2,
+ 0x11, 0x4a, 0x99, 0x25, 0x7a, 0x34, 0x5d, 0x35,
+ 0xdf, 0x6a, 0x48, 0x79, 0x2a, 0x93, 0x93, 0xff,
+ 0x1f, 0x3c, 0x39, 0xd0, 0x71, 0x1f, 0x20, 0x7b, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0x2e, 0x84, 0xdb, 0xa2, 0x5f, 0x0e, 0xe9, 0x52,
+ 0x79, 0x50, 0x69, 0x9f, 0xf1, 0xfd, 0xfc, 0x9d,
+ 0x89, 0x83, 0xa9, 0xb6, 0xa4, 0xd5, 0xfa, 0xb5,
+ 0xbe, 0x35, 0x1a, 0x17, 0x8a, 0x2c, 0x7f, 0x7d, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0x2e, 0x26, 0xf0, 0x09, 0x02, 0x65, 0x90, 0x09,
+ 0xcc, 0xf5, 0x4c, 0x44, 0x74, 0x0e, 0xa0, 0xa8,
+ 0x25, 0x4a, 0xda, 0x61, 0x56, 0x95, 0x7d, 0x3f,
+ 0x6d, 0xc0, 0x43, 0x17, 0x95, 0x89, 0xcd, 0x9d, },
+}};
+
+static const struct hash_testvec blake2b_384_tv_template[] = {{
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0xcc, 0x01, 0x08, 0x85, 0x36, 0xf7, 0x84, 0xf0,
+ 0xbb, 0x76, 0x9e, 0x41, 0xc4, 0x95, 0x7b, 0x6d,
+ 0x0c, 0xde, 0x1f, 0xcc, 0x8c, 0xf1, 0xd9, 0x1f,
+ 0xc4, 0x77, 0xd4, 0xdd, 0x6e, 0x3f, 0xbf, 0xcd,
+ 0x43, 0xd1, 0x69, 0x8d, 0x14, 0x6f, 0x34, 0x8b,
+ 0x2c, 0x36, 0xa3, 0x39, 0x68, 0x2b, 0xec, 0x3f, },
+}, {
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0xc8, 0xf8, 0xf0, 0xa2, 0x69, 0xfa, 0xcc, 0x4d,
+ 0x32, 0x5f, 0x13, 0x88, 0xca, 0x71, 0x99, 0x8f,
+ 0xf7, 0x30, 0x41, 0x5d, 0x6e, 0x34, 0xb7, 0x6e,
+ 0x3e, 0xd0, 0x46, 0xb6, 0xca, 0x30, 0x66, 0xb2,
+ 0x6f, 0x0c, 0x35, 0x54, 0x17, 0xcd, 0x26, 0x1b,
+ 0xef, 0x48, 0x98, 0xe0, 0x56, 0x7c, 0x05, 0xd2, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .digest = (u8[]){ 0x15, 0x09, 0x7a, 0x90, 0x13, 0x23, 0xab, 0x0c,
+ 0x0b, 0x43, 0x21, 0x9a, 0xb5, 0xc6, 0x0c, 0x2e,
+ 0x7c, 0x57, 0xfc, 0xcc, 0x4b, 0x0f, 0xf0, 0x57,
+ 0xb7, 0x9c, 0xe7, 0x0f, 0xe1, 0x57, 0xac, 0x37,
+ 0x77, 0xd4, 0xf4, 0x2f, 0x03, 0x3b, 0x64, 0x09,
+ 0x84, 0xa0, 0xb3, 0x24, 0xb7, 0xae, 0x47, 0x5e, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0x0b, 0x82, 0x88, 0xca, 0x05, 0x2f, 0x1b, 0x15,
+ 0xdc, 0xbb, 0x22, 0x27, 0x11, 0x6b, 0xf4, 0xd1,
+ 0xe9, 0x8f, 0x1b, 0x0b, 0x58, 0x3f, 0x5e, 0x86,
+ 0x80, 0x82, 0x6f, 0x8e, 0x54, 0xc1, 0x9f, 0x12,
+ 0xcf, 0xe9, 0x56, 0xc1, 0xfc, 0x1a, 0x08, 0xb9,
+ 0x4a, 0x57, 0x0a, 0x76, 0x3c, 0x15, 0x33, 0x18, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0x4a, 0x81, 0x55, 0xb9, 0x79, 0x42, 0x8c, 0xc6,
+ 0x4f, 0xfe, 0xca, 0x82, 0x3b, 0xb2, 0xf7, 0xbc,
+ 0x5e, 0xfc, 0xab, 0x09, 0x1c, 0xd6, 0x3b, 0xe1,
+ 0x50, 0x82, 0x3b, 0xde, 0xc7, 0x06, 0xee, 0x3b,
+ 0x29, 0xce, 0xe5, 0x68, 0xe0, 0xff, 0xfa, 0xe1,
+ 0x7a, 0xf1, 0xc0, 0xfe, 0x57, 0xf4, 0x60, 0x49, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0x34, 0xbd, 0xe1, 0x99, 0x43, 0x9f, 0x82, 0x72,
+ 0xe7, 0xed, 0x94, 0x9e, 0xe1, 0x84, 0xee, 0x82,
+ 0xfd, 0x26, 0x23, 0xc4, 0x17, 0x8d, 0xf5, 0x04,
+ 0xeb, 0xb7, 0xbc, 0xb8, 0xf3, 0x68, 0xb7, 0xad,
+ 0x94, 0x8e, 0x05, 0x3f, 0x8a, 0x5d, 0x8d, 0x81,
+ 0x3e, 0x88, 0xa7, 0x8c, 0xa2, 0xd5, 0xdc, 0x76, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0x22, 0x14, 0xf4, 0xb0, 0x4c, 0xa8, 0xb5, 0x7d,
+ 0xa7, 0x5c, 0x04, 0xeb, 0xd8, 0x8d, 0x04, 0x71,
+ 0xc7, 0x3c, 0xc7, 0x6e, 0x8b, 0x20, 0x36, 0x40,
+ 0x9d, 0xd0, 0x60, 0xc6, 0xe3, 0x0b, 0x6e, 0x50,
+ 0xf5, 0xaf, 0xf5, 0xc6, 0x3b, 0xe3, 0x84, 0x6a,
+ 0x93, 0x1b, 0x12, 0xd6, 0x18, 0x27, 0xba, 0x36, },
+}};
+
+static const struct hash_testvec blake2b_512_tv_template[] = {{
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0x44, 0x4b, 0x24, 0x0f, 0xe3, 0xed, 0x86, 0xd0,
+ 0xe2, 0xef, 0x4c, 0xe7, 0xd8, 0x51, 0xed, 0xde,
+ 0x22, 0x15, 0x55, 0x82, 0xaa, 0x09, 0x14, 0x79,
+ 0x7b, 0x72, 0x6c, 0xd0, 0x58, 0xb6, 0xf4, 0x59,
+ 0x32, 0xe0, 0xe1, 0x29, 0x51, 0x68, 0x76, 0x52,
+ 0x7b, 0x1d, 0xd8, 0x8f, 0xc6, 0x6d, 0x71, 0x19,
+ 0xf4, 0xab, 0x3b, 0xed, 0x93, 0xa6, 0x1a, 0x0e,
+ 0x2d, 0x2d, 0x2a, 0xea, 0xc3, 0x36, 0xd9, 0x58, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .digest = (u8[]){ 0x10, 0xeb, 0xb6, 0x77, 0x00, 0xb1, 0x86, 0x8e,
+ 0xfb, 0x44, 0x17, 0x98, 0x7a, 0xcf, 0x46, 0x90,
+ 0xae, 0x9d, 0x97, 0x2f, 0xb7, 0xa5, 0x90, 0xc2,
+ 0xf0, 0x28, 0x71, 0x79, 0x9a, 0xaa, 0x47, 0x86,
+ 0xb5, 0xe9, 0x96, 0xe8, 0xf0, 0xf4, 0xeb, 0x98,
+ 0x1f, 0xc2, 0x14, 0xb0, 0x05, 0xf4, 0x2d, 0x2f,
+ 0xf4, 0x23, 0x34, 0x99, 0x39, 0x16, 0x53, 0xdf,
+ 0x7a, 0xef, 0xcb, 0xc1, 0x3f, 0xc5, 0x15, 0x68, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0xd2, 0x11, 0x31, 0x29, 0x3f, 0xea, 0xca, 0x72,
+ 0x21, 0xe4, 0x06, 0x65, 0x05, 0x2a, 0xd1, 0x02,
+ 0xc0, 0x8d, 0x7b, 0xf1, 0x09, 0x3c, 0xef, 0x88,
+ 0xe1, 0x68, 0x0c, 0xf1, 0x3b, 0xa4, 0xe3, 0x03,
+ 0xed, 0xa0, 0xe3, 0x60, 0x58, 0xa0, 0xdb, 0x52,
+ 0x8a, 0x66, 0x43, 0x09, 0x60, 0x1a, 0xbb, 0x67,
+ 0xc5, 0x84, 0x31, 0x40, 0xfa, 0xde, 0xc1, 0xd0,
+ 0xff, 0x3f, 0x4a, 0x69, 0xd9, 0x92, 0x26, 0x86, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0xa3, 0x3e, 0x50, 0xbc, 0xfb, 0xd9, 0xf0, 0x82,
+ 0xa6, 0xd1, 0xdf, 0xaf, 0x82, 0xd0, 0xcf, 0x84,
+ 0x9a, 0x25, 0x3c, 0xae, 0x6d, 0xb5, 0xaf, 0x01,
+ 0xd7, 0xaf, 0xed, 0x50, 0xdc, 0xe2, 0xba, 0xcc,
+ 0x8c, 0x38, 0xf5, 0x16, 0x89, 0x38, 0x86, 0xce,
+ 0x68, 0x10, 0x63, 0x64, 0xa5, 0x79, 0x53, 0xb5,
+ 0x2e, 0x8e, 0xbc, 0x0a, 0xce, 0x95, 0xc0, 0x1e,
+ 0x69, 0x59, 0x1d, 0x3b, 0xd8, 0x19, 0x90, 0xd7, },
+}, {
+ .ksize = 64,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0x65, 0x67, 0x6d, 0x80, 0x06, 0x17, 0x97, 0x2f,
+ 0xbd, 0x87, 0xe4, 0xb9, 0x51, 0x4e, 0x1c, 0x67,
+ 0x40, 0x2b, 0x7a, 0x33, 0x10, 0x96, 0xd3, 0xbf,
+ 0xac, 0x22, 0xf1, 0xab, 0xb9, 0x53, 0x74, 0xab,
+ 0xc9, 0x42, 0xf1, 0x6e, 0x9a, 0xb0, 0xea, 0xd3,
+ 0x3b, 0x87, 0xc9, 0x19, 0x68, 0xa6, 0xe5, 0x09,
+ 0xe1, 0x19, 0xff, 0x07, 0x78, 0x7b, 0x3e, 0xf4,
+ 0x83, 0xe1, 0xdc, 0xdc, 0xcf, 0x6e, 0x30, 0x22, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0xc2, 0x96, 0x2c, 0x6b, 0x84, 0xff, 0xee, 0xea,
+ 0x9b, 0xb8, 0x55, 0x2d, 0x6b, 0xa5, 0xd5, 0xe5,
+ 0xbd, 0xb1, 0x54, 0xb6, 0x1e, 0xfb, 0x63, 0x16,
+ 0x6e, 0x22, 0x04, 0xf0, 0x82, 0x7a, 0xc6, 0x99,
+ 0xf7, 0x4c, 0xff, 0x93, 0x71, 0x57, 0x64, 0xd0,
+ 0x08, 0x60, 0x39, 0x98, 0xb8, 0xd2, 0x2b, 0x4e,
+ 0x81, 0x8d, 0xe4, 0x8f, 0xb2, 0x1e, 0x8f, 0x99,
+ 0x98, 0xf1, 0x02, 0x9b, 0x4c, 0x7c, 0x97, 0x1a, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0x0f, 0x32, 0x05, 0x09, 0xad, 0x9f, 0x25, 0xf7,
+ 0xf2, 0x00, 0x71, 0xc9, 0x9f, 0x08, 0x58, 0xd1,
+ 0x67, 0xc3, 0xa6, 0x2c, 0x0d, 0xe5, 0x7c, 0x15,
+ 0x35, 0x18, 0x5a, 0x68, 0xc1, 0xca, 0x1c, 0x6e,
+ 0x0f, 0xc4, 0xf6, 0x0c, 0x43, 0xe1, 0xb4, 0x3d,
+ 0x28, 0xe4, 0xc7, 0xa1, 0xcf, 0x6b, 0x17, 0x4e,
+ 0xf1, 0x5b, 0xb5, 0x53, 0xd4, 0xa7, 0xd0, 0x5b,
+ 0xae, 0x15, 0x81, 0x15, 0xd0, 0x88, 0xa0, 0x3c, },
+}};
+
+static const struct hash_testvec blakes2s_128_tv_template[] = {{
+ .digest = (u8[]){ 0x64, 0x55, 0x0d, 0x6f, 0xfe, 0x2c, 0x0a, 0x01,
+ 0xa1, 0x4a, 0xba, 0x1e, 0xad, 0xe0, 0x20, 0x0c, },
+}, {
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0xdc, 0x66, 0xca, 0x8f, 0x03, 0x86, 0x58, 0x01,
+ 0xb0, 0xff, 0xe0, 0x6e, 0xd8, 0xa1, 0xa9, 0x0e, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0x88, 0x1e, 0x42, 0xe7, 0xbb, 0x35, 0x80, 0x82,
+ 0x63, 0x7c, 0x0a, 0x0f, 0xd7, 0xec, 0x6c, 0x2f, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0xcf, 0x9e, 0x07, 0x2a, 0xd5, 0x22, 0xf2, 0xcd,
+ 0xa2, 0xd8, 0x25, 0x21, 0x80, 0x86, 0x73, 0x1c, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0xf6, 0x33, 0x5a, 0x2c, 0x22, 0xa0, 0x64, 0xb2,
+ 0xb6, 0x3f, 0xeb, 0xbc, 0xd1, 0xc3, 0xe5, 0xb2, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0x72, 0x66, 0x49, 0x60, 0xf9, 0x4a, 0xea, 0xbe,
+ 0x1f, 0xf4, 0x60, 0xce, 0xb7, 0x81, 0xcb, 0x09, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0xd5, 0xa4, 0x0e, 0xc3, 0x16, 0xc7, 0x51, 0xa6,
+ 0x3c, 0xd0, 0xd9, 0x11, 0x57, 0xfa, 0x1e, 0xbb, },
+}};
+
+static const struct hash_testvec blakes2s_160_tv_template[] = {{
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0xb4, 0xf2, 0x03, 0x49, 0x37, 0xed, 0xb1, 0x3e,
+ 0x5b, 0x2a, 0xca, 0x64, 0x82, 0x74, 0xf6, 0x62,
+ 0xe3, 0xf2, 0x84, 0xff, },
+}, {
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0xaa, 0x56, 0x9b, 0xdc, 0x98, 0x17, 0x75, 0xf2,
+ 0xb3, 0x68, 0x83, 0xb7, 0x9b, 0x8d, 0x48, 0xb1,
+ 0x9b, 0x2d, 0x35, 0x05, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .digest = (u8[]){ 0x50, 0x16, 0xe7, 0x0c, 0x01, 0xd0, 0xd3, 0xc3,
+ 0xf4, 0x3e, 0xb1, 0x6e, 0x97, 0xa9, 0x4e, 0xd1,
+ 0x79, 0x65, 0x32, 0x93, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0x1c, 0x2b, 0xcd, 0x9a, 0x68, 0xca, 0x8c, 0x71,
+ 0x90, 0x29, 0x6c, 0x54, 0xfa, 0x56, 0x4a, 0xef,
+ 0xa2, 0x3a, 0x56, 0x9c, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0x36, 0xc3, 0x5f, 0x9a, 0xdc, 0x7e, 0xbf, 0x19,
+ 0x68, 0xaa, 0xca, 0xd8, 0x81, 0xbf, 0x09, 0x34,
+ 0x83, 0x39, 0x0f, 0x30, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0x86, 0x80, 0x78, 0xa4, 0x14, 0xec, 0x03, 0xe5,
+ 0xb6, 0x9a, 0x52, 0x0e, 0x42, 0xee, 0x39, 0x9d,
+ 0xac, 0xa6, 0x81, 0x63, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0x2d, 0xd8, 0xd2, 0x53, 0x66, 0xfa, 0xa9, 0x01,
+ 0x1c, 0x9c, 0xaf, 0xa3, 0xe2, 0x9d, 0x9b, 0x10,
+ 0x0a, 0xf6, 0x73, 0xe8, },
+}};
+
+static const struct hash_testvec blakes2s_224_tv_template[] = {{
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0x61, 0xb9, 0x4e, 0xc9, 0x46, 0x22, 0xa3, 0x91,
+ 0xd2, 0xae, 0x42, 0xe6, 0x45, 0x6c, 0x90, 0x12,
+ 0xd5, 0x80, 0x07, 0x97, 0xb8, 0x86, 0x5a, 0xfc,
+ 0x48, 0x21, 0x97, 0xbb, },
+}, {
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0x9e, 0xda, 0xc7, 0x20, 0x2c, 0xd8, 0x48, 0x2e,
+ 0x31, 0x94, 0xab, 0x46, 0x6d, 0x94, 0xd8, 0xb4,
+ 0x69, 0xcd, 0xae, 0x19, 0x6d, 0x9e, 0x41, 0xcc,
+ 0x2b, 0xa4, 0xd5, 0xf6, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .digest = (u8[]){ 0x32, 0xc0, 0xac, 0xf4, 0x3b, 0xd3, 0x07, 0x9f,
+ 0xbe, 0xfb, 0xfa, 0x4d, 0x6b, 0x4e, 0x56, 0xb3,
+ 0xaa, 0xd3, 0x27, 0xf6, 0x14, 0xbf, 0xb9, 0x32,
+ 0xa7, 0x19, 0xfc, 0xb8, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0x73, 0xad, 0x5e, 0x6d, 0xb9, 0x02, 0x8e, 0x76,
+ 0xf2, 0x66, 0x42, 0x4b, 0x4c, 0xfa, 0x1f, 0xe6,
+ 0x2e, 0x56, 0x40, 0xe5, 0xa2, 0xb0, 0x3c, 0xe8,
+ 0x7b, 0x45, 0xfe, 0x05, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0x16, 0x60, 0xfb, 0x92, 0x54, 0xb3, 0x6e, 0x36,
+ 0x81, 0xf4, 0x16, 0x41, 0xc3, 0x3d, 0xd3, 0x43,
+ 0x84, 0xed, 0x10, 0x6f, 0x65, 0x80, 0x7a, 0x3e,
+ 0x25, 0xab, 0xc5, 0x02, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0xca, 0xaa, 0x39, 0x67, 0x9c, 0xf7, 0x6b, 0xc7,
+ 0xb6, 0x82, 0xca, 0x0e, 0x65, 0x36, 0x5b, 0x7c,
+ 0x24, 0x00, 0xfa, 0x5f, 0xda, 0x06, 0x91, 0x93,
+ 0x6a, 0x31, 0x83, 0xb5, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0x90, 0x02, 0x26, 0xb5, 0x06, 0x9c, 0x36, 0x86,
+ 0x94, 0x91, 0x90, 0x1e, 0x7d, 0x2a, 0x71, 0xb2,
+ 0x48, 0xb5, 0xe8, 0x16, 0xfd, 0x64, 0x33, 0x45,
+ 0xb3, 0xd7, 0xec, 0xcc, },
+}};
+
+static const struct hash_testvec blakes2s_256_tv_template[] = {{
+ .plaintext = blake2_ordered_sequence,
+ .psize = 15,
+ .digest = (u8[]){ 0xd9, 0x7c, 0x82, 0x8d, 0x81, 0x82, 0xa7, 0x21,
+ 0x80, 0xa0, 0x6a, 0x78, 0x26, 0x83, 0x30, 0x67,
+ 0x3f, 0x7c, 0x4e, 0x06, 0x35, 0x94, 0x7c, 0x04,
+ 0xc0, 0x23, 0x23, 0xfd, 0x45, 0xc0, 0xa5, 0x2d, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .digest = (u8[]){ 0x48, 0xa8, 0x99, 0x7d, 0xa4, 0x07, 0x87, 0x6b,
+ 0x3d, 0x79, 0xc0, 0xd9, 0x23, 0x25, 0xad, 0x3b,
+ 0x89, 0xcb, 0xb7, 0x54, 0xd8, 0x6a, 0xb7, 0x1a,
+ 0xee, 0x04, 0x7a, 0xd3, 0x45, 0xfd, 0x2c, 0x49, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 1,
+ .digest = (u8[]){ 0x22, 0x27, 0xae, 0xaa, 0x6e, 0x81, 0x56, 0x03,
+ 0xa7, 0xe3, 0xa1, 0x18, 0xa5, 0x9a, 0x2c, 0x18,
+ 0xf4, 0x63, 0xbc, 0x16, 0x70, 0xf1, 0xe7, 0x4b,
+ 0x00, 0x6d, 0x66, 0x16, 0xae, 0x9e, 0x74, 0x4e, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 7,
+ .digest = (u8[]){ 0x58, 0x5d, 0xa8, 0x60, 0x1c, 0xa4, 0xd8, 0x03,
+ 0x86, 0x86, 0x84, 0x64, 0xd7, 0xa0, 0x8e, 0x15,
+ 0x2f, 0x05, 0xa2, 0x1b, 0xbc, 0xef, 0x7a, 0x34,
+ 0xb3, 0xc5, 0xbc, 0x4b, 0xf0, 0x32, 0xeb, 0x12, },
+}, {
+ .ksize = 32,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 64,
+ .digest = (u8[]){ 0x89, 0x75, 0xb0, 0x57, 0x7f, 0xd3, 0x55, 0x66,
+ 0xd7, 0x50, 0xb3, 0x62, 0xb0, 0x89, 0x7a, 0x26,
+ 0xc3, 0x99, 0x13, 0x6d, 0xf0, 0x7b, 0xab, 0xab,
+ 0xbd, 0xe6, 0x20, 0x3f, 0xf2, 0x95, 0x4e, 0xd4, },
+}, {
+ .ksize = 1,
+ .key = "B",
+ .plaintext = blake2_ordered_sequence,
+ .psize = 247,
+ .digest = (u8[]){ 0x2e, 0x74, 0x1c, 0x1d, 0x03, 0xf4, 0x9d, 0x84,
+ 0x6f, 0xfc, 0x86, 0x32, 0x92, 0x49, 0x7e, 0x66,
+ 0xd7, 0xc3, 0x10, 0x88, 0xfe, 0x28, 0xb3, 0xe0,
+ 0xbf, 0x50, 0x75, 0xad, 0x8e, 0xa4, 0xe6, 0xb2, },
+}, {
+ .ksize = 16,
+ .key = blake2_ordered_sequence,
+ .plaintext = blake2_ordered_sequence,
+ .psize = 256,
+ .digest = (u8[]){ 0xb9, 0xd2, 0x81, 0x0e, 0x3a, 0xb1, 0x62, 0x9b,
+ 0xad, 0x44, 0x05, 0xf4, 0x92, 0x2e, 0x99, 0xc1,
+ 0x4a, 0x47, 0xbb, 0x5b, 0x6f, 0xb2, 0x96, 0xed,
+ 0xd5, 0x06, 0xb5, 0x3a, 0x7c, 0x7a, 0x65, 0x1d, },
+}};
+
#endif /* _CRYPTO_TESTMGR_H */
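Each blake2 template above is a plain hash_testvec: an optional key, a plaintext prefix, and the expected digest. A hedged sketch of how one such vector would be checked against the corresponding shash — boiled down from what crypto/testmgr.c does in essence (the real harness additionally covers ahash, chunked updates, import/export, and misaligned buffers):

#include <crypto/hash.h>
#include <linux/string.h>

/* Minimal, hedged test driver for one (optionally keyed) hash_testvec. */
static int check_hash_tv(const char *alg,
			 const u8 *key, unsigned int ksize,
			 const u8 *pt, unsigned int psize,
			 const u8 *want, unsigned int dsize)
{
	struct crypto_shash *tfm = crypto_alloc_shash(alg, 0, 0);
	u8 got[64];	/* big enough for blake2b-512 */
	int err;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = ksize ? crypto_shash_setkey(tfm, key, ksize) : 0;
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, pt, psize, got);
	}
	if (!err && memcmp(got, want, dsize))
		err = -EINVAL;

	crypto_free_shash(tfm);
	return err;
}

For the keyed blake2b-160 entry with psize = 1, for instance, this would run with alg = "blake2b-160", ksize = 32, and dsize = 20.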
diff --git a/crypto/tgr192.c b/crypto/tgr192.c
index 052648e24909..aa29c529b44e 100644
--- a/crypto/tgr192.c
+++ b/crypto/tgr192.c
@@ -555,7 +555,7 @@ static int tgr192_final(struct shash_desc *desc, u8 * out)
__le32 *le32p;
u32 t, msb, lsb;
- tgr192_update(desc, NULL, 0); /* flush */ ;
+ tgr192_update(desc, NULL, 0); /* flush */
msb = 0;
t = tctx->nblocks;
@@ -583,7 +583,7 @@ static int tgr192_final(struct shash_desc *desc, u8 * out)
while (tctx->count < 64) {
tctx->hash[tctx->count++] = 0;
}
- tgr192_update(desc, NULL, 0); /* flush */ ;
+ tgr192_update(desc, NULL, 0); /* flush */
memset(tctx->hash, 0, 56); /* fill next block with zeroes */
}
/* append the 64 bit count */
diff --git a/crypto/twofish_common.c b/crypto/twofish_common.c
index 222fc765c57a..d23fa531b91f 100644
--- a/crypto/twofish_common.c
+++ b/crypto/twofish_common.c
@@ -567,7 +567,7 @@ static const u8 calc_sb_tbl[512] = {
/* Perform the key setup. */
int __twofish_setkey(struct twofish_ctx *ctx, const u8 *key,
- unsigned int key_len, u32 *flags)
+ unsigned int key_len)
{
int i, j, k;
@@ -584,10 +584,7 @@ int __twofish_setkey(struct twofish_ctx *ctx, const u8 *key,
/* Check key length. */
if (key_len % 8)
- {
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL; /* unsupported key length */
- }
/* Compute the first two words of the S vector. The magic numbers are
* the entries of the RS matrix, preprocessed through poly_to_exp. The
@@ -688,8 +685,7 @@ EXPORT_SYMBOL_GPL(__twofish_setkey);
int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
{
- return __twofish_setkey(crypto_tfm_ctx(tfm), key, key_len,
- &tfm->crt_flags);
+ return __twofish_setkey(crypto_tfm_ctx(tfm), key, key_len);
}
EXPORT_SYMBOL_GPL(twofish_setkey);
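This hunk belongs to a tree-wide cleanup that recurs below in the vmac, xts, and xxhash diffs: setkey implementations used to pair an -EINVAL return with the CRYPTO_TFM_RES_BAD_KEY_LEN tfm flag, forcing helpers like __twofish_setkey() to take a flags pointer for that one purpose. With flag reporting removed, the return value is the whole contract. A hedged, generic illustration of the resulting convention (not the literal twofish code):

struct demo_ctx {
	u8 key[32];
	unsigned int key_len;
};

/* Post-cleanup convention: a bad key length is reported with a bare
 * -EINVAL and no tfm flag bookkeeping. */
static int demo_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct demo_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len > 32 || key_len % 8)
		return -EINVAL;	/* previously also set CRYPTO_TFM_RES_BAD_KEY_LEN */

	memcpy(ctx->key, key, key_len);
	ctx->key_len = key_len;
	return 0;
}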
diff --git a/crypto/vmac.c b/crypto/vmac.c
index f50a85060b39..2d906830df96 100644
--- a/crypto/vmac.c
+++ b/crypto/vmac.c
@@ -435,10 +435,8 @@ static int vmac_setkey(struct crypto_shash *tfm,
unsigned int i;
int err;
- if (keylen != VMAC_KEY_LEN) {
- crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != VMAC_KEY_LEN)
return -EINVAL;
- }
err = crypto_cipher_setkey(tctx->cipher, key, keylen);
if (err)
@@ -598,7 +596,7 @@ static int vmac_final(struct shash_desc *desc, u8 *out)
static int vmac_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
- struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
struct vmac_tfm_ctx *tctx = crypto_tfm_ctx(tfm);
struct crypto_cipher *cipher;
@@ -620,6 +618,7 @@ static void vmac_exit_tfm(struct crypto_tfm *tfm)
static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct shash_instance *inst;
+ struct crypto_cipher_spawn *spawn;
struct crypto_alg *alg;
int err;
@@ -627,25 +626,24 @@ static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
return err;
- alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
- CRYPTO_ALG_TYPE_MASK);
- if (IS_ERR(alg))
- return PTR_ERR(alg);
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return -ENOMEM;
+ spawn = shash_instance_ctx(inst);
+
+ err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, 0);
+ if (err)
+ goto err_free_inst;
+ alg = crypto_spawn_cipher_alg(spawn);
err = -EINVAL;
if (alg->cra_blocksize != VMAC_NONCEBYTES)
- goto out_put_alg;
+ goto err_free_inst;
- inst = shash_alloc_instance(tmpl->name, alg);
- err = PTR_ERR(inst);
- if (IS_ERR(inst))
- goto out_put_alg;
-
- err = crypto_init_spawn(shash_instance_ctx(inst), alg,
- shash_crypto_instance(inst),
- CRYPTO_ALG_TYPE_MASK);
+ err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
inst->alg.base.cra_priority = alg->cra_priority;
inst->alg.base.cra_blocksize = alg->cra_blocksize;
@@ -662,21 +660,19 @@ static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.final = vmac_final;
inst->alg.setkey = vmac_setkey;
+ inst->free = shash_free_singlespawn_instance;
+
err = shash_register_instance(tmpl, inst);
if (err) {
-out_free_inst:
- shash_free_instance(shash_crypto_instance(inst));
+err_free_inst:
+ shash_free_singlespawn_instance(inst);
}
-
-out_put_alg:
- crypto_mod_put(alg);
return err;
}
static struct crypto_template vmac64_tmpl = {
.name = "vmac64",
.create = vmac_create,
- .free = shash_free_instance,
.module = THIS_MODULE,
};
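The vmac64 conversion above (and the identical xcbc one just below) moves the template onto the newer single-spawn style: the instance and its one cipher spawn are allocated in a single kzalloc(), crypto_grab_cipher() both looks up and pins the underlying cipher, and shash_free_singlespawn_instance() serves as the sole teardown path for the error case and for ->free alike, so the template no longer needs its own .free callback or a crypto_mod_put(). Condensed into a hedged skeleton (template-specific setup elided):

/* Hedged skeleton of the single-spawn shash template pattern adopted
 * by the vmac and xcbc conversions; names come from the kernel crypto
 * API, the demo_ function is ours. */
static int demo_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_cipher_spawn *spawn;
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, 0);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	/* ... fill in inst->alg sizes and callbacks here ... */

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}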
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
index 0bb26e8f6f5a..598ec88abf0f 100644
--- a/crypto/xcbc.c
+++ b/crypto/xcbc.c
@@ -167,7 +167,7 @@ static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_cipher *cipher;
struct crypto_instance *inst = (void *)tfm->__crt_alg;
- struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
cipher = crypto_spawn_cipher(spawn);
@@ -188,6 +188,7 @@ static void xcbc_exit_tfm(struct crypto_tfm *tfm)
static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct shash_instance *inst;
+ struct crypto_cipher_spawn *spawn;
struct crypto_alg *alg;
unsigned long alignmask;
int err;
@@ -196,28 +197,24 @@ static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
if (err)
return err;
- alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
- CRYPTO_ALG_TYPE_MASK);
- if (IS_ERR(alg))
- return PTR_ERR(alg);
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return -ENOMEM;
+ spawn = shash_instance_ctx(inst);
- switch(alg->cra_blocksize) {
- case XCBC_BLOCKSIZE:
- break;
- default:
- goto out_put_alg;
- }
+ err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
+ crypto_attr_alg_name(tb[1]), 0, 0);
+ if (err)
+ goto err_free_inst;
+ alg = crypto_spawn_cipher_alg(spawn);
- inst = shash_alloc_instance("xcbc", alg);
- err = PTR_ERR(inst);
- if (IS_ERR(inst))
- goto out_put_alg;
+ err = -EINVAL;
+ if (alg->cra_blocksize != XCBC_BLOCKSIZE)
+ goto err_free_inst;
- err = crypto_init_spawn(shash_instance_ctx(inst), alg,
- shash_crypto_instance(inst),
- CRYPTO_ALG_TYPE_MASK);
+ err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
if (err)
- goto out_free_inst;
+ goto err_free_inst;
alignmask = alg->cra_alignmask | 3;
inst->alg.base.cra_alignmask = alignmask;
@@ -242,21 +239,19 @@ static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
inst->alg.final = crypto_xcbc_digest_final;
inst->alg.setkey = crypto_xcbc_digest_setkey;
+ inst->free = shash_free_singlespawn_instance;
+
err = shash_register_instance(tmpl, inst);
if (err) {
-out_free_inst:
- shash_free_instance(shash_crypto_instance(inst));
+err_free_inst:
+ shash_free_singlespawn_instance(inst);
}
-
-out_put_alg:
- crypto_mod_put(alg);
return err;
}
static struct crypto_template crypto_xcbc_tmpl = {
.name = "xcbc",
.create = xcbc_create,
- .free = shash_free_instance,
.module = THIS_MODULE,
};
diff --git a/crypto/xts.c b/crypto/xts.c
index 11211003db7e..29efa15f1495 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -1,8 +1,6 @@
// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
* http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
- * (sector sizes which are not a multiple of 16 bytes are,
- * however currently unsupported)
*
* Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
*
@@ -34,6 +32,8 @@ struct xts_instance_ctx {
struct rctx {
le128 t;
+ struct scatterlist *tail;
+ struct scatterlist sg[2];
struct skcipher_request subreq;
};
@@ -61,8 +61,6 @@ static int setkey(struct crypto_skcipher *parent, const u8 *key,
crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
err = crypto_cipher_setkey(tweak, key + keylen, keylen);
- crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(tweak) &
- CRYPTO_TFM_RES_MASK);
if (err)
return err;
@@ -71,11 +69,7 @@ static int setkey(struct crypto_skcipher *parent, const u8 *key,
crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_skcipher_setkey(child, key, keylen);
- crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
-
- return err;
+ return crypto_skcipher_setkey(child, key, keylen);
}
/*
@@ -84,10 +78,11 @@ static int setkey(struct crypto_skcipher *parent, const u8 *key,
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
* just doing the gf128mul_x_ble() calls again.
*/
-static int xor_tweak(struct skcipher_request *req, bool second_pass)
+static int xor_tweak(struct skcipher_request *req, bool second_pass, bool enc)
{
struct rctx *rctx = skcipher_request_ctx(req);
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+ const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
const int bs = XTS_BLOCK_SIZE;
struct skcipher_walk w;
le128 t = rctx->t;
@@ -109,6 +104,20 @@ static int xor_tweak(struct skcipher_request *req, bool second_pass)
wdst = w.dst.virt.addr;
do {
+ if (unlikely(cts) &&
+ w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
+ if (!enc) {
+ if (second_pass)
+ rctx->t = t;
+ gf128mul_x_ble(&t, &t);
+ }
+ le128_xor(wdst, &t, wsrc);
+ if (enc && second_pass)
+ gf128mul_x_ble(&rctx->t, &t);
+ skcipher_walk_done(&w, avail - bs);
+ return 0;
+ }
+
le128_xor(wdst++, &t, wsrc++);
gf128mul_x_ble(&t, &t);
} while ((avail -= bs) >= bs);
@@ -119,17 +128,91 @@ static int xor_tweak(struct skcipher_request *req, bool second_pass)
return err;
}
-static int xor_tweak_pre(struct skcipher_request *req)
+static int xor_tweak_pre(struct skcipher_request *req, bool enc)
+{
+ return xor_tweak(req, false, enc);
+}
+
+static int xor_tweak_post(struct skcipher_request *req, bool enc)
+{
+ return xor_tweak(req, true, enc);
+}
+
+static void cts_done(struct crypto_async_request *areq, int err)
{
- return xor_tweak(req, false);
+ struct skcipher_request *req = areq->data;
+ le128 b;
+
+ if (!err) {
+ struct rctx *rctx = skcipher_request_ctx(req);
+
+ scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
+ le128_xor(&b, &rctx->t, &b);
+ scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
+ }
+
+ skcipher_request_complete(req, err);
+}
+
+static int cts_final(struct skcipher_request *req,
+ int (*crypt)(struct skcipher_request *req))
+{
+ struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
+ int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
+ struct rctx *rctx = skcipher_request_ctx(req);
+ struct skcipher_request *subreq = &rctx->subreq;
+ int tail = req->cryptlen % XTS_BLOCK_SIZE;
+ le128 b[2];
+ int err;
+
+ rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
+ offset - XTS_BLOCK_SIZE);
+
+ scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
+ memcpy(b + 1, b, tail);
+ scatterwalk_map_and_copy(b, req->src, offset, tail, 0);
+
+ le128_xor(b, &rctx->t, b);
+
+ scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);
+
+ skcipher_request_set_tfm(subreq, ctx->child);
+ skcipher_request_set_callback(subreq, req->base.flags, cts_done, req);
+ skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
+ XTS_BLOCK_SIZE, NULL);
+
+ err = crypt(subreq);
+ if (err)
+ return err;
+
+ scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
+ le128_xor(b, &rctx->t, b);
+ scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
+
+ return 0;
}
-static int xor_tweak_post(struct skcipher_request *req)
+static void encrypt_done(struct crypto_async_request *areq, int err)
{
- return xor_tweak(req, true);
+ struct skcipher_request *req = areq->data;
+
+ if (!err) {
+ struct rctx *rctx = skcipher_request_ctx(req);
+
+ rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+ err = xor_tweak_post(req, true);
+
+ if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
+ err = cts_final(req, crypto_skcipher_encrypt);
+ if (err == -EINPROGRESS)
+ return;
+ }
+ }
+
+ skcipher_request_complete(req, err);
}
-static void crypt_done(struct crypto_async_request *areq, int err)
+static void decrypt_done(struct crypto_async_request *areq, int err)
{
struct skcipher_request *req = areq->data;
@@ -137,47 +220,70 @@ static void crypt_done(struct crypto_async_request *areq, int err)
struct rctx *rctx = skcipher_request_ctx(req);
rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
- err = xor_tweak_post(req);
+ err = xor_tweak_post(req, false);
+
+ if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
+ err = cts_final(req, crypto_skcipher_decrypt);
+ if (err == -EINPROGRESS)
+ return;
+ }
}
skcipher_request_complete(req, err);
}
-static void init_crypt(struct skcipher_request *req)
+static int init_crypt(struct skcipher_request *req, crypto_completion_t compl)
{
struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
struct rctx *rctx = skcipher_request_ctx(req);
struct skcipher_request *subreq = &rctx->subreq;
+ if (req->cryptlen < XTS_BLOCK_SIZE)
+ return -EINVAL;
+
skcipher_request_set_tfm(subreq, ctx->child);
- skcipher_request_set_callback(subreq, req->base.flags, crypt_done, req);
+ skcipher_request_set_callback(subreq, req->base.flags, compl, req);
skcipher_request_set_crypt(subreq, req->dst, req->dst,
- req->cryptlen, NULL);
+ req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);
/* calculate first value of T */
crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
+
+ return 0;
}
static int encrypt(struct skcipher_request *req)
{
struct rctx *rctx = skcipher_request_ctx(req);
struct skcipher_request *subreq = &rctx->subreq;
+ int err;
+
+ err = init_crypt(req, encrypt_done) ?:
+ xor_tweak_pre(req, true) ?:
+ crypto_skcipher_encrypt(subreq) ?:
+ xor_tweak_post(req, true);
- init_crypt(req);
- return xor_tweak_pre(req) ?:
- crypto_skcipher_encrypt(subreq) ?:
- xor_tweak_post(req);
+ if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
+ return err;
+
+ return cts_final(req, crypto_skcipher_encrypt);
}
static int decrypt(struct skcipher_request *req)
{
struct rctx *rctx = skcipher_request_ctx(req);
struct skcipher_request *subreq = &rctx->subreq;
+ int err;
+
+ err = init_crypt(req, decrypt_done) ?:
+ xor_tweak_pre(req, false) ?:
+ crypto_skcipher_decrypt(subreq) ?:
+ xor_tweak_post(req, false);
- init_crypt(req);
- return xor_tweak_pre(req) ?:
- crypto_skcipher_decrypt(subreq) ?:
- xor_tweak_post(req);
+ if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
+ return err;
+
+ return cts_final(req, crypto_skcipher_decrypt);
}
static int init_tfm(struct crypto_skcipher *tfm)
@@ -249,20 +355,21 @@ static int create(struct crypto_template *tmpl, struct rtattr **tb)
ctx = skcipher_instance_ctx(inst);
- crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
-
mask = crypto_requires_off(algt->type, algt->mask,
CRYPTO_ALG_NEED_FALLBACK |
CRYPTO_ALG_ASYNC);
- err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask);
+ err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
+ cipher_name, 0, mask);
if (err == -ENOENT) {
err = -ENAMETOOLONG;
if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
cipher_name) >= CRYPTO_MAX_ALG_NAME)
goto err_free_inst;
- err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask);
+ err = crypto_grab_skcipher(&ctx->spawn,
+ skcipher_crypto_instance(inst),
+ ctx->name, 0, mask);
}
if (err)
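The xts.c rework above teaches the template ciphertext stealing, so any request of at least one block is now accepted: the block-aligned bulk still goes through the two-pass xor_tweak()/ecb dance, while cts_final() handles a trailing partial block by re-encrypting the last full block under the following tweak and stealing its tail (decryption processes the last two tweaks in swapped order, which is why xor_tweak() grew the enc flag). Throughout, consecutive tweaks come from doubling in GF(2^128) with XTS's little-endian bit order. A self-contained, hedged sketch of that doubling step — the kernel's gf128mul_x_ble() computes the same thing on two 64-bit halves:

#include <stdint.h>

/* Multiply a 16-byte XTS tweak by x in GF(2^128), little-endian bit
 * order: shift the whole value left by one bit and, if a bit falls off
 * the top, reduce by the field polynomial x^128 + x^7 + x^2 + x + 1
 * (0x87 folded into the low byte). Byte-wise for clarity. */
static void xts_double_tweak(uint8_t t[16])
{
	uint8_t carry = 0;

	for (int i = 0; i < 16; i++) {
		uint8_t next = t[i] >> 7;	/* bit shifted out of this byte */

		t[i] = (uint8_t)((t[i] << 1) | carry);
		carry = next;
	}
	if (carry)
		t[0] ^= 0x87;
}

encrypt() seeds the chain by encrypting the IV with the tweak cipher (init_crypt()'s "first value of T") and then doubles once per block; for the stolen block, cts_final() reuses the tweak that xor_tweak() stashed in rctx->t.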
diff --git a/crypto/xxhash_generic.c b/crypto/xxhash_generic.c
index 4aad2c0f40a9..55d1c8a76127 100644
--- a/crypto/xxhash_generic.c
+++ b/crypto/xxhash_generic.c
@@ -22,10 +22,8 @@ static int xxhash64_setkey(struct crypto_shash *tfm, const u8 *key,
{
struct xxhash64_tfm_ctx *tctx = crypto_shash_ctx(tfm);
- if (keylen != sizeof(tctx->seed)) {
- crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != sizeof(tctx->seed))
return -EINVAL;
- }
tctx->seed = get_unaligned_le64(key);
return 0;
}