Diffstat (limited to 'include/crypto/internal')
-rw-r--r-- | include/crypto/internal/acompress.h  |   4
-rw-r--r-- | include/crypto/internal/aead.h       |  21
-rw-r--r-- | include/crypto/internal/akcipher.h   |  12
-rw-r--r-- | include/crypto/internal/blake2s.h    |  24
-rw-r--r-- | include/crypto/internal/chacha.h     |  43
-rw-r--r-- | include/crypto/internal/cryptouser.h |   7
-rw-r--r-- | include/crypto/internal/des.h        | 127
-rw-r--r-- | include/crypto/internal/geniv.h      |   1
-rw-r--r-- | include/crypto/internal/hash.h       |  90
-rw-r--r-- | include/crypto/internal/poly1305.h   |  33
-rw-r--r-- | include/crypto/internal/scompress.h  |   4
-rw-r--r-- | include/crypto/internal/skcipher.h   |  88
12 files changed, 306 insertions, 148 deletions
diff --git a/include/crypto/internal/acompress.h b/include/crypto/internal/acompress.h
index 9de57367afbb..cf478681b53e 100644
--- a/include/crypto/internal/acompress.h
+++ b/include/crypto/internal/acompress.h
@@ -68,10 +68,8 @@ int crypto_register_acomp(struct acomp_alg *alg);
  * compression algorithm
  *
  * @alg: algorithm definition
- *
- * Return: zero on success; error code in case of error
  */
-int crypto_unregister_acomp(struct acomp_alg *alg);
+void crypto_unregister_acomp(struct acomp_alg *alg);
 
 int crypto_register_acomps(struct acomp_alg *algs, int count);
 void crypto_unregister_acomps(struct acomp_alg *algs, int count);
diff --git a/include/crypto/internal/aead.h b/include/crypto/internal/aead.h
index c509ec30fc65..27b7b0224ea6 100644
--- a/include/crypto/internal/aead.h
+++ b/include/crypto/internal/aead.h
@@ -81,14 +81,9 @@ static inline struct aead_request *aead_request_cast(
         return container_of(req, struct aead_request, base);
 }
 
-static inline void crypto_set_aead_spawn(
-        struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
-{
-        crypto_set_spawn(&spawn->base, inst);
-}
-
-int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
-                     u32 type, u32 mask);
+int crypto_grab_aead(struct crypto_aead_spawn *spawn,
+                     struct crypto_instance *inst,
+                     const char *name, u32 type, u32 mask);
 
 static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
 {
@@ -113,16 +108,6 @@ static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
         aead->reqsize = reqsize;
 }
 
-static inline unsigned int crypto_aead_alg_maxauthsize(struct aead_alg *alg)
-{
-        return alg->maxauthsize;
-}
-
-static inline unsigned int crypto_aead_maxauthsize(struct crypto_aead *aead)
-{
-        return crypto_aead_alg_maxauthsize(crypto_aead_alg(aead));
-}
-
 static inline void aead_init_queue(struct aead_queue *queue,
                                    unsigned int max_qlen)
 {
diff --git a/include/crypto/internal/akcipher.h b/include/crypto/internal/akcipher.h
index d6c8a42789ad..8d3220c9ab77 100644
--- a/include/crypto/internal/akcipher.h
+++ b/include/crypto/internal/akcipher.h
@@ -78,15 +78,9 @@ static inline void *akcipher_instance_ctx(struct akcipher_instance *inst)
         return crypto_instance_ctx(akcipher_crypto_instance(inst));
 }
 
-static inline void crypto_set_akcipher_spawn(
-                struct crypto_akcipher_spawn *spawn,
-                struct crypto_instance *inst)
-{
-        crypto_set_spawn(&spawn->base, inst);
-}
-
-int crypto_grab_akcipher(struct crypto_akcipher_spawn *spawn, const char *name,
-                         u32 type, u32 mask);
+int crypto_grab_akcipher(struct crypto_akcipher_spawn *spawn,
+                         struct crypto_instance *inst,
+                         const char *name, u32 type, u32 mask);
 
 static inline struct crypto_akcipher *crypto_spawn_akcipher(
         struct crypto_akcipher_spawn *spawn)
diff --git a/include/crypto/internal/blake2s.h b/include/crypto/internal/blake2s.h
new file mode 100644
index 000000000000..74ff77032e52
--- /dev/null
+++ b/include/crypto/internal/blake2s.h
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: GPL-2.0 OR MIT */
+
+#ifndef BLAKE2S_INTERNAL_H
+#define BLAKE2S_INTERNAL_H
+
+#include <crypto/blake2s.h>
+
+struct blake2s_tfm_ctx {
+        u8 key[BLAKE2S_KEY_SIZE];
+        unsigned int keylen;
+};
+
+void blake2s_compress_generic(struct blake2s_state *state,const u8 *block,
+                              size_t nblocks, const u32 inc);
+
+void blake2s_compress_arch(struct blake2s_state *state,const u8 *block,
+                           size_t nblocks, const u32 inc);
+
+static inline void blake2s_set_lastblock(struct blake2s_state *state)
+{
+        state->f[0] = -1;
+}
+
+#endif /* BLAKE2S_INTERNAL_H */
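The aead and akcipher hunks above change the crypto_grab_*() calling convention: the instance that owns the spawn is now passed directly to the grab call, and the crypto_set_*_spawn() helpers disappear. A minimal sketch of what a template's ->create() might look like under the new convention follows; the function name, the zero type/mask values and the omitted algorithm setup are placeholder assumptions, not part of this patch.

/* Sketch only -- error paths and mask handling simplified. */
#include <crypto/internal/aead.h>
#include <crypto/algapi.h>
#include <linux/slab.h>

static int example_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_aead_spawn *spawn;
        struct aead_instance *inst;
        int err;

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;
        spawn = aead_instance_ctx(inst);

        /*
         * Previously: crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
         *             crypto_grab_aead(spawn, name, type, mask);
         * Now the owning instance is passed in the same call:
         */
        err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
                               crypto_attr_alg_name(tb[1]), 0, 0);
        if (err) {
                kfree(inst);
                return err;
        }

        /* ... fill in inst->alg and call aead_register_instance() ... */
        return 0;
}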
diff --git a/include/crypto/internal/chacha.h b/include/crypto/internal/chacha.h
new file mode 100644
index 000000000000..b085dc1ac151
--- /dev/null
+++ b/include/crypto/internal/chacha.h
@@ -0,0 +1,43 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+
+#ifndef _CRYPTO_INTERNAL_CHACHA_H
+#define _CRYPTO_INTERNAL_CHACHA_H
+
+#include <crypto/chacha.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/crypto.h>
+
+struct chacha_ctx {
+        u32 key[8];
+        int nrounds;
+};
+
+static inline int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
+                                unsigned int keysize, int nrounds)
+{
+        struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
+        int i;
+
+        if (keysize != CHACHA_KEY_SIZE)
+                return -EINVAL;
+
+        for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
+                ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
+
+        ctx->nrounds = nrounds;
+        return 0;
+}
+
+static inline int chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
+                                  unsigned int keysize)
+{
+        return chacha_setkey(tfm, key, keysize, 20);
+}
+
+static inline int chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
+                                  unsigned int keysize)
+{
+        return chacha_setkey(tfm, key, keysize, 12);
+}
+
+#endif /* _CRYPTO_CHACHA_H */
diff --git a/include/crypto/internal/cryptouser.h b/include/crypto/internal/cryptouser.h
index 8c602b187c58..fd54074332f5 100644
--- a/include/crypto/internal/cryptouser.h
+++ b/include/crypto/internal/cryptouser.h
@@ -1,14 +1,15 @@
 /* SPDX-License-Identifier: GPL-2.0 */
+#include <linux/cryptouser.h>
 #include <net/netlink.h>
 
-extern struct sock *crypto_nlsk;
-
 struct crypto_alg *crypto_alg_match(struct crypto_user_alg *p, int exact);
 
 #ifdef CONFIG_CRYPTO_STATS
 int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh, struct nlattr **attrs);
 #else
-static int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh, struct nlattr **attrs)
+static inline int crypto_reportstat(struct sk_buff *in_skb,
+                                    struct nlmsghdr *in_nlh,
+                                    struct nlattr **attrs)
 {
         return -ENOTSUPP;
 }
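The new chacha.h carries the skcipher context and setkey helpers shared between the generic and arch ChaCha drivers. A hedged sketch of how a driver might wire chacha20_setkey() into its skcipher_alg follows; the driver name, priority and the encrypt/decrypt handler are hypothetical placeholders.

/* Sketch only -- names and handlers are placeholders. */
#include <crypto/internal/chacha.h>
#include <linux/module.h>

static int example_chacha_crypt(struct skcipher_request *req);        /* placeholder */

static struct skcipher_alg example_chacha_alg = {
        .base.cra_name          = "chacha20",
        .base.cra_driver_name   = "chacha20-example",
        .base.cra_priority      = 100,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct chacha_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = CHACHA_KEY_SIZE,
        .max_keysize            = CHACHA_KEY_SIZE,
        .ivsize                 = CHACHA_IV_SIZE,
        .chunksize              = CHACHA_BLOCK_SIZE,
        .setkey                 = chacha20_setkey,      /* shared helper from this header */
        .encrypt                = example_chacha_crypt,
        .decrypt                = example_chacha_crypt,
};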
diff --git a/include/crypto/internal/des.h b/include/crypto/internal/des.h
new file mode 100644
index 000000000000..723fe5bf16da
--- /dev/null
+++ b/include/crypto/internal/des.h
@@ -0,0 +1,127 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+/*
+ * DES & Triple DES EDE key verification helpers
+ */
+
+#ifndef __CRYPTO_INTERNAL_DES_H
+#define __CRYPTO_INTERNAL_DES_H
+
+#include <linux/crypto.h>
+#include <linux/fips.h>
+#include <crypto/des.h>
+#include <crypto/aead.h>
+#include <crypto/skcipher.h>
+
+/**
+ * crypto_des_verify_key - Check whether a DES key is weak
+ * @tfm: the crypto algo
+ * @key: the key buffer
+ *
+ * Returns -EINVAL if the key is weak and the crypto TFM does not permit weak
+ * keys. Otherwise, 0 is returned.
+ *
+ * It is the job of the caller to ensure that the size of the key equals
+ * DES_KEY_SIZE.
+ */
+static inline int crypto_des_verify_key(struct crypto_tfm *tfm, const u8 *key)
+{
+        struct des_ctx tmp;
+        int err;
+
+        err = des_expand_key(&tmp, key, DES_KEY_SIZE);
+        if (err == -ENOKEY) {
+                if (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)
+                        err = -EINVAL;
+                else
+                        err = 0;
+        }
+        memzero_explicit(&tmp, sizeof(tmp));
+        return err;
+}
+
+/*
+ * RFC2451:
+ *
+ *   For DES-EDE3, there is no known need to reject weak or
+ *   complementation keys.  Any weakness is obviated by the use of
+ *   multiple keys.
+ *
+ *   However, if the first two or last two independent 64-bit keys are
+ *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
+ *   same as DES.  Implementers MUST reject keys that exhibit this
+ *   property.
+ *
+ */
+static inline int des3_ede_verify_key(const u8 *key, unsigned int key_len,
+                                      bool check_weak)
+{
+        int ret = fips_enabled ? -EINVAL : -ENOKEY;
+        u32 K[6];
+
+        memcpy(K, key, DES3_EDE_KEY_SIZE);
+
+        if ((!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
+             !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
+            (fips_enabled || check_weak))
+                goto bad;
+
+        if ((!((K[0] ^ K[4]) | (K[1] ^ K[5]))) && fips_enabled)
+                goto bad;
+
+        ret = 0;
+bad:
+        memzero_explicit(K, DES3_EDE_KEY_SIZE);
+
+        return ret;
+}
+
+/**
+ * crypto_des3_ede_verify_key - Check whether a DES3-EDE key is weak
+ * @tfm: the crypto algo
+ * @key: the key buffer
+ *
+ * Returns -EINVAL if the key is weak and the crypto TFM does not permit weak
+ * keys or when running in FIPS mode. Otherwise, 0 is returned. Note that some
+ * keys are rejected in FIPS mode even if weak keys are permitted by the TFM
+ * flags.
+ *
+ * It is the job of the caller to ensure that the size of the key equals
+ * DES3_EDE_KEY_SIZE.
+ */
+static inline int crypto_des3_ede_verify_key(struct crypto_tfm *tfm,
+                                             const u8 *key)
+{
+        return des3_ede_verify_key(key, DES3_EDE_KEY_SIZE,
+                                   crypto_tfm_get_flags(tfm) &
+                                   CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
+}
+
+static inline int verify_skcipher_des_key(struct crypto_skcipher *tfm,
+                                          const u8 *key)
+{
+        return crypto_des_verify_key(crypto_skcipher_tfm(tfm), key);
+}
+
+static inline int verify_skcipher_des3_key(struct crypto_skcipher *tfm,
+                                           const u8 *key)
+{
+        return crypto_des3_ede_verify_key(crypto_skcipher_tfm(tfm), key);
+}
+
+static inline int verify_aead_des_key(struct crypto_aead *tfm, const u8 *key,
+                                      int keylen)
+{
+        if (keylen != DES_KEY_SIZE)
+                return -EINVAL;
+        return crypto_des_verify_key(crypto_aead_tfm(tfm), key);
+}
+
+static inline int verify_aead_des3_key(struct crypto_aead *tfm, const u8 *key,
+                                       int keylen)
+{
+        if (keylen != DES3_EDE_KEY_SIZE)
+                return -EINVAL;
+        return crypto_des3_ede_verify_key(crypto_aead_tfm(tfm), key);
+}
+
+#endif /* __CRYPTO_INTERNAL_DES_H */
diff --git a/include/crypto/internal/geniv.h b/include/crypto/internal/geniv.h
index 0108c0c7b2ed..229d37681a9d 100644
--- a/include/crypto/internal/geniv.h
+++ b/include/crypto/internal/geniv.h
@@ -21,7 +21,6 @@ struct aead_geniv_ctx {
 
 struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
                                        struct rtattr **tb, u32 type, u32 mask);
-void aead_geniv_free(struct aead_instance *inst);
 int aead_init_geniv(struct crypto_aead *tfm);
 void aead_exit_geniv(struct crypto_aead *tfm);
 
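The new des.h centralises DES/3DES weak-key policy, including the RFC 2451 k1 == k2 / k2 == k3 rejection and the stricter FIPS rules. A sketch of a typical skcipher ->setkey() using verify_skcipher_des3_key() is shown below; the context layout and the idea of passing the raw key straight to hardware are illustrative assumptions. keylen is already constrained to DES3_EDE_KEY_SIZE by the algorithm's min/max keysize, as the helper's kerneldoc requires.

/* Sketch only -- context structure and key handling are illustrative. */
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>

struct example_des3_ctx {
        u8 key[DES3_EDE_KEY_SIZE];
};

static int example_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct example_des3_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = verify_skcipher_des3_key(tfm, key);       /* weak-key / FIPS policy */
        if (err)
                return err;

        memcpy(ctx->key, key, DES3_EDE_KEY_SIZE);       /* hand raw key to the engine */
        return 0;
}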
diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h
index bfc9db7b100d..89f6f46ab2b8 100644
--- a/include/crypto/internal/hash.h
+++ b/include/crypto/internal/hash.h
@@ -30,11 +30,25 @@ struct crypto_hash_walk {
 };
 
 struct ahash_instance {
-        struct ahash_alg alg;
+        void (*free)(struct ahash_instance *inst);
+        union {
+                struct {
+                        char head[offsetof(struct ahash_alg, halg.base)];
+                        struct crypto_instance base;
+                } s;
+                struct ahash_alg alg;
+        };
 };
 
 struct shash_instance {
-        struct shash_alg alg;
+        void (*free)(struct shash_instance *inst);
+        union {
+                struct {
+                        char head[offsetof(struct shash_alg, base)];
+                        struct crypto_instance base;
+                } s;
+                struct shash_alg alg;
+        };
 };
 
 struct crypto_ahash_spawn {
@@ -45,8 +59,6 @@ struct crypto_shash_spawn {
         struct crypto_spawn base;
 };
 
-extern const struct crypto_type crypto_ahash_type;
-
 int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
 int crypto_hash_walk_first(struct ahash_request *req,
                            struct crypto_hash_walk *walk);
@@ -70,12 +82,11 @@ static inline int crypto_ahash_walk_last(struct crypto_hash_walk *walk)
 }
 
 int crypto_register_ahash(struct ahash_alg *alg);
-int crypto_unregister_ahash(struct ahash_alg *alg);
+void crypto_unregister_ahash(struct ahash_alg *alg);
 int crypto_register_ahashes(struct ahash_alg *algs, int count);
 void crypto_unregister_ahashes(struct ahash_alg *algs, int count);
 int ahash_register_instance(struct crypto_template *tmpl,
                             struct ahash_instance *inst);
-void ahash_free_instance(struct crypto_instance *inst);
 
 int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                     unsigned int keylen);
@@ -85,37 +96,51 @@ static inline bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
         return alg->setkey != shash_no_setkey;
 }
 
+static inline bool crypto_shash_alg_needs_key(struct shash_alg *alg)
+{
+        return crypto_shash_alg_has_setkey(alg) &&
+                !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY);
+}
+
 bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg);
 
-int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
-                            struct hash_alg_common *alg,
-                            struct crypto_instance *inst);
+int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
+                      struct crypto_instance *inst,
+                      const char *name, u32 type, u32 mask);
 
 static inline void crypto_drop_ahash(struct crypto_ahash_spawn *spawn)
 {
         crypto_drop_spawn(&spawn->base);
 }
 
-struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask);
+static inline struct hash_alg_common *crypto_spawn_ahash_alg(
+        struct crypto_ahash_spawn *spawn)
+{
+        return __crypto_hash_alg_common(spawn->base.alg);
+}
 
 int crypto_register_shash(struct shash_alg *alg);
-int crypto_unregister_shash(struct shash_alg *alg);
+void crypto_unregister_shash(struct shash_alg *alg);
 int crypto_register_shashes(struct shash_alg *algs, int count);
-int crypto_unregister_shashes(struct shash_alg *algs, int count);
+void crypto_unregister_shashes(struct shash_alg *algs, int count);
 int shash_register_instance(struct crypto_template *tmpl,
                             struct shash_instance *inst);
-void shash_free_instance(struct crypto_instance *inst);
+void shash_free_singlespawn_instance(struct shash_instance *inst);
 
-int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
-                            struct shash_alg *alg,
-                            struct crypto_instance *inst);
+int crypto_grab_shash(struct crypto_shash_spawn *spawn,
+                      struct crypto_instance *inst,
+                      const char *name, u32 type, u32 mask);
 
 static inline void crypto_drop_shash(struct crypto_shash_spawn *spawn)
 {
         crypto_drop_spawn(&spawn->base);
 }
 
-struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask);
+static inline struct shash_alg *crypto_spawn_shash_alg(
+        struct crypto_shash_spawn *spawn)
+{
+        return __crypto_shash_alg(spawn->base.alg);
+}
 
 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc);
 int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc);
@@ -143,13 +168,13 @@ static inline void crypto_ahash_set_reqsize(struct crypto_ahash *tfm,
 static inline struct crypto_instance *ahash_crypto_instance(
         struct ahash_instance *inst)
 {
-        return container_of(&inst->alg.halg.base, struct crypto_instance, alg);
+        return &inst->s.base;
 }
 
 static inline struct ahash_instance *ahash_instance(
         struct crypto_instance *inst)
 {
-        return container_of(&inst->alg, struct ahash_instance, alg.halg.base);
+        return container_of(inst, struct ahash_instance, s.base);
 }
 
 static inline void *ahash_instance_ctx(struct ahash_instance *inst)
@@ -157,17 +182,6 @@ static inline void *ahash_instance_ctx(struct ahash_instance *inst)
         return crypto_instance_ctx(ahash_crypto_instance(inst));
 }
 
-static inline unsigned int ahash_instance_headroom(void)
-{
-        return sizeof(struct ahash_alg) - sizeof(struct crypto_alg);
-}
-
-static inline struct ahash_instance *ahash_alloc_instance(
-        const char *name, struct crypto_alg *alg)
-{
-        return crypto_alloc_instance(name, alg, ahash_instance_headroom());
-}
-
 static inline void ahash_request_complete(struct ahash_request *req, int err)
 {
         req->base.complete(&req->base, err);
@@ -204,26 +218,24 @@ static inline void *crypto_shash_ctx(struct crypto_shash *tfm)
 static inline struct crypto_instance *shash_crypto_instance(
         struct shash_instance *inst)
 {
-        return container_of(&inst->alg.base, struct crypto_instance, alg);
+        return &inst->s.base;
 }
 
 static inline struct shash_instance *shash_instance(
         struct crypto_instance *inst)
 {
-        return container_of(__crypto_shash_alg(&inst->alg),
-                            struct shash_instance, alg);
+        return container_of(inst, struct shash_instance, s.base);
 }
 
-static inline void *shash_instance_ctx(struct shash_instance *inst)
+static inline struct shash_instance *shash_alg_instance(
+        struct crypto_shash *shash)
 {
-        return crypto_instance_ctx(shash_crypto_instance(inst));
+        return shash_instance(crypto_tfm_alg_instance(&shash->base));
 }
 
-static inline struct shash_instance *shash_alloc_instance(
-        const char *name, struct crypto_alg *alg)
+static inline void *shash_instance_ctx(struct shash_instance *inst)
 {
-        return crypto_alloc_instance(name, alg,
-                                     sizeof(struct shash_alg) - sizeof(*alg));
+        return crypto_instance_ctx(shash_crypto_instance(inst));
 }
 
 static inline struct crypto_shash *crypto_spawn_shash(
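The hash.h changes mirror the grab-API rework: templates now call crypto_grab_ahash()/crypto_grab_shash() with the owning instance, read the inner algorithm through crypto_spawn_ahash_alg()/crypto_spawn_shash_alg() instead of the removed *_attr_alg() helpers, and hang a ->free callback off the instance itself. The sketch below shows the rough shape of a single-spawn shash template under the new layout; the allocation size, attribute handling and the trimmed name/driver setup are assumptions for illustration, not a complete template.

/* Sketch only -- cra_name setup, flags and attribute parsing are trimmed. */
#include <crypto/internal/hash.h>
#include <crypto/algapi.h>
#include <linux/slab.h>

static int example_shash_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_shash_spawn *spawn;
        struct shash_instance *inst;
        struct shash_alg *salg;
        int err;

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;
        spawn = shash_instance_ctx(inst);

        err = crypto_grab_shash(spawn, shash_crypto_instance(inst),
                                crypto_attr_alg_name(tb[1]), 0, 0);
        if (err)
                goto err_free_inst;

        salg = crypto_spawn_shash_alg(spawn);   /* replaces shash_attr_alg() */
        inst->alg.digestsize = salg->digestsize;
        inst->alg.statesize  = salg->statesize;

        /* the instance now carries its own destructor */
        inst->free = shash_free_singlespawn_instance;

        err = shash_register_instance(tmpl, inst);
        if (err) {
err_free_inst:
                shash_free_singlespawn_instance(inst);
        }
        return err;
}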
diff --git a/include/crypto/internal/poly1305.h b/include/crypto/internal/poly1305.h
new file mode 100644
index 000000000000..064e52ca5248
--- /dev/null
+++ b/include/crypto/internal/poly1305.h
@@ -0,0 +1,33 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+/*
+ * Common values for the Poly1305 algorithm
+ */
+
+#ifndef _CRYPTO_INTERNAL_POLY1305_H
+#define _CRYPTO_INTERNAL_POLY1305_H
+
+#include <asm/unaligned.h>
+#include <linux/types.h>
+#include <crypto/poly1305.h>
+
+/*
+ * Poly1305 core functions.  These only accept whole blocks; the caller must
+ * handle any needed block buffering and padding.  'hibit' must be 1 for any
+ * full blocks, or 0 for the final block if it had to be padded.  If 'nonce' is
+ * non-NULL, then it's added at the end to compute the Poly1305 MAC.  Otherwise,
+ * only the ε-almost-∆-universal hash function (not the full MAC) is computed.
+ */
+
+void poly1305_core_setkey(struct poly1305_core_key *key, const u8 *raw_key);
+static inline void poly1305_core_init(struct poly1305_state *state)
+{
+        *state = (struct poly1305_state){};
+}
+
+void poly1305_core_blocks(struct poly1305_state *state,
+                          const struct poly1305_core_key *key, const void *src,
+                          unsigned int nblocks, u32 hibit);
+void poly1305_core_emit(const struct poly1305_state *state, const u32 nonce[4],
+                        void *dst);
+
+#endif
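As the comment in the new header states, the core functions work on whole 16-byte blocks and leave buffering, padding and key splitting to the caller. Under that assumption, a one-shot MAC over an already block-aligned message could look like the sketch below, where key_r is the 16-byte r portion of the one-time key and nonce is the s portion; the function name is hypothetical.

/* Sketch only -- assumes msg is a whole number of 16-byte blocks. */
#include <crypto/internal/poly1305.h>

static void example_poly1305(const u8 key_r[16], const u32 nonce[4],
                             const u8 *msg, unsigned int nblocks, u8 mac[16])
{
        struct poly1305_core_key key;
        struct poly1305_state state;

        poly1305_core_setkey(&key, key_r);                      /* clamp and store r */
        poly1305_core_init(&state);
        poly1305_core_blocks(&state, &key, msg, nblocks, 1);    /* hibit=1: no padding */
        poly1305_core_emit(&state, nonce, mac);                 /* add s (nonce) -> MAC */
}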
diff --git a/include/crypto/internal/scompress.h b/include/crypto/internal/scompress.h
index 6727ef0fc4d1..f834274c2493 100644
--- a/include/crypto/internal/scompress.h
+++ b/include/crypto/internal/scompress.h
@@ -112,10 +112,8 @@ int crypto_register_scomp(struct scomp_alg *alg);
  * compression algorithm
  *
  * @alg: algorithm definition
- *
- * Return: zero on success; error code in case of error
  */
-int crypto_unregister_scomp(struct scomp_alg *alg);
+void crypto_unregister_scomp(struct scomp_alg *alg);
 
 int crypto_register_scomps(struct scomp_alg *algs, int count);
 void crypto_unregister_scomps(struct scomp_alg *algs, int count);
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
index d68faa5759ad..10226c12c5df 100644
--- a/include/crypto/internal/skcipher.h
+++ b/include/crypto/internal/skcipher.h
@@ -88,14 +88,9 @@ static inline void skcipher_request_complete(struct skcipher_request *req, int e
         req->base.complete(&req->base, err);
 }
 
-static inline void crypto_set_skcipher_spawn(
-        struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
-{
-        crypto_set_spawn(&spawn->base, inst);
-}
-
-int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
-                         u32 type, u32 mask);
+int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
+                         struct crypto_instance *inst,
+                         const char *name, u32 type, u32 mask);
 
 static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
 {
@@ -140,23 +135,15 @@ int skcipher_walk_virt(struct skcipher_walk *walk,
 void skcipher_walk_atomise(struct skcipher_walk *walk);
 int skcipher_walk_async(struct skcipher_walk *walk,
                         struct skcipher_request *req);
-int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
-                       bool atomic);
 int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
                                struct aead_request *req, bool atomic);
 int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
                                struct aead_request *req, bool atomic);
 void skcipher_walk_complete(struct skcipher_walk *walk, int err);
 
-static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
-                                               int err)
+static inline void skcipher_walk_abort(struct skcipher_walk *walk)
 {
-        req->base.complete(&req->base, err);
-}
-
-static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
-{
-        return req->base.flags;
+        skcipher_walk_done(walk, -ECANCELED);
 }
 
 static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
@@ -177,73 +164,22 @@ static inline u32 skcipher_request_flags(struct skcipher_request *req)
 static inline unsigned int crypto_skcipher_alg_min_keysize(
         struct skcipher_alg *alg)
 {
-        if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-            CRYPTO_ALG_TYPE_BLKCIPHER)
-                return alg->base.cra_blkcipher.min_keysize;
-
-        if (alg->base.cra_ablkcipher.encrypt)
-                return alg->base.cra_ablkcipher.min_keysize;
-
         return alg->min_keysize;
 }
 
 static inline unsigned int crypto_skcipher_alg_max_keysize(
         struct skcipher_alg *alg)
 {
-        if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-            CRYPTO_ALG_TYPE_BLKCIPHER)
-                return alg->base.cra_blkcipher.max_keysize;
-
-        if (alg->base.cra_ablkcipher.encrypt)
-                return alg->base.cra_ablkcipher.max_keysize;
-
         return alg->max_keysize;
 }
 
-static inline unsigned int crypto_skcipher_alg_chunksize(
-        struct skcipher_alg *alg)
-{
-        if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-            CRYPTO_ALG_TYPE_BLKCIPHER)
-                return alg->base.cra_blocksize;
-
-        if (alg->base.cra_ablkcipher.encrypt)
-                return alg->base.cra_blocksize;
-
-        return alg->chunksize;
-}
-
 static inline unsigned int crypto_skcipher_alg_walksize(
         struct skcipher_alg *alg)
 {
-        if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
-            CRYPTO_ALG_TYPE_BLKCIPHER)
-                return alg->base.cra_blocksize;
-
-        if (alg->base.cra_ablkcipher.encrypt)
-                return alg->base.cra_blocksize;
-
         return alg->walksize;
 }
 
 /**
- * crypto_skcipher_chunksize() - obtain chunk size
- * @tfm: cipher handle
- *
- * The block size is set to one for ciphers such as CTR.  However,
- * you still need to provide incremental updates in multiples of
- * the underlying block size as the IV does not have sub-block
- * granularity.  This is known in this API as the chunk size.
- *
- * Return: chunk size in bytes
- */
-static inline unsigned int crypto_skcipher_chunksize(
-        struct crypto_skcipher *tfm)
-{
-        return crypto_skcipher_alg_chunksize(crypto_skcipher_alg(tfm));
-}
-
-/**
  * crypto_skcipher_walksize() - obtain walk size
  * @tfm: cipher handle
  *
@@ -271,9 +207,17 @@ skcipher_cipher_simple(struct crypto_skcipher *tfm)
         return ctx->cipher;
 }
 
-struct skcipher_instance *
-skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
-                               struct crypto_alg **cipher_alg_ret);
+struct skcipher_instance *skcipher_alloc_instance_simple(
+        struct crypto_template *tmpl, struct rtattr **tb);
+
+static inline struct crypto_alg *skcipher_ialg_simple(
+        struct skcipher_instance *inst)
+{
+        struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
+
+        return crypto_spawn_cipher_alg(spawn);
+}
 
 #endif  /* _CRYPTO_INTERNAL_SKCIPHER_H */
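skcipher_alloc_instance_simple() no longer hands back the underlying cipher algorithm through an output parameter; a template fetches it afterwards with the new skcipher_ialg_simple() helper. A hedged sketch of a simple mode template's ->create() under the new interface follows; the function name and the CTR-like blocksize/chunksize tweaks are only an example of mode-specific setup.

/* Sketch only -- the blocksize/chunksize tweaks stand in for real mode setup. */
#include <crypto/internal/skcipher.h>
#include <linux/err.h>

static int example_mode_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct skcipher_instance *inst;
        struct crypto_alg *alg;
        int err;

        inst = skcipher_alloc_instance_simple(tmpl, tb);
        if (IS_ERR(inst))
                return PTR_ERR(inst);

        /* the inner cipher is read from the instance, not returned by pointer */
        alg = skcipher_ialg_simple(inst);

        inst->alg.base.cra_blocksize = 1;               /* stream-cipher-like mode */
        inst->alg.chunksize = alg->cra_blocksize;
        /* inst->alg.encrypt / .decrypt handlers would be set here */

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                inst->free(inst);
        return err;
}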