author    Herbert Xu <herbert@gondor.apana.org.au>    2005-07-06 13:51:52 -0700
committer David S. Miller <davem@davemloft.net>       2005-07-06 13:51:52 -0700
commit    40725181b74be6b0e3bdc8c05bd1e0b9873ec5cc (patch)
tree      abbc1057a5e0bd77385d17cfc6146617151e93bc
parent    c774e93e2152d0be2612739418689e6e6400f4eb (diff)
[CRYPTO] Add support for low-level multi-block operations
This patch adds hooks for cipher algorithms to implement multi-block ECB/CBC operations directly. This is expected to provide significant performance boosts to the VIA Padlock. It could also be used for improving software implementations such as AES where operating on multiple blocks at a time may enable certain optimisations.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>
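As a rough illustration of what the new hooks enable (not part of this patch; all example_* names and EXAMPLE_BLOCK_SIZE are invented for this sketch), a cipher that can encrypt several blocks in one call might provide cia_encrypt_ecb along these lines:

/*
 * Hypothetical sketch only.  example_multiblock_encrypt() stands in for
 * whatever multi-block primitive the hardware or optimised software
 * implementation actually offers.
 */
#include <linux/crypto.h>

#define EXAMPLE_BLOCK_SIZE 16			/* assumed 128-bit block size */

struct example_ctx {
	u8 key[32];				/* placeholder key material */
};

/* Stand-in for the driver's real multi-block encryption primitive. */
static void example_multiblock_encrypt(struct example_ctx *ctx, u8 *dst,
				       const u8 *src, unsigned int blocks);

static unsigned int example_encrypt_ecb(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct example_ctx *ctx = crypto_tfm_ctx(desc->tfm);
	unsigned int blocks = nbytes / EXAMPLE_BLOCK_SIZE;

	/* Handle all whole blocks in one call instead of one at a time. */
	example_multiblock_encrypt(ctx, dst, src, blocks);

	/* Return the number of bytes actually processed. */
	return blocks * EXAMPLE_BLOCK_SIZE;
}

Such a routine would be plugged into the new cia_encrypt_ecb member of struct cipher_alg; when the member is left NULL, the `?:` fallbacks below keep the existing per-block ecb_process/cbc_process behaviour.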
-rw-r--r--  crypto/cipher.c         | 38
-rw-r--r--  crypto/internal.h       |  5
-rw-r--r--  include/linux/crypto.h  | 28
3 files changed, 45 insertions, 26 deletions
diff --git a/crypto/cipher.c b/crypto/cipher.c
index c4243345b154..54c4a560070d 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -23,14 +23,6 @@
#include "internal.h"
#include "scatterwalk.h"
-struct cipher_desc {
- struct crypto_tfm *tfm;
- void (*crfn)(void *ctx, u8 *dst, const u8 *src);
- unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
- const u8 *src, unsigned int nbytes);
- void *info;
-};
-
static inline void xor_64(u8 *a, const u8 *b)
{
((u32 *)a)[0] ^= ((u32 *)b)[0];
@@ -224,10 +216,11 @@ static int ecb_encrypt(struct crypto_tfm *tfm,
struct scatterlist *src, unsigned int nbytes)
{
struct cipher_desc desc;
+ struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
desc.tfm = tfm;
- desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
- desc.prfn = ecb_process;
+ desc.crfn = cipher->cia_encrypt;
+ desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;
return crypt(&desc, dst, src, nbytes);
}
@@ -238,10 +231,11 @@ static int ecb_decrypt(struct crypto_tfm *tfm,
unsigned int nbytes)
{
struct cipher_desc desc;
+ struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
desc.tfm = tfm;
- desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
- desc.prfn = ecb_process;
+ desc.crfn = cipher->cia_decrypt;
+ desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;
return crypt(&desc, dst, src, nbytes);
}
@@ -252,10 +246,11 @@ static int cbc_encrypt(struct crypto_tfm *tfm,
unsigned int nbytes)
{
struct cipher_desc desc;
+ struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
desc.tfm = tfm;
- desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
- desc.prfn = cbc_process_encrypt;
+ desc.crfn = cipher->cia_encrypt;
+ desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
desc.info = tfm->crt_cipher.cit_iv;
return crypt(&desc, dst, src, nbytes);
@@ -267,10 +262,11 @@ static int cbc_encrypt_iv(struct crypto_tfm *tfm,
unsigned int nbytes, u8 *iv)
{
struct cipher_desc desc;
+ struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
desc.tfm = tfm;
- desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
- desc.prfn = cbc_process_encrypt;
+ desc.crfn = cipher->cia_encrypt;
+ desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
desc.info = iv;
return crypt(&desc, dst, src, nbytes);
@@ -282,10 +278,11 @@ static int cbc_decrypt(struct crypto_tfm *tfm,
unsigned int nbytes)
{
struct cipher_desc desc;
+ struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
desc.tfm = tfm;
- desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
- desc.prfn = cbc_process_decrypt;
+ desc.crfn = cipher->cia_decrypt;
+ desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
desc.info = tfm->crt_cipher.cit_iv;
return crypt(&desc, dst, src, nbytes);
@@ -297,10 +294,11 @@ static int cbc_decrypt_iv(struct crypto_tfm *tfm,
unsigned int nbytes, u8 *iv)
{
struct cipher_desc desc;
+ struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
desc.tfm = tfm;
- desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
- desc.prfn = cbc_process_decrypt;
+ desc.crfn = cipher->cia_decrypt;
+ desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
desc.info = iv;
return crypt(&desc, dst, src, nbytes);
diff --git a/crypto/internal.h b/crypto/internal.h
index 964b9a60ca24..5ed383f7dce6 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -42,11 +42,6 @@ static inline void crypto_yield(struct crypto_tfm *tfm)
cond_resched();
}
-static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
-{
- return (void *)&tfm[1];
-}
-
struct crypto_alg *crypto_alg_lookup(const char *name);
/* A far more intelligent version of this is planned. For now, just
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 387da6a3e58c..26ce01c25745 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -61,6 +61,15 @@
#define CRYPTO_DIR_DECRYPT 0
struct scatterlist;
+struct crypto_tfm;
+
+struct cipher_desc {
+ struct crypto_tfm *tfm;
+ void (*crfn)(void *ctx, u8 *dst, const u8 *src);
+ unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
+ const u8 *src, unsigned int nbytes);
+ void *info;
+};
/*
* Algorithms: modular crypto algorithm implementations, managed
@@ -73,6 +82,19 @@ struct cipher_alg {
unsigned int keylen, u32 *flags);
void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
+
+ unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
+ u8 *dst, const u8 *src,
+ unsigned int nbytes);
+ unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
+ u8 *dst, const u8 *src,
+ unsigned int nbytes);
+ unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
+ u8 *dst, const u8 *src,
+ unsigned int nbytes);
+ unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
+ u8 *dst, const u8 *src,
+ unsigned int nbytes);
};
struct digest_alg {
@@ -136,7 +158,6 @@ static inline int crypto_alg_available(const char *name, u32 flags)
* and core processing logic. Managed via crypto_alloc_tfm() and
* crypto_free_tfm(), as well as the various helpers below.
*/
-struct crypto_tfm;
struct cipher_tfm {
void *cit_iv;
@@ -266,6 +287,11 @@ static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
return tfm->__crt_alg->cra_digest.dia_digestsize;
}
+static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
+{
+ return (void *)&tfm[1];
+}
+
/*
* API wrappers.
*/
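For the CBC variants the IV travels through the info member of struct cipher_desc (the mode handlers above point it at cit_iv or the caller-supplied iv). As a final, purely illustrative sketch under the same assumptions as the earlier example (invented example_* names, a 16-byte block), a cia_encrypt_cbc hook could chain blocks like this:

static unsigned int example_encrypt_cbc(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct example_ctx *ctx = crypto_tfm_ctx(desc->tfm);
	u8 *iv = desc->info;		/* running IV, updated in place */
	unsigned int done;

	for (done = 0; done + EXAMPLE_BLOCK_SIZE <= nbytes;
	     done += EXAMPLE_BLOCK_SIZE) {
		unsigned int i;

		/* XOR the plaintext block into the running IV ... */
		for (i = 0; i < EXAMPLE_BLOCK_SIZE; i++)
			iv[i] ^= src[done + i];

		/* ... encrypt the result with the single-block routine ... */
		desc->crfn(ctx, dst + done, iv);

		/* ... and keep the ciphertext as the IV for the next block. */
		memcpy(iv, dst + done, EXAMPLE_BLOCK_SIZE);
	}

	return done;
}

A real driver would typically replace the inner loop with a single hardware call; the important points are only that the hook consumes whole blocks, returns the number of bytes it handled, and leaves the updated chaining value in desc->info for the next chunk.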