Diffstat (limited to 'drivers/crypto/ccree/cc_cipher.c')
-rw-r--r--  drivers/crypto/ccree/cc_cipher.c | 93
1 file changed, 60 insertions(+), 33 deletions(-)
diff --git a/drivers/crypto/ccree/cc_cipher.c b/drivers/crypto/ccree/cc_cipher.c
index 5b58226ea24d..7d6252d892d7 100644
--- a/drivers/crypto/ccree/cc_cipher.c
+++ b/drivers/crypto/ccree/cc_cipher.c
@@ -5,7 +5,7 @@
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
-#include <crypto/des.h>
+#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>
@@ -16,7 +16,7 @@
#include "cc_cipher.h"
#include "cc_request_mgr.h"
-#define MAX_ABLKCIPHER_SEQ_LEN 6
+#define MAX_SKCIPHER_SEQ_LEN 6
#define template_skcipher template_u.skcipher
@@ -116,10 +116,6 @@ static int validate_data_size(struct cc_cipher_ctx *ctx_p,
case S_DIN_to_AES:
switch (ctx_p->cipher_mode) {
case DRV_CIPHER_XTS:
- if (size >= AES_BLOCK_SIZE &&
- IS_ALIGNED(size, AES_BLOCK_SIZE))
- return 0;
- break;
case DRV_CIPHER_CBC_CTS:
if (size >= AES_BLOCK_SIZE)
return 0;
@@ -295,7 +291,6 @@ static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
/* This check the size of the protected key token */
if (keylen != sizeof(hki)) {
dev_err(dev, "Unsupported protected key size %d.\n", keylen);
- crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
return -EINVAL;
}
@@ -307,8 +302,7 @@ static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
keylen = hki.keylen;
if (validate_keys_sizes(ctx_p, keylen)) {
- dev_err(dev, "Unsupported key size %d.\n", keylen);
- crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ dev_dbg(dev, "Unsupported key size %d.\n", keylen);
return -EINVAL;
}
@@ -398,8 +392,7 @@ static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
/* STAT_PHASE_0: Init and sanity checks */
if (validate_keys_sizes(ctx_p, keylen)) {
- dev_err(dev, "Unsupported key size %d.\n", keylen);
- crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ dev_dbg(dev, "Unsupported key size %d.\n", keylen);
return -EINVAL;
}
@@ -411,16 +404,9 @@ static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
* HW does the expansion on its own.
*/
if (ctx_p->flow_mode == S_DIN_to_DES) {
- u32 tmp[DES3_EDE_EXPKEY_WORDS];
- if (keylen == DES3_EDE_KEY_SIZE &&
- __des3_ede_setkey(tmp, &tfm->crt_flags, key,
- DES3_EDE_KEY_SIZE)) {
- dev_dbg(dev, "weak 3DES key");
- return -EINVAL;
- } else if (!des_ekey(tmp, key) &&
- (crypto_tfm_get_flags(tfm) &
- CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
- tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
+ if ((keylen == DES3_EDE_KEY_SIZE &&
+ verify_skcipher_des3_key(sktfm, key)) ||
+ verify_skcipher_des_key(sktfm, key)) {
dev_dbg(dev, "weak DES key");
return -EINVAL;
}
@@ -534,6 +520,7 @@ static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
}
}
+
static void cc_setup_state_desc(struct crypto_tfm *tfm,
struct cipher_req_ctx *req_ctx,
unsigned int ivsize, unsigned int nbytes,
@@ -545,8 +532,6 @@ static void cc_setup_state_desc(struct crypto_tfm *tfm,
int cipher_mode = ctx_p->cipher_mode;
int flow_mode = ctx_p->flow_mode;
int direction = req_ctx->gen_ctx.op_type;
- dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
- unsigned int key_len = ctx_p->keylen;
dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
unsigned int du_size = nbytes;
@@ -582,6 +567,47 @@ static void cc_setup_state_desc(struct crypto_tfm *tfm,
case DRV_CIPHER_XTS:
case DRV_CIPHER_ESSIV:
case DRV_CIPHER_BITLOCKER:
+ break;
+ default:
+ dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
+ }
+}
+
+
+static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
+ struct cipher_req_ctx *req_ctx,
+ unsigned int ivsize, unsigned int nbytes,
+ struct cc_hw_desc desc[],
+ unsigned int *seq_size)
+{
+ struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
+ struct device *dev = drvdata_to_dev(ctx_p->drvdata);
+ int cipher_mode = ctx_p->cipher_mode;
+ int flow_mode = ctx_p->flow_mode;
+ int direction = req_ctx->gen_ctx.op_type;
+ dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
+ unsigned int key_len = ctx_p->keylen;
+ dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
+ unsigned int du_size = nbytes;
+
+ struct cc_crypto_alg *cc_alg =
+ container_of(tfm->__crt_alg, struct cc_crypto_alg,
+ skcipher_alg.base);
+
+ if (cc_alg->data_unit)
+ du_size = cc_alg->data_unit;
+
+ switch (cipher_mode) {
+ case DRV_CIPHER_ECB:
+ break;
+ case DRV_CIPHER_CBC:
+ case DRV_CIPHER_CBC_CTS:
+ case DRV_CIPHER_CTR:
+ case DRV_CIPHER_OFB:
+ break;
+ case DRV_CIPHER_XTS:
+ case DRV_CIPHER_ESSIV:
+ case DRV_CIPHER_BITLOCKER:
/* load XEX key */
hw_desc_init(&desc[*seq_size]);
set_cipher_mode(&desc[*seq_size], cipher_mode);
@@ -833,7 +859,7 @@ static int cc_cipher_process(struct skcipher_request *req,
void *iv = req->iv;
struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
struct device *dev = drvdata_to_dev(ctx_p->drvdata);
- struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
+ struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
struct cc_crypto_req cc_req = {};
int rc;
unsigned int seq_len = 0;
@@ -847,8 +873,7 @@ static int cc_cipher_process(struct skcipher_request *req,
/* TODO: check data length according to mode */
if (validate_data_size(ctx_p, nbytes)) {
- dev_err(dev, "Unsupported data size %d.\n", nbytes);
- crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
+ dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
rc = -EINVAL;
goto exit_process;
}
@@ -892,12 +917,14 @@ static int cc_cipher_process(struct skcipher_request *req,
/* STAT_PHASE_2: Create sequence */
- /* Setup IV and XEX key used */
+ /* Setup state (IV) */
cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
/* Setup MLLI line, if needed */
cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
/* Setup key */
cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
+ /* Setup state (IV and XEX key) */
+ cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
/* Data processing */
cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
/* Read next IV */
@@ -945,7 +972,7 @@ static const struct cc_alg_template skcipher_algs[] = {
{
.name = "xts(paes)",
.driver_name = "xts-paes-ccree",
- .blocksize = AES_BLOCK_SIZE,
+ .blocksize = 1,
.template_skcipher = {
.setkey = cc_cipher_sethkey,
.encrypt = cc_cipher_encrypt,
@@ -963,7 +990,7 @@ static const struct cc_alg_template skcipher_algs[] = {
{
.name = "xts512(paes)",
.driver_name = "xts-paes-du512-ccree",
- .blocksize = AES_BLOCK_SIZE,
+ .blocksize = 1,
.template_skcipher = {
.setkey = cc_cipher_sethkey,
.encrypt = cc_cipher_encrypt,
@@ -982,7 +1009,7 @@ static const struct cc_alg_template skcipher_algs[] = {
{
.name = "xts4096(paes)",
.driver_name = "xts-paes-du4096-ccree",
- .blocksize = AES_BLOCK_SIZE,
+ .blocksize = 1,
.template_skcipher = {
.setkey = cc_cipher_sethkey,
.encrypt = cc_cipher_encrypt,
@@ -1203,7 +1230,7 @@ static const struct cc_alg_template skcipher_algs[] = {
{
.name = "xts(aes)",
.driver_name = "xts-aes-ccree",
- .blocksize = AES_BLOCK_SIZE,
+ .blocksize = 1,
.template_skcipher = {
.setkey = cc_cipher_setkey,
.encrypt = cc_cipher_encrypt,
@@ -1220,7 +1247,7 @@ static const struct cc_alg_template skcipher_algs[] = {
{
.name = "xts512(aes)",
.driver_name = "xts-aes-du512-ccree",
- .blocksize = AES_BLOCK_SIZE,
+ .blocksize = 1,
.template_skcipher = {
.setkey = cc_cipher_setkey,
.encrypt = cc_cipher_encrypt,
@@ -1238,7 +1265,7 @@ static const struct cc_alg_template skcipher_algs[] = {
{
.name = "xts4096(aes)",
.driver_name = "xts-aes-du4096-ccree",
- .blocksize = AES_BLOCK_SIZE,
+ .blocksize = 1,
.template_skcipher = {
.setkey = cc_cipher_setkey,
.encrypt = cc_cipher_encrypt,
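
Below is a minimal sketch (not part of the patch, and not the driver's exact control flow) of the key-verification pattern the cc_cipher_setkey() hunk above adopts: the shared helpers from <crypto/internal/des.h> replace the driver's open-coded des_ekey()/__des3_ede_setkey() weak-key checks. The helpers return 0 for an acceptable key and a negative errno otherwise, rejecting weak keys only when the transform has CRYPTO_TFM_REQ_FORBID_WEAK_KEYS set (or under FIPS policy). Function and variable names here are illustrative only.

/* Illustrative sketch only: names below are hypothetical, not from cc_cipher.c. */
#include <crypto/internal/des.h>
#include <crypto/skcipher.h>

static int example_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
			      unsigned int keylen)
{
	int err;

	if (keylen == DES3_EDE_KEY_SIZE)
		/* Checks for degenerate 3DES keys (K1 == K2 or K2 == K3). */
		err = verify_skcipher_des3_key(tfm, key);
	else if (keylen == DES_KEY_SIZE)
		/* Rejects weak single-DES keys when the tfm forbids them. */
		err = verify_skcipher_des_key(tfm, key);
	else
		return -EINVAL;

	if (err)
		return err;

	/* Key is acceptable; a real driver would program it into HW here. */
	return 0;
}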