Diffstat (limited to 'drivers/crypto/nx/nx.c')
-rw-r--r--  drivers/crypto/nx/nx.c | 233
1 file changed, 149 insertions(+), 84 deletions(-)
diff --git a/drivers/crypto/nx/nx.c b/drivers/crypto/nx/nx.c
index 1da6dc59d0dd..f6198f29a4a8 100644
--- a/drivers/crypto/nx/nx.c
+++ b/drivers/crypto/nx/nx.c
@@ -19,8 +19,8 @@
* Author: Kent Yoder <yoder1@us.ibm.com>
*/
+#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
-#include <crypto/hash.h>
#include <crypto/aes.h>
#include <crypto/sha.h>
#include <crypto/algapi.h>
@@ -29,10 +29,10 @@
#include <linux/moduleparam.h>
#include <linux/types.h>
#include <linux/mm.h>
-#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/device.h>
#include <linux/of.h>
#include <asm/hvcall.h>
#include <asm/vio.h>
@@ -215,8 +215,15 @@ struct nx_sg *nx_walk_and_build(struct nx_sg *nx_dst,
* @delta: the amount we need to crop in order to bound the list.
+ * @nbytes: number of bytes to process; when the list is cropped this
+ * is updated with the amount of data to be put back
*
*/
-static long int trim_sg_list(struct nx_sg *sg, struct nx_sg *end, unsigned int delta)
+static long int trim_sg_list(struct nx_sg *sg,
+ struct nx_sg *end,
+ unsigned int delta,
+ unsigned int *nbytes)
{
+ long int oplen;
+ long int data_back;
+ unsigned int is_delta = delta;
+
while (delta && end > sg) {
struct nx_sg *last = end - 1;
@@ -228,54 +235,20 @@ static long int trim_sg_list(struct nx_sg *sg, struct nx_sg *end, unsigned int d
delta -= last->len;
}
}
- return (sg - end) * sizeof(struct nx_sg);
-}
-/**
- * nx_sha_build_sg_list - walk and build sg list to sha modes
- * using right bounds and limits.
- * @nx_ctx: NX crypto context for the lists we're building
- * @nx_sg: current sg list in or out list
- * @op_len: current op_len to be used in order to build a sg list
- * @nbytes: number or bytes to be processed
- * @offset: buf offset
- * @mode: SHA256 or SHA512
- */
-int nx_sha_build_sg_list(struct nx_crypto_ctx *nx_ctx,
- struct nx_sg *nx_in_outsg,
- s64 *op_len,
- unsigned int *nbytes,
- u8 *offset,
- u32 mode)
-{
- unsigned int delta = 0;
- unsigned int total = *nbytes;
- struct nx_sg *nx_insg = nx_in_outsg;
- unsigned int max_sg_len;
-
- max_sg_len = min_t(u64, nx_ctx->ap->sglen,
- nx_driver.of.max_sg_len/sizeof(struct nx_sg));
- max_sg_len = min_t(u64, max_sg_len,
- nx_ctx->ap->databytelen/NX_PAGE_SIZE);
-
- *nbytes = min_t(u64, *nbytes, nx_ctx->ap->databytelen);
- nx_insg = nx_build_sg_list(nx_insg, offset, nbytes, max_sg_len);
-
- switch (mode) {
- case NX_DS_SHA256:
- if (*nbytes < total)
- delta = *nbytes - (*nbytes & ~(SHA256_BLOCK_SIZE - 1));
- break;
- case NX_DS_SHA512:
- if (*nbytes < total)
- delta = *nbytes - (*nbytes & ~(SHA512_BLOCK_SIZE - 1));
- break;
- default:
- return -EINVAL;
+ /* There are cases where we need to crop the list in order to make it
+ * a block size multiple, but we also need to align the data. To do
+ * that, we calculate how much data has to be put back to be
+ * processed later.
+ */
+ oplen = (sg - end) * sizeof(struct nx_sg);
+ if (is_delta) {
+ data_back = (abs(oplen) / AES_BLOCK_SIZE) * sg->len;
+ data_back = *nbytes - (data_back & ~(AES_BLOCK_SIZE - 1));
+ *nbytes = data_back;
}
- *op_len = trim_sg_list(nx_in_outsg, nx_insg, delta);
- return 0;
+ return oplen;
}
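
The crop-and-realign arithmetic above can be exercised in isolation. Below is a minimal userspace sketch with hypothetical sample values (five surviving sg entries describing 100 bytes each, a 500-byte request); it leans on sizeof(struct nx_sg) being 16, the same value as AES_BLOCK_SIZE, so dividing the descriptor bytes by the block size recovers the entry count:

    #include <stdio.h>
    #include <stdlib.h>

    #define AES_BLOCK_SIZE 16
    #define NX_SG_SIZE     16    /* stands in for sizeof(struct nx_sg) */

    int main(void)
    {
            /* Hypothetical state after the trim loop: 5 entries kept. */
            long oplen = -5L * NX_SG_SIZE;  /* negative sg-list length */
            unsigned int sg_len = 100;      /* len of the first entry  */
            unsigned int nbytes = 500;      /* bytes the caller wanted */

            /* Same steps as trim_sg_list(): estimate the bytes held by
             * the list, round down to an AES block multiple; what is
             * left over must be put back for a later operation. */
            long data_back = (labs(oplen) / AES_BLOCK_SIZE) * sg_len;
            data_back = nbytes - (data_back & ~(AES_BLOCK_SIZE - 1));

            printf("put back %ld of %u bytes\n", data_back, nbytes);
            return 0;
    }

With these inputs the list holds 500 bytes, 496 of which are block-aligned, so the sketch prints "put back 4 of 500 bytes".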
/**
@@ -330,8 +303,8 @@ int nx_build_sg_lists(struct nx_crypto_ctx *nx_ctx,
/* these lengths should be negative, which will indicate to phyp that
* the input and output parameters are scatterlists, not linear
* buffers */
- nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta);
- nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta);
+ nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes);
+ nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes);
return 0;
}
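
The negative lengths fall out of trim_sg_list() returning (sg - end) * sizeof(struct nx_sg) with end pointing one past the last used entry. A self-contained sketch, using an assumed stand-in layout for struct nx_sg (the real definition lives in nx.h):

    #include <stdio.h>

    /* Assumed 16-byte stand-in for the driver's descriptor. */
    struct nx_sg {
            unsigned long addr;
            unsigned int  rsvd;
            unsigned int  len;
    };

    int main(void)
    {
            struct nx_sg list[4];
            struct nx_sg *sg  = &list[0];
            struct nx_sg *end = &list[4];  /* one past the last entry */

            /* end > sg, so the difference is negative; phyp reads a
             * negative op length as "scatterlist, not linear buffer". */
            long oplen = (sg - end) * (long)sizeof(struct nx_sg);
            printf("op.inlen = %ld\n", oplen);  /* prints -64 */
            return 0;
    }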
@@ -426,6 +399,13 @@ static void nx_of_update_msc(struct device *dev,
goto next_loop;
}
+ if (!trip->sglen || trip->databytelen < NX_PAGE_SIZE) {
+ dev_warn(dev, "bogus sglen/databytelen: "
+ "%u/%u (ignored)\n", trip->sglen,
+ trip->databytelen);
+ goto next_loop;
+ }
+
switch (trip->keybitlen) {
case 128:
case 160:
@@ -518,6 +498,72 @@ static void nx_of_init(struct device *dev, struct nx_of *props)
nx_of_update_msc(dev, p, props);
}
+static bool nx_check_prop(struct device *dev, u32 fc, u32 mode, int slot)
+{
+ struct alg_props *props = &nx_driver.of.ap[fc][mode][slot];
+
+ if (!props->sglen || props->databytelen < NX_PAGE_SIZE) {
+ if (dev)
+ dev_warn(dev, "bogus sglen/databytelen for %u/%u/%u: "
+ "%u/%u (ignored)\n", fc, mode, slot,
+ props->sglen, props->databytelen);
+ return false;
+ }
+
+ return true;
+}
+
+static bool nx_check_props(struct device *dev, u32 fc, u32 mode)
+{
+ int i;
+
+ for (i = 0; i < 3; i++)
+ if (!nx_check_prop(dev, fc, mode, i))
+ return false;
+
+ return true;
+}
+
+static int nx_register_alg(struct crypto_alg *alg, u32 fc, u32 mode)
+{
+ return nx_check_props(&nx_driver.viodev->dev, fc, mode) ?
+ crypto_register_alg(alg) : 0;
+}
+
+static int nx_register_aead(struct aead_alg *alg, u32 fc, u32 mode)
+{
+ return nx_check_props(&nx_driver.viodev->dev, fc, mode) ?
+ crypto_register_aead(alg) : 0;
+}
+
+static int nx_register_shash(struct shash_alg *alg, u32 fc, u32 mode, int slot)
+{
+ return (slot >= 0 ? nx_check_prop(&nx_driver.viodev->dev,
+ fc, mode, slot) :
+ nx_check_props(&nx_driver.viodev->dev, fc, mode)) ?
+ crypto_register_shash(alg) : 0;
+}
+
+static void nx_unregister_alg(struct crypto_alg *alg, u32 fc, u32 mode)
+{
+ if (nx_check_props(NULL, fc, mode))
+ crypto_unregister_alg(alg);
+}
+
+static void nx_unregister_aead(struct aead_alg *alg, u32 fc, u32 mode)
+{
+ if (nx_check_props(NULL, fc, mode))
+ crypto_unregister_aead(alg);
+}
+
+static void nx_unregister_shash(struct shash_alg *alg, u32 fc, u32 mode,
+ int slot)
+{
+ if (slot >= 0 ? nx_check_prop(NULL, fc, mode, slot) :
+ nx_check_props(NULL, fc, mode))
+ crypto_unregister_shash(alg);
+}
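
All of the wrappers above funnel through the same property check, so an algorithm whose device-tree limits are bogus is skipped at registration and, just as importantly, skipped again at unregistration; otherwise remove or an error unwind would call crypto_unregister_*() on something the crypto API never saw. A condensed userspace analogue of the pattern (props_ok() and the other names are illustrative, not driver API):

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-in for nx_check_props(): sane limits for this fc/mode? */
    static bool props_ok(int fc, int mode)
    {
            return fc == 1 && mode >= 0;   /* assumed toy predicate */
    }

    static int register_guarded(const char *name, int fc, int mode)
    {
            if (!props_ok(fc, mode))
                    return 0;              /* skipped, not an error */
            printf("registered %s\n", name);
            return 0;
    }

    static void unregister_guarded(const char *name, int fc, int mode)
    {
            if (!props_ok(fc, mode))       /* same decision as above */
                    return;
            printf("unregistered %s\n", name);
    }

    int main(void)
    {
            register_guarded("cbc(aes)", 1, 0);
            register_guarded("sha256", 2, 0);    /* skipped */
            unregister_guarded("sha256", 2, 0);  /* skipped: balanced */
            unregister_guarded("cbc(aes)", 1, 0);
            return 0;
    }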
+
/**
* nx_register_algs - register algorithms with the crypto API
*
@@ -542,72 +588,77 @@ static int nx_register_algs(void)
nx_driver.of.status = NX_OKAY;
- rc = crypto_register_alg(&nx_ecb_aes_alg);
+ rc = nx_register_alg(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB);
if (rc)
goto out;
- rc = crypto_register_alg(&nx_cbc_aes_alg);
+ rc = nx_register_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
if (rc)
goto out_unreg_ecb;
- rc = crypto_register_alg(&nx_ctr_aes_alg);
+ rc = nx_register_alg(&nx_ctr_aes_alg, NX_FC_AES, NX_MODE_AES_CTR);
if (rc)
goto out_unreg_cbc;
- rc = crypto_register_alg(&nx_ctr3686_aes_alg);
+ rc = nx_register_alg(&nx_ctr3686_aes_alg, NX_FC_AES, NX_MODE_AES_CTR);
if (rc)
goto out_unreg_ctr;
- rc = crypto_register_alg(&nx_gcm_aes_alg);
+ rc = nx_register_aead(&nx_gcm_aes_alg, NX_FC_AES, NX_MODE_AES_GCM);
if (rc)
goto out_unreg_ctr3686;
- rc = crypto_register_alg(&nx_gcm4106_aes_alg);
+ rc = nx_register_aead(&nx_gcm4106_aes_alg, NX_FC_AES, NX_MODE_AES_GCM);
if (rc)
goto out_unreg_gcm;
- rc = crypto_register_alg(&nx_ccm_aes_alg);
+ rc = nx_register_alg(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
if (rc)
goto out_unreg_gcm4106;
- rc = crypto_register_alg(&nx_ccm4309_aes_alg);
+ rc = nx_register_alg(&nx_ccm4309_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
if (rc)
goto out_unreg_ccm;
- rc = crypto_register_shash(&nx_shash_sha256_alg);
+ rc = nx_register_shash(&nx_shash_sha256_alg, NX_FC_SHA, NX_MODE_SHA,
+ NX_PROPS_SHA256);
if (rc)
goto out_unreg_ccm4309;
- rc = crypto_register_shash(&nx_shash_sha512_alg);
+ rc = nx_register_shash(&nx_shash_sha512_alg, NX_FC_SHA, NX_MODE_SHA,
+ NX_PROPS_SHA512);
if (rc)
goto out_unreg_s256;
- rc = crypto_register_shash(&nx_shash_aes_xcbc_alg);
+ rc = nx_register_shash(&nx_shash_aes_xcbc_alg,
+ NX_FC_AES, NX_MODE_AES_XCBC_MAC, -1);
if (rc)
goto out_unreg_s512;
goto out;
out_unreg_s512:
- crypto_unregister_shash(&nx_shash_sha512_alg);
+ nx_unregister_shash(&nx_shash_sha512_alg, NX_FC_SHA, NX_MODE_SHA,
+ NX_PROPS_SHA512);
out_unreg_s256:
- crypto_unregister_shash(&nx_shash_sha256_alg);
+ nx_unregister_shash(&nx_shash_sha256_alg, NX_FC_SHA, NX_MODE_SHA,
+ NX_PROPS_SHA256);
out_unreg_ccm4309:
- crypto_unregister_alg(&nx_ccm4309_aes_alg);
+ nx_unregister_alg(&nx_ccm4309_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
out_unreg_ccm:
- crypto_unregister_alg(&nx_ccm_aes_alg);
+ nx_unregister_alg(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
out_unreg_gcm4106:
- crypto_unregister_alg(&nx_gcm4106_aes_alg);
+ nx_unregister_aead(&nx_gcm4106_aes_alg, NX_FC_AES, NX_MODE_AES_GCM);
out_unreg_gcm:
- crypto_unregister_alg(&nx_gcm_aes_alg);
+ nx_unregister_aead(&nx_gcm_aes_alg, NX_FC_AES, NX_MODE_AES_GCM);
out_unreg_ctr3686:
- crypto_unregister_alg(&nx_ctr3686_aes_alg);
+ nx_unregister_alg(&nx_ctr3686_aes_alg, NX_FC_AES, NX_MODE_AES_CTR);
out_unreg_ctr:
- crypto_unregister_alg(&nx_ctr_aes_alg);
+ nx_unregister_alg(&nx_ctr_aes_alg, NX_FC_AES, NX_MODE_AES_CTR);
out_unreg_cbc:
- crypto_unregister_alg(&nx_cbc_aes_alg);
+ nx_unregister_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
out_unreg_ecb:
- crypto_unregister_alg(&nx_ecb_aes_alg);
+ nx_unregister_alg(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB);
out:
return rc;
}
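
The ladder above is the usual kernel goto-unwind idiom: each registration that can fail jumps to a label that tears down everything registered before it, in reverse order, so there is exactly one cleanup sequence no matter how far the function got. A self-contained toy version (illustrative names only):

    #include <stdio.h>

    static int  step(int n, int fail_at) { return n == fail_at ? -1 : 0; }
    static void undo(int n)              { printf("undo %d\n", n); }

    static int do_all(int fail_at)
    {
            int rc;

            if ((rc = step(1, fail_at)))
                    goto out;
            if ((rc = step(2, fail_at)))
                    goto out_undo1;
            if ((rc = step(3, fail_at)))
                    goto out_undo2;
            return 0;

    out_undo2:
            undo(2);
    out_undo1:
            undo(1);
    out:
            return rc;
    }

    int main(void)
    {
            /* Failing at step 3 unwinds 2 then 1, mirroring the
             * out_unreg_* labels above. */
            printf("rc = %d\n", do_all(3));
            return 0;
    }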
@@ -666,9 +717,9 @@ int nx_crypto_ctx_aes_ccm_init(struct crypto_tfm *tfm)
NX_MODE_AES_CCM);
}
-int nx_crypto_ctx_aes_gcm_init(struct crypto_tfm *tfm)
+int nx_crypto_ctx_aes_gcm_init(struct crypto_aead *tfm)
{
- return nx_crypto_ctx_init(crypto_tfm_ctx(tfm), NX_FC_AES,
+ return nx_crypto_ctx_init(crypto_aead_ctx(tfm), NX_FC_AES,
NX_MODE_AES_GCM);
}
@@ -720,6 +771,13 @@ void nx_crypto_ctx_exit(struct crypto_tfm *tfm)
nx_ctx->out_sg = NULL;
}
+void nx_crypto_ctx_aead_exit(struct crypto_aead *tfm)
+{
+ struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm);
+
+ kzfree(nx_ctx->kmem);
+}
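
With GCM converted to the AEAD interface, both callbacks are now typed against struct crypto_aead instead of struct crypto_tfm. A hedged sketch of how an aead_alg might wire them up; nx_example_gcm_alg and its field values are illustrative, and the driver's real definition lives in nx-aes-gcm.c:

    #include <crypto/internal/aead.h>
    #include <crypto/aes.h>
    #include <linux/module.h>

    /* struct nx_crypto_ctx comes from the driver's nx.h. */
    static struct aead_alg nx_example_gcm_alg = {
            .base = {
                    .cra_name        = "gcm(aes)",
                    .cra_driver_name = "gcm-aes-nx",
                    .cra_priority    = 300,
                    .cra_blocksize   = 1,
                    .cra_ctxsize     = sizeof(struct nx_crypto_ctx),
                    .cra_module      = THIS_MODULE,
            },
            /* setkey/setauthsize/encrypt/decrypt omitted from sketch */
            .init = nx_crypto_ctx_aes_gcm_init, /* struct crypto_aead * */
            .exit = nx_crypto_ctx_aead_exit,    /* kzfree()s nx_ctx->kmem */
            .ivsize      = 12,
            .maxauthsize = AES_BLOCK_SIZE,
    };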
+
static int nx_probe(struct vio_dev *viodev, const struct vio_device_id *id)
{
dev_dbg(&viodev->dev, "driver probed: %s resource id: 0x%x\n",
@@ -746,17 +804,24 @@ static int nx_remove(struct vio_dev *viodev)
if (nx_driver.of.status == NX_OKAY) {
NX_DEBUGFS_FINI(&nx_driver);
- crypto_unregister_alg(&nx_ccm_aes_alg);
- crypto_unregister_alg(&nx_ccm4309_aes_alg);
- crypto_unregister_alg(&nx_gcm_aes_alg);
- crypto_unregister_alg(&nx_gcm4106_aes_alg);
- crypto_unregister_alg(&nx_ctr_aes_alg);
- crypto_unregister_alg(&nx_ctr3686_aes_alg);
- crypto_unregister_alg(&nx_cbc_aes_alg);
- crypto_unregister_alg(&nx_ecb_aes_alg);
- crypto_unregister_shash(&nx_shash_sha256_alg);
- crypto_unregister_shash(&nx_shash_sha512_alg);
- crypto_unregister_shash(&nx_shash_aes_xcbc_alg);
+ nx_unregister_shash(&nx_shash_aes_xcbc_alg,
+ NX_FC_AES, NX_MODE_AES_XCBC_MAC, -1);
+ nx_unregister_shash(&nx_shash_sha512_alg,
+ NX_FC_SHA, NX_MODE_SHA, NX_PROPS_SHA512);
+ nx_unregister_shash(&nx_shash_sha256_alg,
+ NX_FC_SHA, NX_MODE_SHA, NX_PROPS_SHA256);
+ nx_unregister_alg(&nx_ccm4309_aes_alg,
+ NX_FC_AES, NX_MODE_AES_CCM);
+ nx_unregister_alg(&nx_ccm_aes_alg, NX_FC_AES, NX_MODE_AES_CCM);
+ nx_unregister_aead(&nx_gcm4106_aes_alg,
+ NX_FC_AES, NX_MODE_AES_GCM);
+ nx_unregister_aead(&nx_gcm_aes_alg,
+ NX_FC_AES, NX_MODE_AES_GCM);
+ nx_unregister_alg(&nx_ctr3686_aes_alg,
+ NX_FC_AES, NX_MODE_AES_CTR);
+ nx_unregister_alg(&nx_ctr_aes_alg, NX_FC_AES, NX_MODE_AES_CTR);
+ nx_unregister_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
+ nx_unregister_alg(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB);
}
return 0;