Message-ID: <20260120144408.606911-2-t-pratham@ti.com>
Date: Tue, 20 Jan 2026 20:14:06 +0530
From: T Pratham <t-pratham@...com>
To: T Pratham <t-pratham@...com>, Herbert Xu <herbert@...dor.apana.org.au>,
"David S. Miller" <davem@...emloft.net>
CC: Manorit Chawdhry <m-chawdhry@...com>, Kamlesh Gurudasani <kamlesh@...com>,
Shiva Tripathi <s-tripathi1@...com>, Kavitha Malarvizhi
<k-malarvizhi@...com>, Vishal Mahaveer <vishalm@...com>, Praneeth Bajjuri
<praneeth@...com>, <linux-crypto@...r.kernel.org>,
<linux-kernel@...r.kernel.org>
Subject: [PATCH v8 1/3] crypto: ti - Add support for AES-CTR in DTHEv2 driver

Add support for the CTR mode of operation of the AES algorithm in the
AES Engine of the DTHEv2 hardware cryptographic engine.
Signed-off-by: T Pratham <t-pratham@...com>
---
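
For context (not part of this patch), the new transform can be
exercised through the kernel crypto API roughly as follows. This is a
minimal sketch: the function name is hypothetical, the buffer must be
DMA-able (e.g. kmalloc'd), and "ctr(aes)" resolves to ctr-aes-dthev2
only if this driver wins the priority selection.

#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

static int demo_ctr_aes_encrypt(u8 *buf, unsigned int len,
				const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* CTR is a stream cipher: len need not be block-aligned */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}
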
drivers/crypto/ti/Kconfig | 1 +
drivers/crypto/ti/dthev2-aes.c | 168 ++++++++++++++++++++++++++----
drivers/crypto/ti/dthev2-common.h | 3 +
3 files changed, 153 insertions(+), 19 deletions(-)

diff --git a/drivers/crypto/ti/Kconfig b/drivers/crypto/ti/Kconfig
index a3692ceec49bc..6027e12de279d 100644
--- a/drivers/crypto/ti/Kconfig
+++ b/drivers/crypto/ti/Kconfig
@@ -6,6 +6,7 @@ config CRYPTO_DEV_TI_DTHEV2
select CRYPTO_SKCIPHER
select CRYPTO_ECB
select CRYPTO_CBC
+ select CRYPTO_CTR
select CRYPTO_XTS
help
This enables support for the TI DTHE V2 hw cryptography engine
diff --git a/drivers/crypto/ti/dthev2-aes.c b/drivers/crypto/ti/dthev2-aes.c
index 156729ccc50ec..5533805309f08 100644
--- a/drivers/crypto/ti/dthev2-aes.c
+++ b/drivers/crypto/ti/dthev2-aes.c
@@ -63,6 +63,7 @@
enum aes_ctrl_mode_masks {
AES_CTRL_ECB_MASK = 0x00,
AES_CTRL_CBC_MASK = BIT(5),
+ AES_CTRL_CTR_MASK = BIT(6),
AES_CTRL_XTS_MASK = BIT(12) | BIT(11),
};
@@ -74,6 +75,8 @@ enum aes_ctrl_mode_masks {
#define DTHE_AES_CTRL_KEYSIZE_24B BIT(4)
#define DTHE_AES_CTRL_KEYSIZE_32B (BIT(3) | BIT(4))
+#define DTHE_AES_CTRL_CTR_WIDTH_128B (BIT(7) | BIT(8))
+
#define DTHE_AES_CTRL_SAVE_CTX_SET BIT(29)
#define DTHE_AES_CTRL_OUTPUT_READY BIT_MASK(0)
@@ -100,25 +103,27 @@ static int dthe_cipher_init_tfm(struct crypto_skcipher *tfm)
return 0;
}
-static int dthe_cipher_xts_init_tfm(struct crypto_skcipher *tfm)
+static int dthe_cipher_init_tfm_fallback(struct crypto_skcipher *tfm)
{
struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
struct dthe_data *dev_data = dthe_get_dev(ctx);
+ const char *alg_name = crypto_tfm_alg_name(crypto_skcipher_tfm(tfm));
ctx->dev_data = dev_data;
ctx->keylen = 0;
- ctx->skcipher_fb = crypto_alloc_sync_skcipher("xts(aes)", 0,
+ ctx->skcipher_fb = crypto_alloc_sync_skcipher(alg_name, 0,
CRYPTO_ALG_NEED_FALLBACK);
if (IS_ERR(ctx->skcipher_fb)) {
- dev_err(dev_data->dev, "fallback driver xts(aes) couldn't be loaded\n");
+ dev_err(dev_data->dev, "fallback driver %s couldn't be loaded\n",
+ alg_name);
return PTR_ERR(ctx->skcipher_fb);
}
return 0;
}
-static void dthe_cipher_xts_exit_tfm(struct crypto_skcipher *tfm)
+static void dthe_cipher_exit_tfm(struct crypto_skcipher *tfm)
{
struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -156,6 +161,24 @@ static int dthe_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key, unsig
return dthe_aes_setkey(tfm, key, keylen);
}
+static int dthe_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
+{
+ struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
+ int ret = dthe_aes_setkey(tfm, key, keylen);
+
+ if (ret)
+ return ret;
+
+ ctx->aes_mode = DTHE_AES_CTR;
+
+ crypto_sync_skcipher_clear_flags(ctx->skcipher_fb, CRYPTO_TFM_REQ_MASK);
+ crypto_sync_skcipher_set_flags(ctx->skcipher_fb,
+ crypto_skcipher_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+
+ return crypto_sync_skcipher_setkey(ctx->skcipher_fb, key, keylen);
+}
+
static int dthe_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -171,8 +194,8 @@ static int dthe_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key, unsig
crypto_sync_skcipher_clear_flags(ctx->skcipher_fb, CRYPTO_TFM_REQ_MASK);
crypto_sync_skcipher_set_flags(ctx->skcipher_fb,
- crypto_skcipher_get_flags(tfm) &
- CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
return crypto_sync_skcipher_setkey(ctx->skcipher_fb, key, keylen);
}
@@ -236,6 +259,10 @@ static void dthe_aes_set_ctrl_key(struct dthe_tfm_ctx *ctx,
case DTHE_AES_CBC:
ctrl_val |= AES_CTRL_CBC_MASK;
break;
+ case DTHE_AES_CTR:
+ ctrl_val |= AES_CTRL_CTR_MASK;
+ ctrl_val |= DTHE_AES_CTRL_CTR_WIDTH_128B;
+ break;
case DTHE_AES_XTS:
ctrl_val |= AES_CTRL_XTS_MASK;
break;
@@ -271,11 +298,14 @@ static int dthe_aes_run(struct crypto_engine *engine, void *areq)
struct scatterlist *dst = req->dst;
int src_nents = sg_nents_for_len(src, len);
- int dst_nents;
+ int dst_nents = sg_nents_for_len(dst, len);
int src_mapped_nents;
int dst_mapped_nents;
+ int src_bkup_len;
+ int dst_bkup_len;
+
bool diff_dst;
enum dma_data_direction src_dir, dst_dir;
@@ -305,25 +335,61 @@ static int dthe_aes_run(struct crypto_engine *engine, void *areq)
dst_dir = DMA_FROM_DEVICE;
}
+ /*
+ * CTR mode can operate on any input length, but the hardware
+ * requires the input length to be a multiple of the block size,
+ * so the trailing partial block is split off here and handled
+ * separately in the driver.
+ */
+ if (ctx->aes_mode == DTHE_AES_CTR && req->cryptlen % AES_BLOCK_SIZE) {
+ struct scatterlist *sg;
+ int i = 0;
+ unsigned int curr_len = 0;
+
+ len -= req->cryptlen % AES_BLOCK_SIZE;
+ src_nents = sg_nents_for_len(req->src, len);
+ dst_nents = sg_nents_for_len(req->dst, len);
+
+ /*
+ * The src and dst scatterlists must be truncated to len, otherwise
+ * dma_map_sg() maps more data than the hardware will consume.
+ * Walking nents - 1 entries with for_each_sg() leaves sg at the
+ * last entry, which is then trimmed to a block-size multiple.
+ * The original lengths are restored at the end.
+ */
+ for_each_sg(req->src, sg, src_nents - 1, i) {
+ curr_len += sg->length;
+ }
+ curr_len += sg->length;
+ src_bkup_len = sg->length;
+ sg->length -= curr_len % AES_BLOCK_SIZE;
+
+ if (diff_dst) {
+ curr_len = 0;
+ for_each_sg(req->dst, sg, dst_nents - 1, i) {
+ curr_len += sg->length;
+ }
+ curr_len += sg->length;
+ dst_bkup_len = sg->length;
+ sg->length -= curr_len % AES_BLOCK_SIZE;
+ }
+
+ if (len == 0)
+ goto aes_ctr_partial_block;
+ }
+
tx_dev = dmaengine_get_dma_device(dev_data->dma_aes_tx);
rx_dev = dmaengine_get_dma_device(dev_data->dma_aes_rx);
src_mapped_nents = dma_map_sg(tx_dev, src, src_nents, src_dir);
if (src_mapped_nents == 0) {
ret = -EINVAL;
- goto aes_err;
+ goto aes_ctr_partial_block;
}
if (!diff_dst) {
- dst_nents = src_nents;
dst_mapped_nents = src_mapped_nents;
} else {
- dst_nents = sg_nents_for_len(dst, len);
dst_mapped_nents = dma_map_sg(rx_dev, dst, dst_nents, dst_dir);
if (dst_mapped_nents == 0) {
- dma_unmap_sg(tx_dev, src, src_nents, src_dir);
ret = -EINVAL;
- goto aes_err;
+ goto aes_map_dst_err;
}
}
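
The truncation above, distilled into a hypothetical helper (not part
of this patch) for clarity:

/*
 * Hypothetical helper: trim the last entry of a scatterlist so that
 * the mapped region covers only a multiple of blk bytes. Returns the
 * original length of the trimmed entry so the caller can restore it.
 */
static unsigned int dthe_sg_trim_tail(struct scatterlist *sgl, int nents,
				      unsigned int blk)
{
	struct scatterlist *sg;
	unsigned int total = 0, saved;
	int i;

	/* Walking nents - 1 entries leaves sg at the last entry */
	for_each_sg(sgl, sg, nents - 1, i)
		total += sg->length;
	total += sg->length;

	saved = sg->length;
	sg->length -= total % blk;
	return saved;
}
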
@@ -353,8 +419,8 @@ static int dthe_aes_run(struct crypto_engine *engine, void *areq)
else
dthe_aes_set_ctrl_key(ctx, rctx, (u32 *)req->iv);
- writel_relaxed(lower_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0);
- writel_relaxed(upper_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1);
+ writel_relaxed(lower_32_bits(len), aes_base_reg + DTHE_P_AES_C_LENGTH_0);
+ writel_relaxed(upper_32_bits(len), aes_base_reg + DTHE_P_AES_C_LENGTH_1);
dmaengine_submit(desc_in);
dmaengine_submit(desc_out);
@@ -386,11 +452,48 @@ static int dthe_aes_run(struct crypto_engine *engine, void *areq)
}
aes_prep_err:
- dma_unmap_sg(tx_dev, src, src_nents, src_dir);
if (dst_dir != DMA_BIDIRECTIONAL)
dma_unmap_sg(rx_dev, dst, dst_nents, dst_dir);
+aes_map_dst_err:
+ dma_unmap_sg(tx_dev, src, src_nents, src_dir);
+
+aes_ctr_partial_block:
+ if (ctx->aes_mode == DTHE_AES_CTR && req->cryptlen % AES_BLOCK_SIZE) {
+ /*
+ * Process the remaining tail bytes that the hardware did not
+ * handle, using the software fallback.
+ */
+ SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->skcipher_fb);
+ struct scatterlist rem_src;
+ u8 *rem_buf = rctx->padding;
+ unsigned int rem_len = req->cryptlen % AES_BLOCK_SIZE;
+
+ /* Restore original sg lengths */
+ struct scatterlist *sg;
+ int i;
+
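+ /*
+ * The empty-body for_each_sg() loops below only advance sg to the
+ * last entry of each list, whose saved length is then restored.
+ */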
+ if (diff_dst) {
+ for_each_sg(req->dst, sg, dst_nents - 1, i);
+ sg->length = dst_bkup_len;
+ }
+ for_each_sg(req->src, sg, src_nents - 1, i);
+ sg->length = src_bkup_len;
+
+ src_nents = sg_nents_for_len(req->src, req->cryptlen);
+ dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
+
+ sg_pcopy_to_buffer(req->src, src_nents, rem_buf, rem_len, len);
+ sg_init_one(&rem_src, rem_buf, rem_len);
+
+ skcipher_request_set_callback(subreq, skcipher_request_flags(req),
+ req->base.complete, req->base.data);
+ skcipher_request_set_crypt(subreq, &rem_src, &rem_src,
+ rem_len, req->iv);
+
+ ret = rctx->enc ? crypto_skcipher_encrypt(subreq) :
+ crypto_skcipher_decrypt(subreq);
+ sg_pcopy_from_buffer(req->dst, dst_nents, rem_buf, rem_len, len);
+ }
-aes_err:
local_bh_disable();
crypto_finalize_skcipher_request(dev_data->engine, req, ret);
local_bh_enable();
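
The tail path, restated as a hypothetical standalone helper (not part
of this patch), assuming req->iv has already been advanced to the
counter value for the first block the hardware did not process:

static int dthe_aes_ctr_finish_tail(struct dthe_tfm_ctx *ctx,
				    struct dthe_aes_req_ctx *rctx,
				    struct skcipher_request *req)
{
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->skcipher_fb);
	struct scatterlist rem_sg;
	unsigned int rem = req->cryptlen % AES_BLOCK_SIZE;
	unsigned int done = req->cryptlen - rem;
	int nents = sg_nents_for_len(req->src, req->cryptlen);
	int ret;

	/* Bounce the tail through the per-request padding buffer */
	sg_pcopy_to_buffer(req->src, nents, rctx->padding, rem, done);
	sg_init_one(&rem_sg, rctx->padding, rem);

	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, &rem_sg, &rem_sg, rem, req->iv);

	ret = rctx->enc ? crypto_skcipher_encrypt(subreq) :
			  crypto_skcipher_decrypt(subreq);

	nents = sg_nents_for_len(req->dst, req->cryptlen);
	sg_pcopy_from_buffer(req->dst, nents, rctx->padding, rem, done);
	return ret;
}
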
@@ -408,6 +511,7 @@ static int dthe_aes_crypt(struct skcipher_request *req)
* If data is not a multiple of AES_BLOCK_SIZE:
* - need to return -EINVAL for ECB, CBC as they are block ciphers
* - need to fallback to software as H/W doesn't support Ciphertext Stealing for XTS
+ * - no special handling needed for CTR, which accepts any length
*/
if (req->cryptlen % AES_BLOCK_SIZE) {
if (ctx->aes_mode == DTHE_AES_XTS) {
@@ -421,7 +525,8 @@ static int dthe_aes_crypt(struct skcipher_request *req)
return rctx->enc ? crypto_skcipher_encrypt(subreq) :
crypto_skcipher_decrypt(subreq);
}
- return -EINVAL;
+ if (ctx->aes_mode != DTHE_AES_CTR)
+ return -EINVAL;
}
/*
@@ -501,8 +606,33 @@ static struct skcipher_engine_alg cipher_algs[] = {
.op.do_one_request = dthe_aes_run,
}, /* CBC AES */
{
- .base.init = dthe_cipher_xts_init_tfm,
- .base.exit = dthe_cipher_xts_exit_tfm,
+ .base.init = dthe_cipher_init_tfm_fallback,
+ .base.exit = dthe_cipher_exit_tfm,
+ .base.setkey = dthe_aes_ctr_setkey,
+ .base.encrypt = dthe_aes_encrypt,
+ .base.decrypt = dthe_aes_decrypt,
+ .base.min_keysize = AES_MIN_KEY_SIZE,
+ .base.max_keysize = AES_MAX_KEY_SIZE,
+ .base.ivsize = AES_IV_SIZE,
+ .base.chunksize = AES_BLOCK_SIZE,
+ .base.base = {
+ .cra_name = "ctr(aes)",
+ .cra_driver_name = "ctr-aes-dthev2",
+ .cra_priority = 299,
+ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
+ CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct dthe_tfm_ctx),
+ .cra_reqsize = sizeof(struct dthe_aes_req_ctx),
+ .cra_module = THIS_MODULE,
+ },
+ .op.do_one_request = dthe_aes_run,
+ }, /* CTR AES */
+ {
+ .base.init = dthe_cipher_init_tfm_fallback,
+ .base.exit = dthe_cipher_exit_tfm,
.base.setkey = dthe_aes_xts_setkey,
.base.encrypt = dthe_aes_encrypt,
.base.decrypt = dthe_aes_decrypt,
diff --git a/drivers/crypto/ti/dthev2-common.h b/drivers/crypto/ti/dthev2-common.h
index c7a06a4c353ff..efbcbbb741a14 100644
--- a/drivers/crypto/ti/dthev2-common.h
+++ b/drivers/crypto/ti/dthev2-common.h
@@ -36,6 +36,7 @@
enum dthe_aes_mode {
DTHE_AES_ECB = 0,
DTHE_AES_CBC,
+ DTHE_AES_CTR,
DTHE_AES_XTS,
};
@@ -92,10 +93,12 @@ struct dthe_tfm_ctx {
/**
* struct dthe_aes_req_ctx - AES engine req ctx struct
* @enc: flag indicating encryption or decryption operation
+ * @padding: padding buffer for handling unaligned data
* @aes_compl: Completion variable for use in manual completion in case of DMA callback failure
*/
struct dthe_aes_req_ctx {
int enc;
+ u8 padding[AES_BLOCK_SIZE];
struct completion aes_compl;
};
--
2.34.1