[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Message-Id: <1492121135-4437-10-git-send-email-logang@deltatee.com>
Date: Thu, 13 Apr 2017 16:05:22 -0600
From: Logan Gunthorpe <logang@...tatee.com>
To: Christoph Hellwig <hch@....de>,
"Martin K. Petersen" <martin.petersen@...cle.com>,
Sagi Grimberg <sagi@...mberg.me>, Jens Axboe <axboe@...nel.dk>,
Tejun Heo <tj@...nel.org>,
Greg Kroah-Hartman <gregkh@...uxfoundation.org>,
Dan Williams <dan.j.williams@...el.com>,
Ross Zwisler <ross.zwisler@...ux.intel.com>,
Matthew Wilcox <mawilcox@...rosoft.com>,
Sumit Semwal <sumit.semwal@...aro.org>,
Ming Lin <ming.l@....samsung.com>,
linux-kernel@...r.kernel.org, linux-crypto@...r.kernel.org,
linux-media@...r.kernel.org, dri-devel@...ts.freedesktop.org,
linaro-mm-sig@...ts.linaro.org, intel-gfx@...ts.freedesktop.org,
linux-raid@...r.kernel.org, linux-mmc@...r.kernel.org,
linux-nvme@...ts.infradead.org, linux-nvdimm@...ts.01.org,
linux-scsi@...r.kernel.org, fcoe-devel@...n-fcoe.org,
open-iscsi@...glegroups.com, megaraidlinux.pdl@...adcom.com,
sparmaintainer@...sys.com, devel@...verdev.osuosl.org,
target-devel@...r.kernel.org, netdev@...r.kernel.org,
linux-rdma@...r.kernel.org, rds-devel@....oracle.com
Cc: Steve Wise <swise@...ngridcomputing.com>,
Stephen Bates <sbates@...thlin.com>,
Logan Gunthorpe <logang@...tatee.com>
Subject: [PATCH 09/22] dm-crypt: Make use of the new sg_map helper in 4 call sites
Very straightforward conversion to the new sg_map()/sg_unmap() helpers in all four call sites.
Signed-off-by: Logan Gunthorpe <logang@...tatee.com>
---
drivers/md/dm-crypt.c | 38 +++++++++++++++++++++++++-------------
1 file changed, 25 insertions(+), 13 deletions(-)
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index 389a363..6bd0ffc 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -589,9 +589,12 @@ static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv,
int r = 0;
if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
- src = kmap_atomic(sg_page(&dmreq->sg_in));
- r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset);
- kunmap_atomic(src);
+ src = sg_map(&dmreq->sg_in, SG_KMAP_ATOMIC);
+ if (IS_ERR(src))
+ return PTR_ERR(src);
+
+ r = crypt_iv_lmk_one(cc, iv, dmreq, src);
+ sg_unmap(&dmreq->sg_in, src, SG_KMAP_ATOMIC);
} else
memset(iv, 0, cc->iv_size);
@@ -607,14 +610,17 @@ static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv,
if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
return 0;
- dst = kmap_atomic(sg_page(&dmreq->sg_out));
- r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset);
+ dst = sg_map(&dmreq->sg_out, SG_KMAP_ATOMIC);
+ if (IS_ERR(dst))
+ return PTR_ERR(dst);
+
+ r = crypt_iv_lmk_one(cc, iv, dmreq, dst);
/* Tweak the first block of plaintext sector */
if (!r)
- crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size);
+ crypto_xor(dst, iv, cc->iv_size);
- kunmap_atomic(dst);
+ sg_unmap(&dmreq->sg_out, dst, SG_KMAP_ATOMIC);
return r;
}
@@ -731,9 +737,12 @@ static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
/* Remove whitening from ciphertext */
if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
- src = kmap_atomic(sg_page(&dmreq->sg_in));
- r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset);
- kunmap_atomic(src);
+ src = sg_map(&dmreq->sg_in, SG_KMAP_ATOMIC);
+ if (IS_ERR(src))
+ return PTR_ERR(src);
+
+ r = crypt_iv_tcw_whitening(cc, dmreq, src);
+ sg_unmap(&dmreq->sg_in, src, SG_KMAP_ATOMIC);
}
/* Calculate IV */
@@ -755,9 +764,12 @@ static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv,
return 0;
/* Apply whitening on ciphertext */
- dst = kmap_atomic(sg_page(&dmreq->sg_out));
- r = crypt_iv_tcw_whitening(cc, dmreq, dst + dmreq->sg_out.offset);
- kunmap_atomic(dst);
+ dst = sg_map(&dmreq->sg_out, SG_KMAP_ATOMIC);
+ if (IS_ERR(dst))
+ return PTR_ERR(dst);
+
+ r = crypt_iv_tcw_whitening(cc, dmreq, dst);
+ sg_unmap(&dmreq->sg_out, dst, SG_KMAP_ATOMIC);
return r;
}
--
2.1.4
Powered by blists - more mailing lists