Skip to content

Commit 334d37c

Browse files
horiagherbertx
authored and committed
crypto: caam - update IV using HW support
Modify drivers to perform skcipher IV update using the crypto engine, instead of performing the operation in SW. Besides being more efficient, this also fixes IV update for CTR mode. Output HW S/G table is appended with an entry pointing to the same IV buffer used as input (which is now mapped BIDIRECTIONAL). AS (Algorithm State) parameter of the OPERATION command is changed from INITFINAL to INIT in descriptors used by ctr(aes), cbc(aes). This is needed since in case FINAL bit is set, HW skips IV updating in the Context Register for the last data block. Signed-off-by: Horia Geantă <[email protected]> Signed-off-by: Herbert Xu <[email protected]>
1 parent 059d73e commit 334d37c

File tree

5 files changed

+163
-146
lines changed

5 files changed

+163
-146
lines changed

drivers/crypto/caam/caamalg.c

+51-37
Original file line number | Diff line number | Diff line change
@@ -898,7 +898,7 @@ static void caam_unmap(struct device *dev, struct scatterlist *src,
898898
}
899899

900900
if (iv_dma)
901-
dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
901+
dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
902902
if (sec4_sg_bytes)
903903
dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
904904
DMA_TO_DEVICE);
@@ -977,7 +977,6 @@ static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
977977
struct skcipher_request *req = context;
978978
struct skcipher_edesc *edesc;
979979
struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
980-
struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
981980
int ivsize = crypto_skcipher_ivsize(skcipher);
982981

983982
dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
@@ -991,16 +990,17 @@ static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
991990

992991
/*
993992
* The crypto API expects us to set the IV (req->iv) to the last
994-
* ciphertext block when running in CBC mode.
993+
* ciphertext block (CBC mode) or last counter (CTR mode).
994+
* This is used e.g. by the CTS mode.
995995
*/
996-
if ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
997-
scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
998-
ivsize, ivsize, 0);
996+
if (ivsize) {
997+
memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
998+
ivsize);
999999

1000-
if (ivsize)
10011000
print_hex_dump_debug("dstiv @"__stringify(__LINE__)": ",
10021001
DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
10031002
edesc->src_nents > 1 ? 100 : ivsize, 1);
1003+
}
10041004

10051005
caam_dump_sg("dst @" __stringify(__LINE__)": ",
10061006
DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
@@ -1027,8 +1027,20 @@ static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
10271027

10281028
skcipher_unmap(jrdev, edesc, req);
10291029

1030-
print_hex_dump_debug("dstiv @"__stringify(__LINE__)": ",
1031-
DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1030+
/*
1031+
* The crypto API expects us to set the IV (req->iv) to the last
1032+
* ciphertext block (CBC mode) or last counter (CTR mode).
1033+
* This is used e.g. by the CTS mode.
1034+
*/
1035+
if (ivsize) {
1036+
memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1037+
ivsize);
1038+
1039+
print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
1040+
DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1041+
ivsize, 1);
1042+
}
1043+
10321044
caam_dump_sg("dst @" __stringify(__LINE__)": ",
10331045
DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
10341046
edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
@@ -1260,15 +1272,15 @@ static void init_skcipher_job(struct skcipher_request *req,
12601272
if (likely(req->src == req->dst)) {
12611273
dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
12621274
out_options = in_options;
1263-
} else if (edesc->mapped_dst_nents == 1) {
1275+
} else if (!ivsize && edesc->mapped_dst_nents == 1) {
12641276
dst_dma = sg_dma_address(req->dst);
12651277
} else {
12661278
dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
12671279
sizeof(struct sec4_sg_entry);
12681280
out_options = LDST_SGF;
12691281
}
12701282

1271-
append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
1283+
append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
12721284
}
12731285

12741286
/*
@@ -1699,22 +1711,26 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
16991711
dst_sg_idx = sec4_sg_ents;
17001712

17011713
/*
1714+
* Input, output HW S/G tables: [IV, src][dst, IV]
1715+
* IV entries point to the same buffer
1716+
* If src == dst, S/G entries are reused (S/G tables overlap)
1717+
*
17021718
* HW reads 4 S/G entries at a time; make sure the reads don't go beyond
17031719
* the end of the table by allocating more S/G entries. Logic:
1704-
* if (src != dst && output S/G)
1720+
* if (output S/G)
17051721
* pad output S/G, if needed
1706-
* else if (src == dst && S/G)
1707-
* overlapping S/Gs; pad one of them
17081722
* else if (input S/G) ...
17091723
* pad input S/G, if needed
17101724
*/
1711-
if (mapped_dst_nents > 1)
1712-
sec4_sg_ents += pad_sg_nents(mapped_dst_nents);
1713-
else if ((req->src == req->dst) && (mapped_src_nents > 1))
1714-
sec4_sg_ents = max(pad_sg_nents(sec4_sg_ents),
1715-
!!ivsize + pad_sg_nents(mapped_src_nents));
1716-
else
1725+
if (ivsize || mapped_dst_nents > 1) {
1726+
if (req->src == req->dst)
1727+
sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1728+
else
1729+
sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1730+
!!ivsize);
1731+
} else {
17171732
sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1733+
}
17181734

17191735
sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
17201736

@@ -1740,10 +1756,10 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
17401756

17411757
/* Make sure IV is located in a DMAable area */
17421758
if (ivsize) {
1743-
iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1759+
iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
17441760
memcpy(iv, req->iv, ivsize);
17451761

1746-
iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1762+
iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
17471763
if (dma_mapping_error(jrdev, iv_dma)) {
17481764
dev_err(jrdev, "unable to map IV\n");
17491765
caam_unmap(jrdev, req->src, req->dst, src_nents,
@@ -1755,13 +1771,20 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
17551771
dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
17561772
}
17571773
if (dst_sg_idx)
1758-
sg_to_sec4_sg_last(req->src, req->cryptlen, edesc->sec4_sg +
1759-
!!ivsize, 0);
1774+
sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1775+
!!ivsize, 0);
17601776

1761-
if (mapped_dst_nents > 1) {
1762-
sg_to_sec4_sg_last(req->dst, req->cryptlen,
1763-
edesc->sec4_sg + dst_sg_idx, 0);
1764-
}
1777+
if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1778+
sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1779+
dst_sg_idx, 0);
1780+
1781+
if (ivsize)
1782+
dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1783+
mapped_dst_nents, iv_dma, ivsize, 0);
1784+
1785+
if (ivsize || mapped_dst_nents > 1)
1786+
sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1787+
mapped_dst_nents);
17651788

17661789
if (sec4_sg_bytes) {
17671790
edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
@@ -1824,7 +1847,6 @@ static int skcipher_decrypt(struct skcipher_request *req)
18241847
struct skcipher_edesc *edesc;
18251848
struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
18261849
struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1827-
int ivsize = crypto_skcipher_ivsize(skcipher);
18281850
struct device *jrdev = ctx->jrdev;
18291851
u32 *desc;
18301852
int ret = 0;
@@ -1834,14 +1856,6 @@ static int skcipher_decrypt(struct skcipher_request *req)
18341856
if (IS_ERR(edesc))
18351857
return PTR_ERR(edesc);
18361858

1837-
/*
1838-
* The crypto API expects us to set the IV (req->iv) to the last
1839-
* ciphertext block when running in CBC mode.
1840-
*/
1841-
if ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
1842-
scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
1843-
ivsize, ivsize, 0);
1844-
18451859
/* Create and submit job descriptor*/
18461860
init_skcipher_job(req, edesc, false);
18471861
desc = edesc->hw_desc;

drivers/crypto/caam/caamalg_desc.c

+25-6
Original file line numberDiff line numberDiff line change
@@ -33,12 +33,11 @@ static inline void append_dec_op1(u32 *desc, u32 type)
3333
}
3434

3535
jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
36-
append_operation(desc, type | OP_ALG_AS_INITFINAL |
37-
OP_ALG_DECRYPT);
36+
append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
3837
uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
3938
set_jump_tgt_here(desc, jump_cmd);
40-
append_operation(desc, type | OP_ALG_AS_INITFINAL |
41-
OP_ALG_DECRYPT | OP_ALG_AAI_DK);
39+
append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
40+
OP_ALG_AAI_DK);
4241
set_jump_tgt_here(desc, uncond_jump_cmd);
4342
}
4443

@@ -1392,12 +1391,18 @@ void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
13921391
LDST_OFFSET_SHIFT));
13931392

13941393
/* Load operation */
1395-
append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1394+
append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
13961395
OP_ALG_ENCRYPT);
13971396

13981397
/* Perform operation */
13991398
skcipher_append_src_dst(desc);
14001399

1400+
/* Store IV */
1401+
if (ivsize)
1402+
append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1403+
LDST_CLASS_1_CCB | (ctx1_iv_off <<
1404+
LDST_OFFSET_SHIFT));
1405+
14011406
print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
14021407
DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
14031408
1);
@@ -1459,14 +1464,20 @@ void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
14591464

14601465
/* Choose operation */
14611466
if (ctx1_iv_off)
1462-
append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1467+
append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
14631468
OP_ALG_DECRYPT);
14641469
else
14651470
append_dec_op1(desc, cdata->algtype);
14661471

14671472
/* Perform operation */
14681473
skcipher_append_src_dst(desc);
14691474

1475+
/* Store IV */
1476+
if (ivsize)
1477+
append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1478+
LDST_CLASS_1_CCB | (ctx1_iv_off <<
1479+
LDST_OFFSET_SHIFT));
1480+
14701481
print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
14711482
DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
14721483
1);
@@ -1516,6 +1527,10 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
15161527
/* Perform operation */
15171528
skcipher_append_src_dst(desc);
15181529

1530+
/* Store upper 8B of IV */
1531+
append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1532+
(0x20 << LDST_OFFSET_SHIFT));
1533+
15191534
print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
15201535
": ", DUMP_PREFIX_ADDRESS, 16, 4,
15211536
desc, desc_bytes(desc), 1);
@@ -1564,6 +1579,10 @@ void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
15641579
/* Perform operation */
15651580
skcipher_append_src_dst(desc);
15661581

1582+
/* Store upper 8B of IV */
1583+
append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1584+
(0x20 << LDST_OFFSET_SHIFT));
1585+
15671586
print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
15681587
": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
15691588
desc_bytes(desc), 1);

drivers/crypto/caam/caamalg_desc.h

+2-2
Original file line numberDiff line numberDiff line change
@@ -44,9 +44,9 @@
4444

4545
#define DESC_SKCIPHER_BASE (3 * CAAM_CMD_SZ)
4646
#define DESC_SKCIPHER_ENC_LEN (DESC_SKCIPHER_BASE + \
47-
20 * CAAM_CMD_SZ)
47+
21 * CAAM_CMD_SZ)
4848
#define DESC_SKCIPHER_DEC_LEN (DESC_SKCIPHER_BASE + \
49-
15 * CAAM_CMD_SZ)
49+
16 * CAAM_CMD_SZ)
5050

5151
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
5252
unsigned int icvsize, int era);

0 commit comments

Comments
 (0)