mirror of
https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git
synced 2025-01-17 13:58:46 +00:00
crypto: rockchip - store crypto_info in request context
The crypto_info to use must be stored in the request context. This will help when two crypto_info instances become available on the rk3399. Signed-off-by: Corentin Labbe <clabbe@baylibre.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
parent
d1b5749687
commit
2d3c756adc
@ -215,6 +215,7 @@ struct rk_ahash_ctx {
|
||||
|
||||
/* the private variable of hash for fallback */
|
||||
struct rk_ahash_rctx {
|
||||
struct rk_crypto_info *dev;
|
||||
struct ahash_request fallback_req;
|
||||
u32 mode;
|
||||
int nrsg;
|
||||
@ -231,6 +232,7 @@ struct rk_cipher_ctx {
|
||||
};
|
||||
|
||||
struct rk_cipher_rctx {
|
||||
struct rk_crypto_info *dev;
|
||||
u8 backup_iv[AES_BLOCK_SIZE];
|
||||
u32 mode;
|
||||
struct skcipher_request fallback_req; // keep at the end
|
||||
|
@ -200,6 +200,7 @@ static int rk_ahash_export(struct ahash_request *req, void *out)
|
||||
|
||||
static int rk_ahash_digest(struct ahash_request *req)
|
||||
{
|
||||
struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
|
||||
struct rk_ahash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);
|
||||
struct rk_crypto_info *dev = tctx->dev;
|
||||
|
||||
@ -209,6 +210,8 @@ static int rk_ahash_digest(struct ahash_request *req)
|
||||
if (!req->nbytes)
|
||||
return zero_message_process(req);
|
||||
|
||||
rctx->dev = dev;
|
||||
|
||||
return crypto_transfer_hash_request_to_engine(dev->engine, req);
|
||||
}
|
||||
|
||||
@ -223,10 +226,8 @@ static void crypto_ahash_dma_start(struct rk_crypto_info *dev, struct scatterlis
|
||||
static int rk_hash_prepare(struct crypto_engine *engine, void *breq)
|
||||
{
|
||||
struct ahash_request *areq = container_of(breq, struct ahash_request, base);
|
||||
struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
|
||||
struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
|
||||
struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
|
||||
struct rk_crypto_info *rkc = tctx->dev;
|
||||
struct rk_crypto_info *rkc = rctx->dev;
|
||||
int ret;
|
||||
|
||||
ret = dma_map_sg(rkc->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
|
||||
@ -241,10 +242,8 @@ static int rk_hash_prepare(struct crypto_engine *engine, void *breq)
|
||||
static int rk_hash_unprepare(struct crypto_engine *engine, void *breq)
|
||||
{
|
||||
struct ahash_request *areq = container_of(breq, struct ahash_request, base);
|
||||
struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
|
||||
struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
|
||||
struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
|
||||
struct rk_crypto_info *rkc = tctx->dev;
|
||||
struct rk_crypto_info *rkc = rctx->dev;
|
||||
|
||||
dma_unmap_sg(rkc->dev, areq->src, rctx->nrsg, DMA_TO_DEVICE);
|
||||
return 0;
|
||||
@ -255,11 +254,10 @@ static int rk_hash_run(struct crypto_engine *engine, void *breq)
|
||||
struct ahash_request *areq = container_of(breq, struct ahash_request, base);
|
||||
struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
|
||||
struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
|
||||
struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
|
||||
struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
|
||||
struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.hash);
|
||||
struct scatterlist *sg = areq->src;
|
||||
struct rk_crypto_info *rkc = tctx->dev;
|
||||
struct rk_crypto_info *rkc = rctx->dev;
|
||||
int err = 0;
|
||||
int i;
|
||||
u32 v;
|
||||
|
@ -86,12 +86,15 @@ static int rk_cipher_handle_req(struct skcipher_request *req)
|
||||
{
|
||||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct rk_cipher_ctx *tctx = crypto_skcipher_ctx(tfm);
|
||||
struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
|
||||
struct rk_crypto_info *rkc = tctx->dev;
|
||||
struct crypto_engine *engine = rkc->engine;
|
||||
|
||||
if (rk_cipher_need_fallback(req))
|
||||
return rk_cipher_fallback(req);
|
||||
|
||||
rctx->dev = rkc;
|
||||
|
||||
return crypto_transfer_skcipher_request_to_engine(engine, req);
|
||||
}
|
||||
|
||||
@ -290,7 +293,6 @@ static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
|
||||
{
|
||||
struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
|
||||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
|
||||
struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
|
||||
struct scatterlist *sgs, *sgd;
|
||||
int err = 0;
|
||||
@ -303,7 +305,7 @@ static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
|
||||
unsigned int todo;
|
||||
struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
|
||||
struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
|
||||
struct rk_crypto_info *rkc = ctx->dev;
|
||||
struct rk_crypto_info *rkc = rctx->dev;
|
||||
|
||||
algt->stat_req++;
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user