{
struct crypto_async_request *async_req = data;
struct aead_request *req = aead_request_cast(async_req);
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
struct qce_device *qce = tmpl->qce;
static struct scatterlist *
qce_aead_prepare_result_buf(struct sg_table *tbl, struct aead_request *req)
{
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
struct qce_device *qce = tmpl->qce;
static struct scatterlist *
qce_aead_prepare_ccm_result_buf(struct sg_table *tbl, struct aead_request *req)
{
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
sg_init_one(&rctx->result_sg, rctx->ccmresult_buf, QCE_BAM_BURST_SIZE);
return qce_sgtable_add(tbl, &rctx->result_sg, QCE_BAM_BURST_SIZE);
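/*
 * Annotation (not part of the patch): ccmresult_buf lives inside
 * struct qce_aead_reqctx and is fed straight to the DMA engine through
 * this scatterlist entry. That is why the request context has to come
 * from aead_request_ctx_dma(), which returns a pointer padded out to the
 * platform's DMA alignment instead of only crypto_tfm_ctx_alignment().
 */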
static struct scatterlist *
qce_aead_prepare_dst_buf(struct aead_request *req)
{
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
struct qce_device *qce = tmpl->qce;
struct scatterlist *sg, *msg_sg, __sg[2];
{
struct scatterlist *sg, *msg_sg, __sg[2];
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_aead_ctx *ctx = crypto_aead_ctx(tfm);
unsigned int assoclen = rctx->assoclen;
unsigned int adata_header_len, cryptlen, totallen;
static int qce_aead_prepare_buf(struct aead_request *req)
{
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
struct qce_device *qce = tmpl->qce;
struct scatterlist *sg;
static int qce_aead_ccm_prepare_buf(struct aead_request *req)
{
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
struct qce_aead_ctx *ctx = crypto_aead_ctx(tfm);
struct scatterlist *sg;
qce_aead_async_req_handle(struct crypto_async_request *async_req)
{
struct aead_request *req = aead_request_cast(async_req);
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
static int qce_aead_crypt(struct aead_request *req, int encrypt)
{
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_aead_ctx *ctx = crypto_aead_ctx(tfm);
struct qce_alg_template *tmpl = to_aead_tmpl(tfm);
unsigned int blocksize = crypto_aead_blocksize(tfm);
if (IS_ERR(ctx->fallback))
return PTR_ERR(ctx->fallback);
- crypto_aead_set_reqsize(tfm, sizeof(struct qce_aead_reqctx) +
- crypto_aead_reqsize(ctx->fallback));
+ crypto_aead_set_reqsize_dma(tfm, sizeof(struct qce_aead_reqctx) +
+ crypto_aead_reqsize(ctx->fallback));
return 0;
}
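The aead.c hunks above change both halves of one contract: the tfm init hook shown last registers the context size through crypto_aead_set_reqsize_dma(), and every consumer then fetches the context through aead_request_ctx_dma(), so the extra padding the setter reserves is skipped consistently. A minimal sketch of that pairing follows; the example_* names and the small request-context struct are hypothetical and only illustrate the pattern, assuming the _dma helpers provided by <crypto/internal/aead.h> on kernels that have them.

#include <linux/types.h>
#include <crypto/aead.h>
#include <crypto/internal/aead.h>

/* Hypothetical request context; in the driver this is struct qce_aead_reqctx. */
struct example_aead_reqctx {
	u8 result_buf[64];		/* written by the DMA engine */
	unsigned int cryptlen;
};

static int example_aead_init(struct crypto_aead *tfm)
{
	/* Reserve the context plus the extra DMA-alignment padding. */
	crypto_aead_set_reqsize_dma(tfm, sizeof(struct example_aead_reqctx));
	return 0;
}

static int example_aead_encrypt(struct aead_request *req)
{
	/* Paired accessor: skips the padding the _dma setter reserved. */
	struct example_aead_reqctx *rctx = aead_request_ctx_dma(req);

	rctx->cryptlen = req->cryptlen;
	/* ... hand rctx->result_buf to the hardware ... */
	return 0;
}

When a fallback is stacked on top, its reqsize is simply added to the driver's own, exactly as the hunk above does with crypto_aead_reqsize(ctx->fallback).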
* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
*/
+#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/types.h>
{
struct ahash_request *req = ahash_request_cast(async_req);
struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
struct qce_device *qce = tmpl->qce;
unsigned int digestsize = crypto_ahash_digestsize(ahash);
static int qce_setup_regs_aead(struct crypto_async_request *async_req)
{
struct aead_request *req = aead_request_cast(async_req);
- struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+ struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
struct qce_device *qce = tmpl->qce;
struct crypto_async_request *async_req = data;
struct ahash_request *req = ahash_request_cast(async_req);
struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
struct qce_device *qce = tmpl->qce;
struct qce_result_dump *result = qce->dma.result_buf;
static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)
{
struct ahash_request *req = ahash_request_cast(async_req);
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
struct qce_device *qce = tmpl->qce;
static int qce_ahash_init(struct ahash_request *req)
{
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
const u32 *std_iv = tmpl->std_iv;
static int qce_ahash_export(struct ahash_request *req, void *out)
{
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_sha_saved_state *export_state = out;
memcpy(export_state->pending_buf, rctx->buf, rctx->buflen);
static int qce_ahash_import(struct ahash_request *req, const void *in)
{
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
const struct qce_sha_saved_state *import_state = in;
memset(rctx, 0, sizeof(*rctx));
static int qce_ahash_update(struct ahash_request *req)
{
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
struct qce_device *qce = tmpl->qce;
struct scatterlist *sg_last, *sg;
static int qce_ahash_final(struct ahash_request *req)
{
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
struct qce_device *qce = tmpl->qce;
static int qce_ahash_digest(struct ahash_request *req)
{
- struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
+ struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
struct qce_device *qce = tmpl->qce;
int ret;
struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
struct qce_sha_ctx *ctx = crypto_tfm_ctx(tfm);
- crypto_ahash_set_reqsize(ahash, sizeof(struct qce_sha_reqctx));
+ crypto_ahash_set_reqsize_dma(ahash, sizeof(struct qce_sha_reqctx));
memset(ctx, 0, sizeof(*ctx));
return 0;
}
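The common.c and sha.c hunks apply the same discipline on the hash side: the size registered at tfm-init time goes through crypto_ahash_set_reqsize_dma(), and every lookup of struct qce_sha_reqctx goes through ahash_request_ctx_dma(). A rough sketch of that pairing, with hypothetical example_* names and a made-up context struct, assuming the helpers in <crypto/internal/hash.h>:

#include <linux/string.h>
#include <linux/types.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>

/* Hypothetical hash request context; in the driver this is struct qce_sha_reqctx. */
struct example_sha_reqctx {
	u8 digest[64];			/* result area the DMA engine writes */
	unsigned int count;
};

static int example_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);

	/* Account for the DMA-alignment padding when sizing the request. */
	crypto_ahash_set_reqsize_dma(ahash, sizeof(struct example_sha_reqctx));
	return 0;
}

static int example_ahash_init(struct ahash_request *req)
{
	/* Must pair with the _dma setter above. */
	struct example_sha_reqctx *rctx = ahash_request_ctx_dma(req);

	memset(rctx, 0, sizeof(*rctx));
	return 0;
}

Mixing the two families (a _dma setter with a plain accessor, or the reverse) would either hand the engine a misaligned pointer or run past the space the core actually allocated, which is why the patch converts the setter and every call site together.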