crypto: nx - add offset to nx_build_sg_lists()

This patch adds one more parameter to nx_build_sg_lists() to skip the
given number of bytes from the beginning of each sg list.

This is needed to implement the fixes for the AES modes so that they
can process larger chunks of data.

Reviewed-by: Joy Latten <jmlatten@linux.vnet.ibm.com>
Signed-off-by: Marcelo Cerri <mhcerri@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Author:    Marcelo Cerri <mhcerri@linux.vnet.ibm.com>
Date:      2013-08-29 11:36:31 -03:00
Committer: Herbert Xu
commit a8fc391a15
parent b8b4a4166e
7 changed files with 14 additions and 9 deletions
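
As a rough illustration of why the offset is useful, a caller can split a
request that exceeds the co-processor limit into chunks and resume each
iteration partway into the same scatterlists. The loop below is a hedged
sketch, not code from this commit: the to_process/processed names and the
chunking structure are illustrative, and nx_ctx->ap->databytelen is assumed
to be the per-operation limit.

/*
 * Hedged sketch (not from this commit): split a large request into
 * several NX operations, using the new offset argument to
 * fast-forward past the bytes handled by earlier iterations.
 */
unsigned int processed = 0, to_process;
int rc;

do {
	/* Assumed per-operation limit exposed by the driver. */
	to_process = min_t(u64, nbytes - processed,
			   nx_ctx->ap->databytelen);

	/* Build in/out sg lists starting "processed" bytes into
	 * the caller's scatterlists. */
	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, to_process,
			       processed, csbcpb->cpb.aes_cbc.iv);
	if (rc)
		goto out;

	/* ... submit the NX operation for this chunk ... */

	processed += to_process;
} while (processed < nbytes);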

--- a/drivers/crypto/nx/nx-aes-cbc.c
+++ b/drivers/crypto/nx/nx-aes-cbc.c
@@ -85,7 +85,7 @@ static int cbc_aes_nx_crypt(struct blkcipher_desc *desc,
 	else
 		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
 
-	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, nbytes,
+	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, nbytes, 0,
 			       csbcpb->cpb.aes_cbc.iv);
 	if (rc)
 		goto out;

--- a/drivers/crypto/nx/nx-aes-ccm.c
+++ b/drivers/crypto/nx/nx-aes-ccm.c
@@ -293,7 +293,7 @@ static int ccm_nx_decrypt(struct aead_request *req,
 	if (rc)
 		goto out;
 
-	rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, nbytes,
+	rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, nbytes, 0,
 			       csbcpb->cpb.aes_ccm.iv_or_ctr);
 	if (rc)
 		goto out;
@@ -339,7 +339,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
 	if (rc)
 		goto out;
 
-	rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, nbytes,
+	rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, nbytes, 0,
 			       csbcpb->cpb.aes_ccm.iv_or_ctr);
 	if (rc)
 		goto out;

--- a/drivers/crypto/nx/nx-aes-ctr.c
+++ b/drivers/crypto/nx/nx-aes-ctr.c
@@ -98,7 +98,7 @@ static int ctr_aes_nx_crypt(struct blkcipher_desc *desc,
 		goto out;
 	}
 
-	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, nbytes,
+	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, nbytes, 0,
 			       csbcpb->cpb.aes_ctr.iv);
 	if (rc)
 		goto out;

--- a/drivers/crypto/nx/nx-aes-ecb.c
+++ b/drivers/crypto/nx/nx-aes-ecb.c
@@ -85,7 +85,7 @@ static int ecb_aes_nx_crypt(struct blkcipher_desc *desc,
 	else
 		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
 
-	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, nbytes, NULL);
+	rc = nx_build_sg_lists(nx_ctx, desc, dst, src, nbytes, 0, NULL);
 	if (rc)
 		goto out;
 

--- a/drivers/crypto/nx/nx-aes-gcm.c
+++ b/drivers/crypto/nx/nx-aes-gcm.c
@@ -226,7 +226,7 @@ static int gcm_aes_nx_crypt(struct aead_request *req, int enc)
 	csbcpb->cpb.aes_gcm.bit_length_data = nbytes * 8;
 
-	rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, req->src, nbytes,
+	rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, req->src, nbytes, 0,
 			       csbcpb->cpb.aes_gcm.iv_or_cnt);
 	if (rc)
 		goto out;
 

--- a/drivers/crypto/nx/nx.c
+++ b/drivers/crypto/nx/nx.c
@@ -211,6 +211,8 @@ struct nx_sg *nx_walk_and_build(struct nx_sg *nx_dst,
  * @dst: destination scatterlist
  * @src: source scatterlist
  * @nbytes: length of data described in the scatterlists
+ * @offset: number of bytes to fast-forward past at the beginning of
+ *          scatterlists.
  * @iv: destination for the iv data, if the algorithm requires it
  *
  * This is common code shared by all the AES algorithms. It uses the block
@@ -222,6 +224,7 @@ int nx_build_sg_lists(struct nx_crypto_ctx *nx_ctx,
 		      struct scatterlist *dst,
 		      struct scatterlist *src,
 		      unsigned int nbytes,
+		      unsigned int offset,
 		      u8 *iv)
 {
 	struct nx_sg *nx_insg = nx_ctx->in_sg;
@@ -230,8 +233,10 @@ int nx_build_sg_lists(struct nx_crypto_ctx *nx_ctx,
 	if (iv)
 		memcpy(iv, desc->info, AES_BLOCK_SIZE);
 
-	nx_insg = nx_walk_and_build(nx_insg, nx_ctx->ap->sglen, src, 0, nbytes);
-	nx_outsg = nx_walk_and_build(nx_outsg, nx_ctx->ap->sglen, dst, 0, nbytes);
+	nx_insg = nx_walk_and_build(nx_insg, nx_ctx->ap->sglen, src,
+				    offset, nbytes);
+	nx_outsg = nx_walk_and_build(nx_outsg, nx_ctx->ap->sglen, dst,
+				    offset, nbytes);
 
 	/* these lengths should be negative, which will indicate to phyp that
 	 * the input and output parameters are scatterlists, not linear
--- a/drivers/crypto/nx/nx.h
+++ b/drivers/crypto/nx/nx.h
@@ -156,7 +156,7 @@ int nx_hcall_sync(struct nx_crypto_ctx *ctx, struct vio_pfo_op *op,
 struct nx_sg *nx_build_sg_list(struct nx_sg *, u8 *, unsigned int, u32);
 int nx_build_sg_lists(struct nx_crypto_ctx *, struct blkcipher_desc *,
 		      struct scatterlist *, struct scatterlist *, unsigned int,
-		      u8 *);
+		      unsigned int, u8 *);
 struct nx_sg *nx_walk_and_build(struct nx_sg *, unsigned int,
 				struct scatterlist *, unsigned int,
 				unsigned int);