author		Leonidas Da Silva Barbosa <leosilva@linux.vnet.ibm.com>	2015-04-23 16:41:43 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2015-04-26 02:33:19 -0400
commit		10d87b730e1d9f1442cae6487bb3aef8632bed23 (patch)
tree		4bffed8c0ee49a629ae7805334a2e56a52fec4c8 /drivers/crypto
parent		c3365ce130e50176533debe1cabebcdb8e61156c (diff)
crypto: nx - Fixing SHA update bug
The bug happens when a data size smaller than the SHA block size is passed. Since the first chunk is saved in the buffer, the second-round attempt takes two steps to calculate op.inlen and op.outlen, and the issue resides in those steps: wrong values of op.inlen and op.outlen were being calculated.

This patch fixes that by eliminating nx_sha_build_sg_list(), which is useless in the SHA algorithms' context. Instead, we call nx_build_sg_list() directly and pass a previously calculated max_sg_len to it.

Signed-off-by: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
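For illustration only, here is a minimal user-space sketch (not driver code) of the sizing pattern this patch inlines into each init/update/final path: clamp the scatter/gather entry budget twice, build the list, then derive the operand length from the distance between the list head and the pointer returned by the list builder. The struct layout, the mock_build_sg_list() helper, and every constant below are stand-ins invented for this sketch; only the two min-clamps and the head-minus-end subtraction mirror the patch. The head-minus-end order makes op.inlen negative, which, assuming the convention documented in nx.c still holds, is how the driver marks an operand as a scatter/gather list rather than a linear buffer.

#include <stdint.h>
#include <stdio.h>

#define NX_PAGE_SIZE 4096u

/* 16 bytes, shaped like the driver's nx_sg for this sketch */
struct nx_sg {
	uint64_t addr;
	uint32_t rsvd;
	uint32_t len;
};

#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* Stand-in for nx_build_sg_list(): fill entries page by page and return
 * the element one past the last one used; *len is reduced to what fit. */
static struct nx_sg *mock_build_sg_list(struct nx_sg *head, uint8_t *buf,
					unsigned int *len, uint32_t sgmax)
{
	struct nx_sg *sg = head;
	unsigned int left = *len;

	while (left && (uint32_t)(sg - head) < sgmax) {
		sg->addr = (uint64_t)(uintptr_t)buf;
		sg->len = MIN(left, NX_PAGE_SIZE);
		buf += sg->len;
		left -= sg->len;
		sg++;
	}
	*len -= left;
	return sg;
}

int main(void)
{
	static uint8_t data[3 * NX_PAGE_SIZE];
	struct nx_sg in_list[32];
	unsigned int data_len = sizeof(data);

	/* the double clamp the patch adds to every init/update/final;
	 * sglen, max_sg_len and databytelen values here are made up */
	uint32_t sglen = 32;
	uint32_t of_max_sg_len = 20 * sizeof(struct nx_sg);
	uint64_t databytelen = 128 * NX_PAGE_SIZE;

	uint32_t max_sg_len = MIN(sglen,
				  of_max_sg_len / sizeof(struct nx_sg));
	max_sg_len = MIN(max_sg_len, databytelen / NX_PAGE_SIZE);

	struct nx_sg *in_sg = mock_build_sg_list(in_list, data,
						 &data_len, max_sg_len);

	/* head minus end, so the result is negative on purpose */
	int64_t op_inlen = (in_list - in_sg) * (int64_t)sizeof(struct nx_sg);

	printf("built %td entries, consumed %u bytes, op.inlen = %lld\n",
	       in_sg - in_list, data_len, (long long)op_inlen);
	return 0;
}

Compiled with any C99 compiler, this prints 3 entries, 12288 bytes consumed, and op.inlen = -48.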
Diffstat (limited to 'drivers/crypto')
-rw-r--r--	drivers/crypto/nx/nx-sha256.c	84
-rw-r--r--	drivers/crypto/nx/nx-sha512.c	85
-rw-r--r--	drivers/crypto/nx/nx.c		47
-rw-r--r--	drivers/crypto/nx/nx.h		2
4 files changed, 99 insertions(+), 119 deletions(-)
diff --git a/drivers/crypto/nx/nx-sha256.c b/drivers/crypto/nx/nx-sha256.c
index 23621da624c3..4e91bdb83c59 100644
--- a/drivers/crypto/nx/nx-sha256.c
+++ b/drivers/crypto/nx/nx-sha256.c
@@ -33,8 +33,9 @@ static int nx_sha256_init(struct shash_desc *desc)
 {
 	struct sha256_state *sctx = shash_desc_ctx(desc);
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
+	struct nx_sg *out_sg;
 	int len;
-	int rc;
+	u32 max_sg_len;
 
 	nx_ctx_init(nx_ctx, HCOP_FC_SHA);
 
@@ -44,15 +45,18 @@ static int nx_sha256_init(struct shash_desc *desc)
 
 	NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256);
 
+	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
+			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
+	max_sg_len = min_t(u64, max_sg_len,
+			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+
 	len = SHA256_DIGEST_SIZE;
-	rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->out_sg,
-				  &nx_ctx->op.outlen,
-				  &len,
-				  (u8 *) sctx->state,
-				  NX_DS_SHA256);
+	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
+				  &len, max_sg_len);
+	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
 
-	if (rc)
-		goto out;
+	if (len != SHA256_DIGEST_SIZE)
+		return -EINVAL;
 
 	sctx->state[0] = __cpu_to_be32(SHA256_H0);
 	sctx->state[1] = __cpu_to_be32(SHA256_H1);
@@ -64,7 +68,6 @@ static int nx_sha256_init(struct shash_desc *desc)
 	sctx->state[7] = __cpu_to_be32(SHA256_H7);
 	sctx->count = 0;
 
-out:
 	return 0;
 }
 
@@ -74,10 +77,12 @@ static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
 	struct sha256_state *sctx = shash_desc_ctx(desc);
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
 	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
+	struct nx_sg *in_sg;
 	u64 to_process = 0, leftover, total;
 	unsigned long irq_flags;
 	int rc = 0;
 	int data_len;
+	u32 max_sg_len;
 	u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE);
 
 	spin_lock_irqsave(&nx_ctx->lock, irq_flags);
@@ -97,6 +102,12 @@ static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
 	NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
 	NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;
 
+	in_sg = nx_ctx->in_sg;
+	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
+			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
+	max_sg_len = min_t(u64, max_sg_len,
+			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+
 	do {
 		/*
 		 * to_process: the SHA256_BLOCK_SIZE data chunk to process in
@@ -108,25 +119,22 @@ static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
 
 		if (buf_len) {
 			data_len = buf_len;
-			rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->in_sg,
-						  &nx_ctx->op.inlen,
-						  &data_len,
-						  (u8 *) sctx->buf,
-						  NX_DS_SHA256);
+			in_sg = nx_build_sg_list(nx_ctx->in_sg,
+						 (u8 *) sctx->buf,
+						 &data_len,
+						 max_sg_len);
 
-			if (rc || data_len != buf_len)
+			if (data_len != buf_len) {
+				rc = -EINVAL;
 				goto out;
+			}
 		}
 
 		data_len = to_process - buf_len;
-		rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->in_sg,
-					  &nx_ctx->op.inlen,
-					  &data_len,
-					  (u8 *) data,
-					  NX_DS_SHA256);
+		in_sg = nx_build_sg_list(in_sg, (u8 *) data,
+					 &data_len, max_sg_len);
 
-		if (rc)
-			goto out;
+		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
 
 		to_process = (data_len + buf_len);
 		leftover = total - to_process;
@@ -173,12 +181,19 @@ static int nx_sha256_final(struct shash_desc *desc, u8 *out)
 	struct sha256_state *sctx = shash_desc_ctx(desc);
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
 	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
+	struct nx_sg *in_sg, *out_sg;
 	unsigned long irq_flags;
-	int rc;
+	u32 max_sg_len;
+	int rc = 0;
 	int len;
 
 	spin_lock_irqsave(&nx_ctx->lock, irq_flags);
 
+	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
+			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
+	max_sg_len = min_t(u64, max_sg_len,
+			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+
 	/* final is represented by continuing the operation and indicating that
 	 * this is not an intermediate operation */
 	if (sctx->count >= SHA256_BLOCK_SIZE) {
@@ -195,25 +210,24 @@ static int nx_sha256_final(struct shash_desc *desc, u8 *out)
 	csbcpb->cpb.sha256.message_bit_length = (u64) (sctx->count * 8);
 
 	len = sctx->count & (SHA256_BLOCK_SIZE - 1);
-	rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->in_sg,
-				  &nx_ctx->op.inlen,
-				  &len,
-				  (u8 *) sctx->buf,
-				  NX_DS_SHA256);
+	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf,
+				 &len, max_sg_len);
 
-	if (rc || len != (sctx->count & (SHA256_BLOCK_SIZE - 1)))
+	if (len != (sctx->count & (SHA256_BLOCK_SIZE - 1))) {
+		rc = -EINVAL;
 		goto out;
+	}
 
 	len = SHA256_DIGEST_SIZE;
-	rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->out_sg,
-				  &nx_ctx->op.outlen,
-				  &len,
-				  out,
-				  NX_DS_SHA256);
+	out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len);
 
-	if (rc || len != SHA256_DIGEST_SIZE)
+	if (len != SHA256_DIGEST_SIZE) {
+		rc = -EINVAL;
 		goto out;
+	}
 
+	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
+	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
 	if (!nx_ctx->op.outlen) {
 		rc = -EINVAL;
 		goto out;
diff --git a/drivers/crypto/nx/nx-sha512.c b/drivers/crypto/nx/nx-sha512.c
index b3adf1022673..e6a58d2ee628 100644
--- a/drivers/crypto/nx/nx-sha512.c
+++ b/drivers/crypto/nx/nx-sha512.c
@@ -32,8 +32,9 @@ static int nx_sha512_init(struct shash_desc *desc)
 {
 	struct sha512_state *sctx = shash_desc_ctx(desc);
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
+	struct nx_sg *out_sg;
 	int len;
-	int rc;
+	u32 max_sg_len;
 
 	nx_ctx_init(nx_ctx, HCOP_FC_SHA);
 
@@ -43,15 +44,18 @@ static int nx_sha512_init(struct shash_desc *desc)
 
 	NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA512);
 
+	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
+			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
+	max_sg_len = min_t(u64, max_sg_len,
+			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+
 	len = SHA512_DIGEST_SIZE;
-	rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->out_sg,
-				  &nx_ctx->op.outlen,
-				  &len,
-				  (u8 *)sctx->state,
-				  NX_DS_SHA512);
+	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
+				  &len, max_sg_len);
+	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
 
-	if (rc || len != SHA512_DIGEST_SIZE)
-		goto out;
+	if (len != SHA512_DIGEST_SIZE)
+		return -EINVAL;
 
 	sctx->state[0] = __cpu_to_be64(SHA512_H0);
 	sctx->state[1] = __cpu_to_be64(SHA512_H1);
@@ -63,7 +67,6 @@ static int nx_sha512_init(struct shash_desc *desc)
 	sctx->state[7] = __cpu_to_be64(SHA512_H7);
 	sctx->count[0] = 0;
 
-out:
 	return 0;
 }
 
@@ -73,10 +76,12 @@ static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
 	struct sha512_state *sctx = shash_desc_ctx(desc);
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
 	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
+	struct nx_sg *in_sg;
 	u64 to_process, leftover = 0, total;
 	unsigned long irq_flags;
 	int rc = 0;
 	int data_len;
+	u32 max_sg_len;
 	u64 buf_len = (sctx->count[0] % SHA512_BLOCK_SIZE);
 
 	spin_lock_irqsave(&nx_ctx->lock, irq_flags);
@@ -96,6 +101,12 @@ static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
 	NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
 	NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;
 
+	in_sg = nx_ctx->in_sg;
+	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
+			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
+	max_sg_len = min_t(u64, max_sg_len,
+			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+
 	do {
 		/*
 		 * to_process: the SHA512_BLOCK_SIZE data chunk to process in
@@ -108,25 +119,26 @@ static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
 
 		if (buf_len) {
 			data_len = buf_len;
-			rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->in_sg,
-						  &nx_ctx->op.inlen,
-						  &data_len,
-						  (u8 *) sctx->buf,
-						  NX_DS_SHA512);
+			in_sg = nx_build_sg_list(nx_ctx->in_sg,
+						 (u8 *) sctx->buf,
+						 &data_len, max_sg_len);
 
-			if (rc || data_len != buf_len)
+			if (data_len != buf_len) {
+				rc = -EINVAL;
 				goto out;
+			}
 		}
 
 		data_len = to_process - buf_len;
-		rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->in_sg,
-					  &nx_ctx->op.inlen,
-					  &data_len,
-					  (u8 *) data,
-					  NX_DS_SHA512);
+		in_sg = nx_build_sg_list(in_sg, (u8 *) data,
+					 &data_len, max_sg_len);
 
-		if (rc || data_len != (to_process - buf_len))
+		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
+
+		if (data_len != (to_process - buf_len)) {
+			rc = -EINVAL;
 			goto out;
+		}
 
 		to_process = (data_len + buf_len);
 		leftover = total - to_process;
@@ -172,13 +184,20 @@ static int nx_sha512_final(struct shash_desc *desc, u8 *out)
 	struct sha512_state *sctx = shash_desc_ctx(desc);
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
 	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
+	struct nx_sg *in_sg, *out_sg;
+	u32 max_sg_len;
 	u64 count0;
 	unsigned long irq_flags;
-	int rc;
+	int rc = 0;
 	int len;
 
 	spin_lock_irqsave(&nx_ctx->lock, irq_flags);
 
+	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
+			   nx_driver.of.max_sg_len/sizeof(struct nx_sg));
+	max_sg_len = min_t(u64, max_sg_len,
+			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+
 	/* final is represented by continuing the operation and indicating that
 	 * this is not an intermediate operation */
 	if (sctx->count[0] >= SHA512_BLOCK_SIZE) {
@@ -200,24 +219,20 @@ static int nx_sha512_final(struct shash_desc *desc, u8 *out)
 	csbcpb->cpb.sha512.message_bit_length_lo = count0;
 
 	len = sctx->count[0] & (SHA512_BLOCK_SIZE - 1);
-	rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->in_sg,
-				  &nx_ctx->op.inlen,
-				  &len,
-				  (u8 *)sctx->buf,
-				  NX_DS_SHA512);
+	in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len,
+				 max_sg_len);
 
-	if (rc || len != (sctx->count[0] & (SHA512_BLOCK_SIZE - 1)))
+	if (len != (sctx->count[0] & (SHA512_BLOCK_SIZE - 1))) {
+		rc = -EINVAL;
 		goto out;
+	}
 
 	len = SHA512_DIGEST_SIZE;
-	rc = nx_sha_build_sg_list(nx_ctx, nx_ctx->out_sg,
-				  &nx_ctx->op.outlen,
-				  &len,
-				  out,
-				  NX_DS_SHA512);
+	out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,
+				  max_sg_len);
 
-	if (rc)
-		goto out;
+	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
+	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
 
 	if (!nx_ctx->op.outlen) {
 		rc = -EINVAL;
diff --git a/drivers/crypto/nx/nx.c b/drivers/crypto/nx/nx.c
index 4856f7287eae..2e2529ce8d31 100644
--- a/drivers/crypto/nx/nx.c
+++ b/drivers/crypto/nx/nx.c
@@ -252,53 +252,6 @@ static long int trim_sg_list(struct nx_sg *sg,
 }
 
 /**
- * nx_sha_build_sg_list - walk and build sg list to sha modes
- *			  using right bounds and limits.
- * @nx_ctx: NX crypto context for the lists we're building
- * @nx_sg: current sg list in or out list
- * @op_len: current op_len to be used in order to build a sg list
- * @nbytes: number or bytes to be processed
- * @offset: buf offset
- * @mode: SHA256 or SHA512
- */
-int nx_sha_build_sg_list(struct nx_crypto_ctx *nx_ctx,
-			 struct nx_sg *nx_in_outsg,
-			 s64 *op_len,
-			 unsigned int *nbytes,
-			 u8 *offset,
-			 u32 mode)
-{
-	unsigned int delta = 0;
-	unsigned int total = *nbytes;
-	struct nx_sg *nx_insg = nx_in_outsg;
-	unsigned int max_sg_len;
-
-	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
-			nx_driver.of.max_sg_len/sizeof(struct nx_sg));
-	max_sg_len = min_t(u64, max_sg_len,
-			nx_ctx->ap->databytelen/NX_PAGE_SIZE);
-
-	*nbytes = min_t(u64, *nbytes, nx_ctx->ap->databytelen);
-	nx_insg = nx_build_sg_list(nx_insg, offset, nbytes, max_sg_len);
-
-	switch (mode) {
-	case NX_DS_SHA256:
-		if (*nbytes < total)
-			delta = *nbytes - (*nbytes & ~(SHA256_BLOCK_SIZE - 1));
-		break;
-	case NX_DS_SHA512:
-		if (*nbytes < total)
-			delta = *nbytes - (*nbytes & ~(SHA512_BLOCK_SIZE - 1));
-		break;
-	default:
-		return -EINVAL;
-	}
-	*op_len = trim_sg_list(nx_in_outsg, nx_insg, delta);
-
-	return 0;
-}
-
-/**
  * nx_build_sg_lists - walk the input scatterlists and build arrays of NX
  *                     scatterlists based on them.
  *
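As an aside, the block-rounding arithmetic in the helper removed above is easy to misread, so here is a standalone sketch of just that computation, with made-up byte counts: when fewer bytes fit into the sg list than were requested, delta is the tail hanging past the last whole SHA-256 block, which trim_sg_list() would then drop.

#include <stdio.h>

#define SHA256_BLOCK_SIZE 64u

int main(void)
{
	unsigned int total  = 200;	/* bytes the caller asked for */
	unsigned int nbytes = 150;	/* bytes that actually fit */
	unsigned int delta  = 0;

	if (nbytes < total)
		/* 150 & ~63 = 128, so 22 bytes hang past the boundary */
		delta = nbytes - (nbytes & ~(SHA256_BLOCK_SIZE - 1));

	printf("delta = %u, block-aligned bytes = %u\n",
	       delta, nbytes - delta);
	return 0;
}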
diff --git a/drivers/crypto/nx/nx.h b/drivers/crypto/nx/nx.h
index 6c9ecaaead52..41b87ee03fe2 100644
--- a/drivers/crypto/nx/nx.h
+++ b/drivers/crypto/nx/nx.h
@@ -153,8 +153,6 @@ void nx_crypto_ctx_exit(struct crypto_tfm *tfm);
 void nx_ctx_init(struct nx_crypto_ctx *nx_ctx, unsigned int function);
 int nx_hcall_sync(struct nx_crypto_ctx *ctx, struct vio_pfo_op *op,
 		  u32 may_sleep);
-int nx_sha_build_sg_list(struct nx_crypto_ctx *, struct nx_sg *,
-			 s64 *, unsigned int *, u8 *, u32);
 struct nx_sg *nx_build_sg_list(struct nx_sg *, u8 *, unsigned int *, u32);
 int nx_build_sg_lists(struct nx_crypto_ctx *, struct blkcipher_desc *,
 		      struct scatterlist *, struct scatterlist *, unsigned int *,