author    Kent Yoder <key@linux.vnet.ibm.com>    2012-05-14 07:05:59 -0400
committer Benjamin Herrenschmidt <benh@kernel.crashing.org>    2012-05-16 01:05:43 -0400
commit    f2a15f1d5d36004236dab8184593aa8eef3949ae (patch)
tree      8ff9d2d7ff5e70e1eb593380f2809633bb8a0e9c /drivers/crypto/nx
parent    cc6250896a914f5604cbc3c0fbd2c5650972a7a6 (diff)
powerpc/crypto: AES-GCM mode routines for nx encryption
These routines add support for AES in GCM mode on the Power7+ CPU's in-Nest accelerator driver.

Signed-off-by: Kent Yoder <key@linux.vnet.ibm.com>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
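For readers coming at this from the crypto API side, the sketch below shows how a kernel caller might exercise the "gcm(aes)" transform that this patch registers, using the AEAD interface as it existed in this era (crypto_alloc_aead(), aead_request_set_assoc(), and friends). It is illustrative only, not part of the patch: the example_gcm_aes_encrypt() name, the buffer sizes, and the purely synchronous error handling are assumptions made up for the example.

/*
 * Illustrative sketch only (not part of this patch): driving the
 * "gcm(aes)" AEAD transform from kernel code with the era-appropriate
 * API.  A real caller would also handle -EINPROGRESS/-EBUSY from an
 * asynchronous implementation via a completion callback; that is
 * omitted here for brevity.
 */
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>
#include <linux/slab.h>

static int example_gcm_aes_encrypt(void)
{
        struct crypto_aead *tfm;
        struct aead_request *req;
        struct scatterlist sg_src, sg_dst, sg_assoc;
        u8 key[16] = { 0 };     /* AES-128 key */
        u8 iv[16] = { 0 };      /* this driver reads the first 12 bytes as the GCM nonce */
        u8 assoc[16] = { 0 };   /* associated data */
        u8 src[64] = { 0 };     /* plaintext */
        u8 dst[64 + 16];        /* ciphertext plus 16-byte auth tag */
        int rc;

        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        rc = crypto_aead_setkey(tfm, key, sizeof(key));
        if (!rc)
                rc = crypto_aead_setauthsize(tfm, 16);
        if (rc)
                goto free_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                rc = -ENOMEM;
                goto free_tfm;
        }

        sg_init_one(&sg_src, src, sizeof(src));
        sg_init_one(&sg_dst, dst, sizeof(dst));
        sg_init_one(&sg_assoc, assoc, sizeof(assoc));

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
        aead_request_set_assoc(req, &sg_assoc, sizeof(assoc));
        aead_request_set_crypt(req, &sg_src, &sg_dst, sizeof(src), iv);

        rc = crypto_aead_encrypt(req);

        aead_request_free(req);
free_tfm:
        crypto_free_aead(tfm);
        return rc;
}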
Diffstat (limited to 'drivers/crypto/nx')
-rw-r--r--  drivers/crypto/nx/nx-aes-gcm.c  353
1 file changed, 353 insertions(+), 0 deletions(-)
diff --git a/drivers/crypto/nx/nx-aes-gcm.c b/drivers/crypto/nx/nx-aes-gcm.c
new file mode 100644
index 000000000000..9ab1c7341dac
--- /dev/null
+++ b/drivers/crypto/nx/nx-aes-gcm.c
@@ -0,0 +1,353 @@
/**
 * AES GCM routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2012 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/internal/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"

static int gcm_aes_nx_set_key(struct crypto_aead *tfm,
                              const u8 *in_key,
                              unsigned int key_len)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
        struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
        struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;

        nx_ctx_init(nx_ctx, HCOP_FC_AES);

        switch (key_len) {
        case AES_KEYSIZE_128:
                NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
                NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_128);
                nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
                break;
        case AES_KEYSIZE_192:
                NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
                NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_192);
                nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
                break;
        case AES_KEYSIZE_256:
                NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
                NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_256);
                nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
                break;
        default:
                return -EINVAL;
        }

        csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;
        memcpy(csbcpb->cpb.aes_gcm.key, in_key, key_len);

        csbcpb_aead->cpb.hdr.mode = NX_MODE_AES_GCA;
        memcpy(csbcpb_aead->cpb.aes_gca.key, in_key, key_len);

        return 0;
}

static int gcm4106_aes_nx_set_key(struct crypto_aead *tfm,
                                  const u8 *in_key,
                                  unsigned int key_len)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base);
        char *nonce = nx_ctx->priv.gcm.nonce;
        int rc;

        if (key_len < 4)
                return -EINVAL;

        key_len -= 4;

        rc = gcm_aes_nx_set_key(tfm, in_key, key_len);
        if (rc)
                goto out;

        memcpy(nonce, in_key + key_len, 4);
out:
        return rc;
}

static int gcm_aes_nx_setauthsize(struct crypto_aead *tfm,
                                  unsigned int authsize)
{
        if (authsize > crypto_aead_alg(tfm)->maxauthsize)
                return -EINVAL;

        crypto_aead_crt(tfm)->authsize = authsize;

        return 0;
}

static int gcm4106_aes_nx_setauthsize(struct crypto_aead *tfm,
                                      unsigned int authsize)
{
        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        crypto_aead_crt(tfm)->authsize = authsize;

        return 0;
}

static int nx_gca(struct nx_crypto_ctx *nx_ctx,
                  struct aead_request *req,
                  u8 *out)
{
        struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
        int rc = -EINVAL;
        struct scatter_walk walk;
        struct nx_sg *nx_sg = nx_ctx->in_sg;

        if (req->assoclen > nx_ctx->ap->databytelen)
                goto out;

        if (req->assoclen <= AES_BLOCK_SIZE) {
                scatterwalk_start(&walk, req->assoc);
                scatterwalk_copychunks(out, &walk, req->assoclen,
                                       SCATTERWALK_FROM_SG);
                scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);

                rc = 0;
                goto out;
        }

        nx_sg = nx_walk_and_build(nx_sg, nx_ctx->ap->sglen, req->assoc, 0,
                                  req->assoclen);
        nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) * sizeof(struct nx_sg);

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead,
                           req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
        if (rc)
                goto out;

        atomic_inc(&(nx_ctx->stats->aes_ops));
        atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes));

        memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE);
out:
        return rc;
}

static int gcm_aes_nx_crypt(struct aead_request *req, int enc)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
        struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
        struct blkcipher_desc desc;
        unsigned int nbytes = req->cryptlen;
        int rc = -EINVAL;

        if (nbytes > nx_ctx->ap->databytelen)
                goto out;

        desc.info = nx_ctx->priv.gcm.iv;
        /* initialize the counter */
        *(u32 *)(desc.info + NX_GCM_CTR_OFFSET) = 1;

        /* For scenarios where the input message is zero length, AES CTR mode
         * may be used. Set the source data to be a single block (16B) of all
         * zeros, and set the input IV value to be the same as the GMAC IV
         * value. - nx_wb 4.8.1.3 */
        if (nbytes == 0) {
                char src[AES_BLOCK_SIZE] = {};
                struct scatterlist sg;

                desc.tfm = crypto_alloc_blkcipher("ctr(aes)", 0, 0);
                if (IS_ERR(desc.tfm)) {
                        rc = -ENOMEM;
                        goto out;
                }

                crypto_blkcipher_setkey(desc.tfm, csbcpb->cpb.aes_gcm.key,
                        NX_CPB_KEY_SIZE(csbcpb) == NX_KS_AES_128 ? 16 :
                        NX_CPB_KEY_SIZE(csbcpb) == NX_KS_AES_192 ? 24 : 32);

                sg_init_one(&sg, src, AES_BLOCK_SIZE);
                if (enc)
                        crypto_blkcipher_encrypt_iv(&desc, req->dst, &sg,
                                                    AES_BLOCK_SIZE);
                else
                        crypto_blkcipher_decrypt_iv(&desc, req->dst, &sg,
                                                    AES_BLOCK_SIZE);
                crypto_free_blkcipher(desc.tfm);

                rc = 0;
                goto out;
        }

        desc.tfm = (struct crypto_blkcipher *)req->base.tfm;

        csbcpb->cpb.aes_gcm.bit_length_aad = req->assoclen * 8;

        if (req->assoclen) {
                rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad);
                if (rc)
                        goto out;
        }

        if (enc)
                NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
        else
                nbytes -= AES_BLOCK_SIZE;

        csbcpb->cpb.aes_gcm.bit_length_data = nbytes * 8;

        rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, req->src, nbytes,
                               csbcpb->cpb.aes_gcm.iv_or_cnt);
        if (rc)
                goto out;

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
                           req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
        if (rc)
                goto out;

        atomic_inc(&(nx_ctx->stats->aes_ops));
        atomic64_add(csbcpb->csb.processed_byte_count,
                     &(nx_ctx->stats->aes_bytes));

        if (enc) {
                /* copy out the auth tag */
                scatterwalk_map_and_copy(csbcpb->cpb.aes_gcm.out_pat_or_mac,
                                req->dst, nbytes,
                                crypto_aead_authsize(crypto_aead_reqtfm(req)),
                                SCATTERWALK_TO_SG);
        } else if (req->assoclen) {
                u8 *itag = nx_ctx->priv.gcm.iauth_tag;
                u8 *otag = csbcpb->cpb.aes_gcm.out_pat_or_mac;

                scatterwalk_map_and_copy(itag, req->dst, nbytes,
                                crypto_aead_authsize(crypto_aead_reqtfm(req)),
                                SCATTERWALK_FROM_SG);
                rc = memcmp(itag, otag,
                            crypto_aead_authsize(crypto_aead_reqtfm(req))) ?
                     -EBADMSG : 0;
        }
out:
        return rc;
}

static int gcm_aes_nx_encrypt(struct aead_request *req)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
        char *iv = nx_ctx->priv.gcm.iv;

        memcpy(iv, req->iv, 12);

        return gcm_aes_nx_crypt(req, 1);
}

static int gcm_aes_nx_decrypt(struct aead_request *req)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
        char *iv = nx_ctx->priv.gcm.iv;

        memcpy(iv, req->iv, 12);

        return gcm_aes_nx_crypt(req, 0);
}

static int gcm4106_aes_nx_encrypt(struct aead_request *req)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
        char *iv = nx_ctx->priv.gcm.iv;
        char *nonce = nx_ctx->priv.gcm.nonce;

        memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
        memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);

        return gcm_aes_nx_crypt(req, 1);
}

static int gcm4106_aes_nx_decrypt(struct aead_request *req)
{
        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
        char *iv = nx_ctx->priv.gcm.iv;
        char *nonce = nx_ctx->priv.gcm.nonce;

        memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
        memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);

        return gcm_aes_nx_crypt(req, 0);
}

/* tell the block cipher walk routines that this is a stream cipher by
 * setting cra_blocksize to 1. Even using blkcipher_walk_virt_block
 * during encrypt/decrypt doesn't solve this problem, because it calls
 * blkcipher_walk_done under the covers, which doesn't use walk->blocksize,
 * but instead uses this tfm->blocksize. */
struct crypto_alg nx_gcm_aes_alg = {
        .cra_name        = "gcm(aes)",
        .cra_driver_name = "gcm-aes-nx",
        .cra_priority    = 300,
        .cra_flags       = CRYPTO_ALG_TYPE_AEAD,
        .cra_blocksize   = 1,
        .cra_ctxsize     = sizeof(struct nx_crypto_ctx),
        .cra_type        = &crypto_aead_type,
        .cra_module      = THIS_MODULE,
        .cra_list        = LIST_HEAD_INIT(nx_gcm_aes_alg.cra_list),
        .cra_init        = nx_crypto_ctx_aes_gcm_init,
        .cra_exit        = nx_crypto_ctx_exit,
        .cra_aead = {
                .ivsize      = AES_BLOCK_SIZE,
                .maxauthsize = AES_BLOCK_SIZE,
                .setkey      = gcm_aes_nx_set_key,
                .setauthsize = gcm_aes_nx_setauthsize,
                .encrypt     = gcm_aes_nx_encrypt,
                .decrypt     = gcm_aes_nx_decrypt,
        }
};

struct crypto_alg nx_gcm4106_aes_alg = {
        .cra_name        = "rfc4106(gcm(aes))",
        .cra_driver_name = "rfc4106-gcm-aes-nx",
        .cra_priority    = 300,
        .cra_flags       = CRYPTO_ALG_TYPE_AEAD,
        .cra_blocksize   = 1,
        .cra_ctxsize     = sizeof(struct nx_crypto_ctx),
        .cra_type        = &crypto_nivaead_type,
        .cra_module      = THIS_MODULE,
        .cra_list        = LIST_HEAD_INIT(nx_gcm4106_aes_alg.cra_list),
        .cra_init        = nx_crypto_ctx_aes_gcm_init,
        .cra_exit        = nx_crypto_ctx_exit,
        .cra_aead = {
                .ivsize      = 8,
                .maxauthsize = AES_BLOCK_SIZE,
                .geniv       = "seqiv",
                .setkey      = gcm4106_aes_nx_set_key,
                .setauthsize = gcm4106_aes_nx_setauthsize,
                .encrypt     = gcm4106_aes_nx_encrypt,
                .decrypt     = gcm4106_aes_nx_decrypt,
        }
};
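One point worth spelling out from gcm4106_aes_nx_set_key() and gcm4106_aes_nx_encrypt() above: for the rfc4106(gcm(aes)) transform, the key material passed to setkey carries the 4-byte RFC 4106 salt after the AES key, and each request then supplies only an 8-byte explicit IV, which the driver appends to that salt to form the 12-byte GCM IV. The snippet below is a hedged illustration of that layout; the key and salt bytes are made up.

/*
 * Illustrative only: RFC 4106 key layout as consumed by
 * gcm4106_aes_nx_set_key().  The byte values are made up.
 */
static const u8 rfc4106_key_material[16 + 4] = {
        /* 16-byte AES-128 key, forwarded to gcm_aes_nx_set_key() */
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
        0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
        /* 4-byte salt, stored in nx_ctx->priv.gcm.nonce */
        0xca, 0xfe, 0xba, 0xbe,
};

/*
 * crypto_aead_setkey(tfm, rfc4106_key_material, 20) splits the buffer as
 * shown above; each request then provides only the 8-byte explicit IV in
 * req->iv, which gcm4106_aes_nx_encrypt()/..._decrypt() append to the
 * stored salt to build the 12-byte GCM IV.
 */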