-rw-r--r--  drivers/crypto/vmx/aes.c       | 166
-rw-r--r--  drivers/crypto/vmx/aes_cbc.c   | 236
-rw-r--r--  drivers/crypto/vmx/aes_ctr.c   | 225
-rw-r--r--  drivers/crypto/vmx/aesp8-ppc.h |  15
-rw-r--r--  drivers/crypto/vmx/ghash.c     | 278
-rw-r--r--  drivers/crypto/vmx/vmx.c       |  68
6 files changed, 506 insertions(+), 482 deletions(-)
diff --git a/drivers/crypto/vmx/aes.c b/drivers/crypto/vmx/aes.c
index ab300ea19434..023e5f014783 100644
--- a/drivers/crypto/vmx/aes.c
+++ b/drivers/crypto/vmx/aes.c
@@ -46,17 +46,20 @@ static int p8_aes_init(struct crypto_tfm *tfm)
 		return -ENOENT;
 	}
 
-	fallback = crypto_alloc_cipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+	fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(fallback)) {
-		printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
-			alg, PTR_ERR(fallback));
+		printk(KERN_ERR
+		       "Failed to allocate transformation for '%s': %ld\n",
+		       alg, PTR_ERR(fallback));
 		return PTR_ERR(fallback);
 	}
 	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
 	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
 
 	crypto_cipher_set_flags(fallback,
-		crypto_cipher_get_flags((struct crypto_cipher *) tfm));
+				crypto_cipher_get_flags((struct
+							 crypto_cipher *)
+							 tfm));
 	ctx->fallback = fallback;
 
 	return 0;
@@ -137,3 +140,2 @@ struct crypto_alg p8_aes_alg = {
 	},
 };
-
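
For reference, the "aes" cipher registered above is reached through the generic kernel crypto API, and its cra_priority of 1000 makes it preferred over the generic C implementation wherever this module loads. A minimal sketch of a caller, assuming kernel context; the function name and the all-zero key and plaintext are illustrative placeholders only:

#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/aes.h>

/* Encrypt one block with whichever "aes" implementation has the
 * highest priority (p8_aes on a POWER8 with VMX crypto support). */
static int example_aes_one_block(void)
{
	struct crypto_cipher *tfm;
	u8 key[AES_KEYSIZE_128] = { 0 };	/* placeholder key */
	u8 in[AES_BLOCK_SIZE] = { 0 };		/* placeholder plaintext */
	u8 out[AES_BLOCK_SIZE];
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!ret)
		crypto_cipher_encrypt_one(tfm, out, in);

	crypto_free_cipher(tfm);
	return ret;
}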
diff --git a/drivers/crypto/vmx/aes_cbc.c b/drivers/crypto/vmx/aes_cbc.c
index 1a559b7dddb5..7120ab24d8c6 100644
--- a/drivers/crypto/vmx/aes_cbc.c
+++ b/drivers/crypto/vmx/aes_cbc.c
@@ -47,18 +47,21 @@ static int p8_aes_cbc_init(struct crypto_tfm *tfm)
 		return -ENOENT;
 	}
 
-	fallback = crypto_alloc_blkcipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+	fallback =
+		crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(fallback)) {
-		printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
-			alg, PTR_ERR(fallback));
+		printk(KERN_ERR
+		       "Failed to allocate transformation for '%s': %ld\n",
+		       alg, PTR_ERR(fallback));
 		return PTR_ERR(fallback);
 	}
 	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
 	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
 
-	crypto_blkcipher_set_flags(fallback,
-		crypto_blkcipher_get_flags((struct crypto_blkcipher *) tfm));
+	crypto_blkcipher_set_flags(
+		fallback,
+		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
 	ctx->fallback = fallback;
 
 	return 0;
 }
@@ -92,27 +95,30 @@ static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
 static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
-	struct scatterlist *dst, struct scatterlist *src,
-	unsigned int nbytes)
+			      struct scatterlist *dst,
+			      struct scatterlist *src, unsigned int nbytes)
 {
 	int ret;
 	struct blkcipher_walk walk;
-	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(
-		crypto_blkcipher_tfm(desc->tfm));
+	struct p8_aes_cbc_ctx *ctx =
+		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
 	struct blkcipher_desc fallback_desc = {
 		.tfm = ctx->fallback,
 		.info = desc->info,
 		.flags = desc->flags
 	};
 
 	if (in_interrupt()) {
-		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
+		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
+					       nbytes);
 	} else {
 		pagefault_disable();
 		enable_kernel_altivec();
 
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt(desc, &walk);
 		while ((nbytes = walk.nbytes)) {
-			aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-				nbytes & AES_BLOCK_MASK, &ctx->enc_key, walk.iv, 1);
+			aes_p8_cbc_encrypt(walk.src.virt.addr,
+					   walk.dst.virt.addr,
+					   nbytes & AES_BLOCK_MASK,
+					   &ctx->enc_key, walk.iv, 1);
 			nbytes &= AES_BLOCK_SIZE - 1;
 			ret = blkcipher_walk_done(desc, &walk, nbytes);
@@ -127,27 +133,30 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
 static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
-	struct scatterlist *dst, struct scatterlist *src,
-	unsigned int nbytes)
+			      struct scatterlist *dst,
+			      struct scatterlist *src, unsigned int nbytes)
 {
 	int ret;
 	struct blkcipher_walk walk;
-	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(
-		crypto_blkcipher_tfm(desc->tfm));
+	struct p8_aes_cbc_ctx *ctx =
+		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
 	struct blkcipher_desc fallback_desc = {
 		.tfm = ctx->fallback,
 		.info = desc->info,
 		.flags = desc->flags
 	};
 
 	if (in_interrupt()) {
-		ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes);
+		ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src,
+					       nbytes);
 	} else {
 		pagefault_disable();
 		enable_kernel_altivec();
 
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt(desc, &walk);
 		while ((nbytes = walk.nbytes)) {
-			aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-				nbytes & AES_BLOCK_MASK, &ctx->dec_key, walk.iv, 0);
+			aes_p8_cbc_encrypt(walk.src.virt.addr,
+					   walk.dst.virt.addr,
+					   nbytes & AES_BLOCK_MASK,
+					   &ctx->dec_key, walk.iv, 0);
 			nbytes &= AES_BLOCK_SIZE - 1;
 			ret = blkcipher_walk_done(desc, &walk, nbytes);
@@ -182,3 +191,2 @@ struct crypto_alg p8_aes_cbc_alg = {
 	},
 };
-
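
The cbc(aes) blkcipher above follows the same fallback pattern and is reached through the synchronous blkcipher interface current at the time of this patch. A hedged sketch of such a caller, assuming kernel context; the function name, key and IV are placeholders, and the buffer length must be a multiple of AES_BLOCK_SIZE:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/aes.h>

/* CBC-encrypt one block in place via the blkcipher API. */
static int example_cbc_aes(u8 *buf /* AES_BLOCK_SIZE bytes */)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	u8 key[AES_KEYSIZE_128] = { 0 };	/* placeholder key */
	u8 iv[AES_BLOCK_SIZE] = { 0 };		/* placeholder IV */
	int ret;

	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc.tfm = tfm;
	desc.flags = 0;

	ret = crypto_blkcipher_setkey(tfm, key, sizeof(key));
	if (!ret) {
		crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
		sg_init_one(&sg, buf, AES_BLOCK_SIZE);
		ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, AES_BLOCK_SIZE);
	}

	crypto_free_blkcipher(tfm);
	return ret;
}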
diff --git a/drivers/crypto/vmx/aes_ctr.c b/drivers/crypto/vmx/aes_ctr.c
index 96dbee4bf4a6..7adae42a7b79 100644
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -45,18 +45,21 @@ static int p8_aes_ctr_init(struct crypto_tfm *tfm)
 		return -ENOENT;
 	}
 
-	fallback = crypto_alloc_blkcipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+	fallback =
+		crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(fallback)) {
-		printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
-			alg, PTR_ERR(fallback));
+		printk(KERN_ERR
+		       "Failed to allocate transformation for '%s': %ld\n",
+		       alg, PTR_ERR(fallback));
 		return PTR_ERR(fallback);
 	}
 	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
 	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
 
-	crypto_blkcipher_set_flags(fallback,
-		crypto_blkcipher_get_flags((struct crypto_blkcipher *) tfm));
+	crypto_blkcipher_set_flags(
+		fallback,
+		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
 	ctx->fallback = fallback;
 
 	return 0;
 }
@@ -108,27 +111,33 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
 static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
-	struct scatterlist *dst, struct scatterlist *src,
-	unsigned int nbytes)
+			    struct scatterlist *dst,
+			    struct scatterlist *src, unsigned int nbytes)
 {
 	int ret;
 	struct blkcipher_walk walk;
-	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(
-		crypto_blkcipher_tfm(desc->tfm));
+	struct p8_aes_ctr_ctx *ctx =
+		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
 	struct blkcipher_desc fallback_desc = {
 		.tfm = ctx->fallback,
 		.info = desc->info,
 		.flags = desc->flags
 	};
 
 	if (in_interrupt()) {
-		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
+		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
+					       nbytes);
 	} else {
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
 		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 			pagefault_disable();
 			enable_kernel_altivec();
-			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr, walk.dst.virt.addr,
-				(nbytes & AES_BLOCK_MASK)/AES_BLOCK_SIZE, &ctx->enc_key, walk.iv);
+			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
+						    walk.dst.virt.addr,
+						    (nbytes &
+						     AES_BLOCK_MASK) /
+						    AES_BLOCK_SIZE,
+						    &ctx->enc_key,
+						    walk.iv);
 			pagefault_enable();
 
 			crypto_inc(walk.iv, AES_BLOCK_SIZE);
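
p8_aes_ctr_crypt() splits each chunk returned by the blkcipher walk into full AES blocks, which go to aes_p8_ctr32_encrypt_blocks(), and a trailing partial block, which p8_aes_ctr_final() finishes by XORing one extra keystream block. A small stand-alone illustration of that AES_BLOCK_MASK arithmetic; the 70-byte length is an arbitrary assumed value:

#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))	/* as in aesp8-ppc.h */

int main(void)
{
	unsigned int nbytes = 70;	/* assumed chunk length */
	unsigned int full = nbytes & AES_BLOCK_MASK;		/* 64 */
	unsigned int blocks = full / AES_BLOCK_SIZE;		/* 4  */
	unsigned int tail = nbytes & (AES_BLOCK_SIZE - 1);	/* 6  */

	/* 4 full blocks go to aes_p8_ctr32_encrypt_blocks(); the 6-byte
	 * tail is XORed against one extra keystream block in *_final(). */
	printf("%u bytes -> %u full blocks + %u tail bytes\n",
	       nbytes, blocks, tail);
	return 0;
}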
diff --git a/drivers/crypto/vmx/aesp8-ppc.h b/drivers/crypto/vmx/aesp8-ppc.h
index e963945a83e1..4cd34ee54a94 100644
--- a/drivers/crypto/vmx/aesp8-ppc.h
+++ b/drivers/crypto/vmx/aesp8-ppc.h
@@ -4,17 +4,18 @@
 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
 
 struct aes_key {
 	u8 key[AES_MAX_KEYLENGTH];
 	int rounds;
 };
 
 int aes_p8_set_encrypt_key(const u8 *userKey, const int bits,
 			   struct aes_key *key);
 int aes_p8_set_decrypt_key(const u8 *userKey, const int bits,
 			   struct aes_key *key);
 void aes_p8_encrypt(const u8 *in, u8 *out, const struct aes_key *key);
-void aes_p8_decrypt(const u8 *in, u8 *out,const struct aes_key *key);
+void aes_p8_decrypt(const u8 *in, u8 *out, const struct aes_key *key);
 void aes_p8_cbc_encrypt(const u8 *in, u8 *out, size_t len,
 			const struct aes_key *key, u8 *iv, const int enc);
 void aes_p8_ctr32_encrypt_blocks(const u8 *in, u8 *out,
-		size_t len, const struct aes_key *key, const u8 *iv);
+				 size_t len, const struct aes_key *key,
+				 const u8 *iv);
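
These prototypes are the C-visible entry points of the aesp8-ppc assembly. As the drivers above show, the routines may only run with AltiVec enabled and page faults disabled. A hedged sketch of that calling pattern, mirroring the drivers; the function name is illustrative and the headers are the ones the drivers themselves rely on for these helpers:

#include <linux/uaccess.h>	/* pagefault_disable()/pagefault_enable() */
#include <asm/switch_to.h>	/* enable_kernel_altivec() */
#include <crypto/aes.h>
#include "aesp8-ppc.h"

/* Expand a 128-bit key and encrypt one block with the P8 assembly. */
static void example_p8_one_block(const u8 *key, const u8 *in, u8 *out)
{
	struct aes_key enc_key;

	pagefault_disable();
	enable_kernel_altivec();
	aes_p8_set_encrypt_key(key, 128, &enc_key);
	aes_p8_encrypt(in, out, &enc_key);
	pagefault_enable();
}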
diff --git a/drivers/crypto/vmx/ghash.c b/drivers/crypto/vmx/ghash.c
index d0ffe277af5c..4c3a8f7e5059 100644
--- a/drivers/crypto/vmx/ghash.c
+++ b/drivers/crypto/vmx/ghash.c
@@ -39,5 +39,5 @@
 void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
 void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
 void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
-		const u8 *in,size_t len);
+		  const u8 *in, size_t len);
 
@@ -65,21 +65,23 @@ static int p8_ghash_init_tfm(struct crypto_tfm *tfm)
 		return -ENOENT;
 	}
 
-	fallback = crypto_alloc_shash(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+	fallback = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(fallback)) {
-		printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
-			alg, PTR_ERR(fallback));
+		printk(KERN_ERR
+		       "Failed to allocate transformation for '%s': %ld\n",
+		       alg, PTR_ERR(fallback));
 		return PTR_ERR(fallback);
 	}
 	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
 	       crypto_tfm_alg_driver_name(crypto_shash_tfm(fallback)));
 
 	crypto_shash_set_flags(fallback,
-		crypto_shash_get_flags((struct crypto_shash *) tfm));
+			       crypto_shash_get_flags((struct crypto_shash
+						       *) tfm));
 	ctx->fallback = fallback;
 
 	shash_tfm->descsize = sizeof(struct p8_ghash_desc_ctx)
 		+ crypto_shash_descsize(fallback);
 
 	return 0;
 }
@@ -130,21 +132,23 @@ static int p8_ghash_update(struct shash_desc *desc,
 	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
 
 	if (IN_INTERRUPT) {
-		return crypto_shash_update(&dctx->fallback_desc, src, srclen);
+		return crypto_shash_update(&dctx->fallback_desc, src,
+					   srclen);
 	} else {
 		if (dctx->bytes) {
 			if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
-				memcpy(dctx->buffer + dctx->bytes, src, srclen);
+				memcpy(dctx->buffer + dctx->bytes, src,
+				       srclen);
 				dctx->bytes += srclen;
 				return 0;
 			}
 			memcpy(dctx->buffer + dctx->bytes, src,
 			       GHASH_DIGEST_SIZE - dctx->bytes);
 			pagefault_disable();
 			enable_kernel_altivec();
 			enable_kernel_fp();
-			gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
-				GHASH_DIGEST_SIZE);
+			gcm_ghash_p8(dctx->shash, ctx->htable,
+				     dctx->buffer, GHASH_DIGEST_SIZE);
 			pagefault_enable();
 			src += GHASH_DIGEST_SIZE - dctx->bytes;
 			srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
@@ -183,8 +187,8 @@ static int p8_ghash_final(struct shash_desc *desc, u8 *out)
 			pagefault_disable();
 			enable_kernel_altivec();
 			enable_kernel_fp();
-			gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
-				GHASH_DIGEST_SIZE);
+			gcm_ghash_p8(dctx->shash, ctx->htable,
+				     dctx->buffer, GHASH_DIGEST_SIZE);
 			pagefault_enable();
 			dctx->bytes = 0;
 		}
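
The p8_ghash shash registered here is normally consumed indirectly through gcm(aes), but it can also be driven directly through the shash API. A hedged sketch for the kernel version this driver targets; the key and data are placeholders, the local GHASH_* defines simply restate the 16-byte sizes the driver uses, and SHASH_DESC_ON_STACK sizes the descriptor from the descsize set up in p8_ghash_init_tfm():

#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/hash.h>

#define GHASH_KEY_LEN     16	/* matches the driver */
#define GHASH_DIGEST_SIZE 16	/* matches the driver */

/* One-shot GHASH over a block-aligned message. */
static int example_ghash(const u8 *data, unsigned int len, u8 *digest)
{
	struct crypto_shash *tfm;
	u8 key[GHASH_KEY_LEN] = { 0 };	/* placeholder hash key H */
	int ret;

	tfm = crypto_alloc_shash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_shash_setkey(tfm, key, sizeof(key));
	if (!ret) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;
		ret = crypto_shash_digest(desc, data, len, digest);
	}

	crypto_free_shash(tfm);
	return ret;
}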
diff --git a/drivers/crypto/vmx/vmx.c b/drivers/crypto/vmx/vmx.c
index 4c398ddd8c10..e163d5770438 100644
--- a/drivers/crypto/vmx/vmx.c
+++ b/drivers/crypto/vmx/vmx.c
@@ -32,57 +32,57 @@ extern struct crypto_alg p8_aes_alg;
 extern struct crypto_alg p8_aes_cbc_alg;
 extern struct crypto_alg p8_aes_ctr_alg;
 static struct crypto_alg *algs[] = {
 	&p8_aes_alg,
 	&p8_aes_cbc_alg,
 	&p8_aes_ctr_alg,
 	NULL,
 };
 
 int __init p8_init(void)
 {
 	int ret = 0;
 	struct crypto_alg **alg_it;
 
 	if (!(cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
 		return -ENODEV;
 
 	for (alg_it = algs; *alg_it; alg_it++) {
 		ret = crypto_register_alg(*alg_it);
 		printk(KERN_INFO "crypto_register_alg '%s' = %d\n",
 		       (*alg_it)->cra_name, ret);
 		if (ret) {
 			for (alg_it--; alg_it >= algs; alg_it--)
 				crypto_unregister_alg(*alg_it);
 			break;
 		}
 	}
 	if (ret)
 		return ret;
 
 	ret = crypto_register_shash(&p8_ghash_alg);
 	if (ret) {
 		for (alg_it = algs; *alg_it; alg_it++)
 			crypto_unregister_alg(*alg_it);
 	}
 	return ret;
 }
 
 void __exit p8_exit(void)
 {
 	struct crypto_alg **alg_it;
 
 	for (alg_it = algs; *alg_it; alg_it++) {
 		printk(KERN_INFO "Removing '%s'\n", (*alg_it)->cra_name);
 		crypto_unregister_alg(*alg_it);
 	}
 	crypto_unregister_shash(&p8_ghash_alg);
 }
 
 module_init(p8_init);
 module_exit(p8_exit);
 
 MODULE_AUTHOR("Marcelo Cerri<mhcerri@br.ibm.com>");
-MODULE_DESCRIPTION("IBM VMX cryptographic acceleration instructions support on Power 8");
+MODULE_DESCRIPTION("IBM VMX cryptographic acceleration instructions "
+		   "support on Power 8");
 MODULE_LICENSE("GPL");
 MODULE_VERSION("1.0.0");
-
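
p8_init() refuses to load unless the CPU advertises PPC_FEATURE2_VEC_CRYPTO. The same feature bit is exposed to userspace through the auxiliary vector, so whether a machine can use this module can be checked without loading it. A hedged sketch, assuming glibc's getauxval() and the powerpc uapi value of the feature bit (0x02000000):

#include <stdio.h>
#include <sys/auxv.h>

#ifndef PPC_FEATURE2_VEC_CRYPTO
#define PPC_FEATURE2_VEC_CRYPTO 0x02000000	/* assumed, from powerpc uapi headers */
#endif

int main(void)
{
	unsigned long hwcap2 = getauxval(AT_HWCAP2);

	if (hwcap2 & PPC_FEATURE2_VEC_CRYPTO)
		printf("VMX crypto instructions available\n");
	else
		printf("VMX crypto instructions not available\n");
	return 0;
}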