Diffstat (limited to 'crypto/aes_generic.c')
-rw-r--r-- | crypto/aes_generic.c | 468 |
1 file changed, 241 insertions, 227 deletions
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index 9401dca85e87..cf30af74480f 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -47,11 +47,7 @@ | |||
47 | * --------------------------------------------------------------------------- | 47 | * --------------------------------------------------------------------------- |
48 | */ | 48 | */ |
49 | 49 | ||
50 | /* Some changes from the Gladman version: | 50 | #include <crypto/aes.h> |
51 | s/RIJNDAEL(e_key)/E_KEY/g | ||
52 | s/RIJNDAEL(d_key)/D_KEY/g | ||
53 | */ | ||
54 | |||
55 | #include <linux/module.h> | 51 | #include <linux/module.h> |
56 | #include <linux/init.h> | 52 | #include <linux/init.h> |
57 | #include <linux/types.h> | 53 | #include <linux/types.h> |
@@ -59,88 +55,46 @@ | |||
59 | #include <linux/crypto.h> | 55 | #include <linux/crypto.h> |
60 | #include <asm/byteorder.h> | 56 | #include <asm/byteorder.h> |
61 | 57 | ||
62 | #define AES_MIN_KEY_SIZE 16 | 58 | static inline u8 byte(const u32 x, const unsigned n) |
63 | #define AES_MAX_KEY_SIZE 32 | ||
64 | |||
65 | #define AES_BLOCK_SIZE 16 | ||
66 | |||
67 | /* | ||
68 | * #define byte(x, nr) ((unsigned char)((x) >> (nr*8))) | ||
69 | */ | ||
70 | static inline u8 | ||
71 | byte(const u32 x, const unsigned n) | ||
72 | { | 59 | { |
73 | return x >> (n << 3); | 60 | return x >> (n << 3); |
74 | } | 61 | } |
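The new byte() helper replaces the old macro comment it supersedes: it extracts the n-th least-significant byte of a 32-bit word (n << 3 is just n * 8). A minimal standalone check, written as plain userspace C rather than kernel code:

#include <stdint.h>
#include <stdio.h>

static inline uint8_t byte(const uint32_t x, const unsigned n)
{
	return x >> (n << 3);	/* shift right by n * 8; truncation keeps the low byte */
}

int main(void)
{
	uint32_t w = 0x11223344;

	/* prints "44 33 22 11": byte 0 is the least significant */
	printf("%02x %02x %02x %02x\n",
	       byte(w, 0), byte(w, 1), byte(w, 2), byte(w, 3));
	return 0;
}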
75 | 62 | ||
76 | struct aes_ctx { | ||
77 | int key_length; | ||
78 | u32 buf[120]; | ||
79 | }; | ||
80 | |||
81 | #define E_KEY (&ctx->buf[0]) | ||
82 | #define D_KEY (&ctx->buf[60]) | ||
83 | |||
84 | static u8 pow_tab[256] __initdata; | 63 | static u8 pow_tab[256] __initdata; |
85 | static u8 log_tab[256] __initdata; | 64 | static u8 log_tab[256] __initdata; |
86 | static u8 sbx_tab[256] __initdata; | 65 | static u8 sbx_tab[256] __initdata; |
87 | static u8 isb_tab[256] __initdata; | 66 | static u8 isb_tab[256] __initdata; |
88 | static u32 rco_tab[10]; | 67 | static u32 rco_tab[10]; |
89 | static u32 ft_tab[4][256]; | ||
90 | static u32 it_tab[4][256]; | ||
91 | 68 | ||
92 | static u32 fl_tab[4][256]; | 69 | u32 crypto_ft_tab[4][256]; |
93 | static u32 il_tab[4][256]; | 70 | u32 crypto_fl_tab[4][256]; |
71 | u32 crypto_it_tab[4][256]; | ||
72 | u32 crypto_il_tab[4][256]; | ||
94 | 73 | ||
95 | static inline u8 __init | 74 | EXPORT_SYMBOL_GPL(crypto_ft_tab); |
96 | f_mult (u8 a, u8 b) | 75 | EXPORT_SYMBOL_GPL(crypto_fl_tab); |
76 | EXPORT_SYMBOL_GPL(crypto_it_tab); | ||
77 | EXPORT_SYMBOL_GPL(crypto_il_tab); | ||
78 | |||
79 | static inline u8 __init f_mult(u8 a, u8 b) | ||
97 | { | 80 | { |
98 | u8 aa = log_tab[a], cc = aa + log_tab[b]; | 81 | u8 aa = log_tab[a], cc = aa + log_tab[b]; |
99 | 82 | ||
100 | return pow_tab[cc + (cc < aa ? 1 : 0)]; | 83 | return pow_tab[cc + (cc < aa ? 1 : 0)]; |
101 | } | 84 | } |
102 | 85 | ||
103 | #define ff_mult(a,b) (a && b ? f_mult(a, b) : 0) | 86 | #define ff_mult(a, b) (a && b ? f_mult(a, b) : 0) |
104 | 87 | ||
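f_mult multiplies two nonzero GF(2**8) elements by adding their discrete logarithms: cc is a u8, so the sum wraps modulo 256, and the (cc < aa) test adds 1 to restore the intended reduction modulo 255, the exponent period. ff_mult only short-circuits the zero cases, for which the log table has no entry. For reference, a self-contained bitwise multiply over the same modular polynomial 0x11b, which agrees with the table-based version for nonzero inputs:

#include <stdint.h>

/* reference GF(2^8) multiply modulo x^8 + x^4 + x^3 + x + 1 (0x11b) */
static uint8_t gf_mul(uint8_t a, uint8_t b)
{
	uint8_t r = 0;

	while (b) {
		if (b & 1)
			r ^= a;
		/* multiply a by x, folding in 0x1b when the top bit falls off */
		a = (a << 1) ^ ((a & 0x80) ? 0x1b : 0);
		b >>= 1;
	}
	return r;
}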
105 | #define f_rn(bo, bi, n, k) \ | 88 | static void __init gen_tabs(void) |
106 | bo[n] = ft_tab[0][byte(bi[n],0)] ^ \ | ||
107 | ft_tab[1][byte(bi[(n + 1) & 3],1)] ^ \ | ||
108 | ft_tab[2][byte(bi[(n + 2) & 3],2)] ^ \ | ||
109 | ft_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n) | ||
110 | |||
111 | #define i_rn(bo, bi, n, k) \ | ||
112 | bo[n] = it_tab[0][byte(bi[n],0)] ^ \ | ||
113 | it_tab[1][byte(bi[(n + 3) & 3],1)] ^ \ | ||
114 | it_tab[2][byte(bi[(n + 2) & 3],2)] ^ \ | ||
115 | it_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n) | ||
116 | |||
117 | #define ls_box(x) \ | ||
118 | ( fl_tab[0][byte(x, 0)] ^ \ | ||
119 | fl_tab[1][byte(x, 1)] ^ \ | ||
120 | fl_tab[2][byte(x, 2)] ^ \ | ||
121 | fl_tab[3][byte(x, 3)] ) | ||
122 | |||
123 | #define f_rl(bo, bi, n, k) \ | ||
124 | bo[n] = fl_tab[0][byte(bi[n],0)] ^ \ | ||
125 | fl_tab[1][byte(bi[(n + 1) & 3],1)] ^ \ | ||
126 | fl_tab[2][byte(bi[(n + 2) & 3],2)] ^ \ | ||
127 | fl_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n) | ||
128 | |||
129 | #define i_rl(bo, bi, n, k) \ | ||
130 | bo[n] = il_tab[0][byte(bi[n],0)] ^ \ | ||
131 | il_tab[1][byte(bi[(n + 3) & 3],1)] ^ \ | ||
132 | il_tab[2][byte(bi[(n + 2) & 3],2)] ^ \ | ||
133 | il_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n) | ||
134 | |||
135 | static void __init | ||
136 | gen_tabs (void) | ||
137 | { | 89 | { |
138 | u32 i, t; | 90 | u32 i, t; |
139 | u8 p, q; | 91 | u8 p, q; |
140 | 92 | ||
141 | /* log and power tables for GF(2**8) finite field with | 93 | /* |
142 | 0x011b as modular polynomial - the simplest primitive | 94 | * log and power tables for GF(2**8) finite field with |
143 | root is 0x03, used here to generate the tables */ | 95 | * 0x011b as modular polynomial - the simplest primitive |
96 | * root is 0x03, used here to generate the tables | ||
97 | */ | ||
144 | 98 | ||
145 | for (i = 0, p = 1; i < 256; ++i) { | 99 | for (i = 0, p = 1; i < 256; ++i) { |
146 | pow_tab[i] = (u8) p; | 100 | pow_tab[i] = (u8) p; |
@@ -169,92 +123,119 @@ gen_tabs (void) | |||
169 | p = sbx_tab[i]; | 123 | p = sbx_tab[i]; |
170 | 124 | ||
171 | t = p; | 125 | t = p; |
172 | fl_tab[0][i] = t; | 126 | crypto_fl_tab[0][i] = t; |
173 | fl_tab[1][i] = rol32(t, 8); | 127 | crypto_fl_tab[1][i] = rol32(t, 8); |
174 | fl_tab[2][i] = rol32(t, 16); | 128 | crypto_fl_tab[2][i] = rol32(t, 16); |
175 | fl_tab[3][i] = rol32(t, 24); | 129 | crypto_fl_tab[3][i] = rol32(t, 24); |
176 | 130 | ||
177 | t = ((u32) ff_mult (2, p)) | | 131 | t = ((u32) ff_mult(2, p)) | |
178 | ((u32) p << 8) | | 132 | ((u32) p << 8) | |
179 | ((u32) p << 16) | ((u32) ff_mult (3, p) << 24); | 133 | ((u32) p << 16) | ((u32) ff_mult(3, p) << 24); |
180 | 134 | ||
181 | ft_tab[0][i] = t; | 135 | crypto_ft_tab[0][i] = t; |
182 | ft_tab[1][i] = rol32(t, 8); | 136 | crypto_ft_tab[1][i] = rol32(t, 8); |
183 | ft_tab[2][i] = rol32(t, 16); | 137 | crypto_ft_tab[2][i] = rol32(t, 16); |
184 | ft_tab[3][i] = rol32(t, 24); | 138 | crypto_ft_tab[3][i] = rol32(t, 24); |
185 | 139 | ||
186 | p = isb_tab[i]; | 140 | p = isb_tab[i]; |
187 | 141 | ||
188 | t = p; | 142 | t = p; |
189 | il_tab[0][i] = t; | 143 | crypto_il_tab[0][i] = t; |
190 | il_tab[1][i] = rol32(t, 8); | 144 | crypto_il_tab[1][i] = rol32(t, 8); |
191 | il_tab[2][i] = rol32(t, 16); | 145 | crypto_il_tab[2][i] = rol32(t, 16); |
192 | il_tab[3][i] = rol32(t, 24); | 146 | crypto_il_tab[3][i] = rol32(t, 24); |
193 | 147 | ||
194 | t = ((u32) ff_mult (14, p)) | | 148 | t = ((u32) ff_mult(14, p)) | |
195 | ((u32) ff_mult (9, p) << 8) | | 149 | ((u32) ff_mult(9, p) << 8) | |
196 | ((u32) ff_mult (13, p) << 16) | | 150 | ((u32) ff_mult(13, p) << 16) | |
197 | ((u32) ff_mult (11, p) << 24); | 151 | ((u32) ff_mult(11, p) << 24); |
198 | 152 | ||
199 | it_tab[0][i] = t; | 153 | crypto_it_tab[0][i] = t; |
200 | it_tab[1][i] = rol32(t, 8); | 154 | crypto_it_tab[1][i] = rol32(t, 8); |
201 | it_tab[2][i] = rol32(t, 16); | 155 | crypto_it_tab[2][i] = rol32(t, 16); |
202 | it_tab[3][i] = rol32(t, 24); | 156 | crypto_it_tab[3][i] = rol32(t, 24); |
203 | } | 157 | } |
204 | } | 158 | } |
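Each forward-table entry packs one MixColumns column for the S-box output p, the byte vector (2p, p, p, 3p), and crypto_ft_tab[1..3] hold the same word rotated by 8, 16 and 24 bits, so a round column becomes four lookups XORed together; the inverse tables do the same with the InvMixColumns coefficients (14, 9, 13, 11). A standalone sketch of how one crypto_ft_tab[0] entry is laid out (xtime is the usual multiply-by-x helper, not part of this file):

#include <stdint.h>

/* multiply by x (i.e. 2) in GF(2^8) modulo 0x11b */
static uint8_t xtime(uint8_t a)
{
	return (a << 1) ^ ((a & 0x80) ? 0x1b : 0);
}

/* word layout of crypto_ft_tab[0][i] for an S-box output s: (2s, s, s, 3s) */
static uint32_t ft0_entry(uint8_t s)
{
	return (uint32_t)xtime(s) |
	       ((uint32_t)s << 8) |
	       ((uint32_t)s << 16) |
	       ((uint32_t)(xtime(s) ^ s) << 24);	/* 3s = 2s ^ s */
}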
205 | 159 | ||
206 | #define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b) | ||
207 | |||
208 | #define imix_col(y,x) \ | ||
209 | u = star_x(x); \ | ||
210 | v = star_x(u); \ | ||
211 | w = star_x(v); \ | ||
212 | t = w ^ (x); \ | ||
213 | (y) = u ^ v ^ w; \ | ||
214 | (y) ^= ror32(u ^ t, 8) ^ \ | ||
215 | ror32(v ^ t, 16) ^ \ | ||
216 | ror32(t,24) | ||
217 | |||
218 | /* initialise the key schedule from the user supplied key */ | 160 | /* initialise the key schedule from the user supplied key */ |
219 | 161 | ||
220 | #define loop4(i) \ | 162 | #define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b) |
221 | { t = ror32(t, 8); t = ls_box(t) ^ rco_tab[i]; \ | ||
222 | t ^= E_KEY[4 * i]; E_KEY[4 * i + 4] = t; \ | ||
223 | t ^= E_KEY[4 * i + 1]; E_KEY[4 * i + 5] = t; \ | ||
224 | t ^= E_KEY[4 * i + 2]; E_KEY[4 * i + 6] = t; \ | ||
225 | t ^= E_KEY[4 * i + 3]; E_KEY[4 * i + 7] = t; \ | ||
226 | } | ||
227 | |||
228 | #define loop6(i) \ | ||
229 | { t = ror32(t, 8); t = ls_box(t) ^ rco_tab[i]; \ | ||
230 | t ^= E_KEY[6 * i]; E_KEY[6 * i + 6] = t; \ | ||
231 | t ^= E_KEY[6 * i + 1]; E_KEY[6 * i + 7] = t; \ | ||
232 | t ^= E_KEY[6 * i + 2]; E_KEY[6 * i + 8] = t; \ | ||
233 | t ^= E_KEY[6 * i + 3]; E_KEY[6 * i + 9] = t; \ | ||
234 | t ^= E_KEY[6 * i + 4]; E_KEY[6 * i + 10] = t; \ | ||
235 | t ^= E_KEY[6 * i + 5]; E_KEY[6 * i + 11] = t; \ | ||
236 | } | ||
237 | |||
238 | #define loop8(i) \ | ||
239 | { t = ror32(t, 8); ; t = ls_box(t) ^ rco_tab[i]; \ | ||
240 | t ^= E_KEY[8 * i]; E_KEY[8 * i + 8] = t; \ | ||
241 | t ^= E_KEY[8 * i + 1]; E_KEY[8 * i + 9] = t; \ | ||
242 | t ^= E_KEY[8 * i + 2]; E_KEY[8 * i + 10] = t; \ | ||
243 | t ^= E_KEY[8 * i + 3]; E_KEY[8 * i + 11] = t; \ | ||
244 | t = E_KEY[8 * i + 4] ^ ls_box(t); \ | ||
245 | E_KEY[8 * i + 12] = t; \ | ||
246 | t ^= E_KEY[8 * i + 5]; E_KEY[8 * i + 13] = t; \ | ||
247 | t ^= E_KEY[8 * i + 6]; E_KEY[8 * i + 14] = t; \ | ||
248 | t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \ | ||
249 | } | ||
250 | 163 | ||
251 | static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, | 164 | #define imix_col(y,x) do { \ |
252 | unsigned int key_len) | 165 | u = star_x(x); \ |
166 | v = star_x(u); \ | ||
167 | w = star_x(v); \ | ||
168 | t = w ^ (x); \ | ||
169 | (y) = u ^ v ^ w; \ | ||
170 | (y) ^= ror32(u ^ t, 8) ^ \ | ||
171 | ror32(v ^ t, 16) ^ \ | ||
172 | ror32(t, 24); \ | ||
173 | } while (0) | ||
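star_x doubles all four bytes of a word in GF(2**8) at once: the low seven bits of every byte shift left, and bytes whose top bit was set get the reduction constant 0x1b folded back in. imix_col builds on it to apply InvMixColumns to one expanded key word (u, v, w are 2x, 4x, 8x). The same computation as plain functions, a standalone sketch outside the kernel:

#include <stdint.h>

/* four parallel GF(2^8) doublings packed into one 32-bit word (star_x) */
static uint32_t star_x(uint32_t x)
{
	return ((x & 0x7f7f7f7fu) << 1) ^ (((x & 0x80808080u) >> 7) * 0x1b);
}

static uint32_t ror32(uint32_t x, unsigned n)	/* n in 1..31 here */
{
	return (x >> n) | (x << (32 - n));
}

/* InvMixColumns on one word, matching the imix_col macro */
static uint32_t imix_col(uint32_t x)
{
	uint32_t u = star_x(x);			/* 2x per byte */
	uint32_t v = star_x(u);			/* 4x */
	uint32_t w = star_x(v);			/* 8x */
	uint32_t t = w ^ x;			/* 9x */

	/* combines the 14, 11, 13, 9 coefficients with byte rotations */
	return (u ^ v ^ w) ^ ror32(u ^ t, 8) ^ ror32(v ^ t, 16) ^ ror32(t, 24);
}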
174 | |||
175 | #define ls_box(x) \ | ||
176 | crypto_fl_tab[0][byte(x, 0)] ^ \ | ||
177 | crypto_fl_tab[1][byte(x, 1)] ^ \ | ||
178 | crypto_fl_tab[2][byte(x, 2)] ^ \ | ||
179 | crypto_fl_tab[3][byte(x, 3)] | ||
180 | |||
181 | #define loop4(i) do { \ | ||
182 | t = ror32(t, 8); \ | ||
183 | t = ls_box(t) ^ rco_tab[i]; \ | ||
184 | t ^= ctx->key_enc[4 * i]; \ | ||
185 | ctx->key_enc[4 * i + 4] = t; \ | ||
186 | t ^= ctx->key_enc[4 * i + 1]; \ | ||
187 | ctx->key_enc[4 * i + 5] = t; \ | ||
188 | t ^= ctx->key_enc[4 * i + 2]; \ | ||
189 | ctx->key_enc[4 * i + 6] = t; \ | ||
190 | t ^= ctx->key_enc[4 * i + 3]; \ | ||
191 | ctx->key_enc[4 * i + 7] = t; \ | ||
192 | } while (0) | ||
193 | |||
194 | #define loop6(i) do { \ | ||
195 | t = ror32(t, 8); \ | ||
196 | t = ls_box(t) ^ rco_tab[i]; \ | ||
197 | t ^= ctx->key_enc[6 * i]; \ | ||
198 | ctx->key_enc[6 * i + 6] = t; \ | ||
199 | t ^= ctx->key_enc[6 * i + 1]; \ | ||
200 | ctx->key_enc[6 * i + 7] = t; \ | ||
201 | t ^= ctx->key_enc[6 * i + 2]; \ | ||
202 | ctx->key_enc[6 * i + 8] = t; \ | ||
203 | t ^= ctx->key_enc[6 * i + 3]; \ | ||
204 | ctx->key_enc[6 * i + 9] = t; \ | ||
205 | t ^= ctx->key_enc[6 * i + 4]; \ | ||
206 | ctx->key_enc[6 * i + 10] = t; \ | ||
207 | t ^= ctx->key_enc[6 * i + 5]; \ | ||
208 | ctx->key_enc[6 * i + 11] = t; \ | ||
209 | } while (0) | ||
210 | |||
211 | #define loop8(i) do { \ | ||
212 | t = ror32(t, 8); \ | ||
213 | t = ls_box(t) ^ rco_tab[i]; \ | ||
214 | t ^= ctx->key_enc[8 * i]; \ | ||
215 | ctx->key_enc[8 * i + 8] = t; \ | ||
216 | t ^= ctx->key_enc[8 * i + 1]; \ | ||
217 | ctx->key_enc[8 * i + 9] = t; \ | ||
218 | t ^= ctx->key_enc[8 * i + 2]; \ | ||
219 | ctx->key_enc[8 * i + 10] = t; \ | ||
220 | t ^= ctx->key_enc[8 * i + 3]; \ | ||
221 | ctx->key_enc[8 * i + 11] = t; \ | ||
222 | t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \ | ||
223 | ctx->key_enc[8 * i + 12] = t; \ | ||
224 | t ^= ctx->key_enc[8 * i + 5]; \ | ||
225 | ctx->key_enc[8 * i + 13] = t; \ | ||
226 | t ^= ctx->key_enc[8 * i + 6]; \ | ||
227 | ctx->key_enc[8 * i + 14] = t; \ | ||
228 | t ^= ctx->key_enc[8 * i + 7]; \ | ||
229 | ctx->key_enc[8 * i + 15] = t; \ | ||
230 | } while (0) | ||
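loop4, loop6 and loop8 implement the AES key expansion for 128-, 192- and 256-bit keys: each step rotates the previous word, runs it through the S-box via ls_box, XORs in the round constant, then chains XORs across the rest of the block; loop8 additionally applies ls_box without rotation at the half-way word, as the 256-bit schedule requires. A compact standalone equivalent of one loop4 step, where sub_word and rcon are hypothetical stand-ins for ls_box and rco_tab[i]:

#include <stdint.h>

static uint32_t ror32(uint32_t x, unsigned n)
{
	return (x >> n) | (x << (32 - n));
}

/* one AES-128 expansion step (mirrors loop4): fills rk[4i+4 .. 4i+7] */
static void expand128_step(uint32_t rk[44], int i,
			   uint32_t (*sub_word)(uint32_t), uint32_t rcon)
{
	uint32_t t = sub_word(ror32(rk[4 * i + 3], 8)) ^ rcon;

	rk[4 * i + 4] = rk[4 * i + 0] ^ t;
	rk[4 * i + 5] = rk[4 * i + 1] ^ rk[4 * i + 4];
	rk[4 * i + 6] = rk[4 * i + 2] ^ rk[4 * i + 5];
	rk[4 * i + 7] = rk[4 * i + 3] ^ rk[4 * i + 6];
}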
231 | |||
232 | int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, | ||
233 | unsigned int key_len) | ||
253 | { | 234 | { |
254 | struct aes_ctx *ctx = crypto_tfm_ctx(tfm); | 235 | struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); |
255 | const __le32 *key = (const __le32 *)in_key; | 236 | const __le32 *key = (const __le32 *)in_key; |
256 | u32 *flags = &tfm->crt_flags; | 237 | u32 *flags = &tfm->crt_flags; |
257 | u32 i, t, u, v, w; | 238 | u32 i, t, u, v, w, j; |
258 | 239 | ||
259 | if (key_len % 8) { | 240 | if (key_len % 8) { |
260 | *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; | 241 | *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; |
@@ -263,95 +244,113 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, | |||
263 | 244 | ||
264 | ctx->key_length = key_len; | 245 | ctx->key_length = key_len; |
265 | 246 | ||
266 | E_KEY[0] = le32_to_cpu(key[0]); | 247 | ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]); |
267 | E_KEY[1] = le32_to_cpu(key[1]); | 248 | ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]); |
268 | E_KEY[2] = le32_to_cpu(key[2]); | 249 | ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]); |
269 | E_KEY[3] = le32_to_cpu(key[3]); | 250 | ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]); |
270 | 251 | ||
271 | switch (key_len) { | 252 | switch (key_len) { |
272 | case 16: | 253 | case 16: |
273 | t = E_KEY[3]; | 254 | t = ctx->key_enc[3]; |
274 | for (i = 0; i < 10; ++i) | 255 | for (i = 0; i < 10; ++i) |
275 | loop4 (i); | 256 | loop4(i); |
276 | break; | 257 | break; |
277 | 258 | ||
278 | case 24: | 259 | case 24: |
279 | E_KEY[4] = le32_to_cpu(key[4]); | 260 | ctx->key_enc[4] = le32_to_cpu(key[4]); |
280 | t = E_KEY[5] = le32_to_cpu(key[5]); | 261 | t = ctx->key_enc[5] = le32_to_cpu(key[5]); |
281 | for (i = 0; i < 8; ++i) | 262 | for (i = 0; i < 8; ++i) |
282 | loop6 (i); | 263 | loop6(i); |
283 | break; | 264 | break; |
284 | 265 | ||
285 | case 32: | 266 | case 32: |
286 | E_KEY[4] = le32_to_cpu(key[4]); | 267 | ctx->key_enc[4] = le32_to_cpu(key[4]); |
287 | E_KEY[5] = le32_to_cpu(key[5]); | 268 | ctx->key_enc[5] = le32_to_cpu(key[5]); |
288 | E_KEY[6] = le32_to_cpu(key[6]); | 269 | ctx->key_enc[6] = le32_to_cpu(key[6]); |
289 | t = E_KEY[7] = le32_to_cpu(key[7]); | 270 | t = ctx->key_enc[7] = le32_to_cpu(key[7]); |
290 | for (i = 0; i < 7; ++i) | 271 | for (i = 0; i < 7; ++i) |
291 | loop8 (i); | 272 | loop8(i); |
292 | break; | 273 | break; |
293 | } | 274 | } |
294 | 275 | ||
295 | D_KEY[0] = E_KEY[0]; | 276 | ctx->key_dec[0] = ctx->key_enc[key_len + 24]; |
296 | D_KEY[1] = E_KEY[1]; | 277 | ctx->key_dec[1] = ctx->key_enc[key_len + 25]; |
297 | D_KEY[2] = E_KEY[2]; | 278 | ctx->key_dec[2] = ctx->key_enc[key_len + 26]; |
298 | D_KEY[3] = E_KEY[3]; | 279 | ctx->key_dec[3] = ctx->key_enc[key_len + 27]; |
299 | 280 | ||
300 | for (i = 4; i < key_len + 24; ++i) { | 281 | for (i = 4; i < key_len + 24; ++i) { |
301 | imix_col (D_KEY[i], E_KEY[i]); | 282 | j = key_len + 24 - (i & ~3) + (i & 3); |
283 | imix_col(ctx->key_dec[j], ctx->key_enc[i]); | ||
302 | } | 284 | } |
303 | |||
304 | return 0; | 285 | return 0; |
305 | } | 286 | } |
287 | EXPORT_SYMBOL_GPL(crypto_aes_set_key); | ||
306 | 288 | ||
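The substantive change in crypto_aes_set_key is the layout of the decryption schedule: key_dec now stores the round keys already in decryption order, so the last encryption round key lands in key_dec[0..3] and the first in key_dec[key_len + 24 .. key_len + 27], letting aes_decrypt walk forwards through its schedule exactly like aes_encrypt. The index j = key_len + 24 - (i & ~3) + (i & 3) reverses the 4-word groups while preserving word order inside each group; a small standalone illustration of the mapping for a 128-bit key:

#include <stdio.h>

int main(void)
{
	const int key_len = 16;		/* AES-128: words 4 .. 39 get inverse-mixed */
	int i;

	for (i = 4; i < key_len + 24; ++i) {
		int j = key_len + 24 - (i & ~3) + (i & 3);

		/* i = 4..7 maps to j = 36..39, i = 36..39 maps to j = 4..7, etc. */
		printf("key_enc[%2d] -> key_dec[%2d]\n", i, j);
	}
	return 0;
}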
307 | /* encrypt a block of text */ | 289 | /* encrypt a block of text */ |
308 | 290 | ||
309 | #define f_nround(bo, bi, k) \ | 291 | #define f_rn(bo, bi, n, k) do { \ |
310 | f_rn(bo, bi, 0, k); \ | 292 | bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^ \ |
311 | f_rn(bo, bi, 1, k); \ | 293 | crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^ \ |
312 | f_rn(bo, bi, 2, k); \ | 294 | crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ |
313 | f_rn(bo, bi, 3, k); \ | 295 | crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \ |
314 | k += 4 | 296 | } while (0) |
315 | 297 | ||
316 | #define f_lround(bo, bi, k) \ | 298 | #define f_nround(bo, bi, k) do {\ |
317 | f_rl(bo, bi, 0, k); \ | 299 | f_rn(bo, bi, 0, k); \ |
318 | f_rl(bo, bi, 1, k); \ | 300 | f_rn(bo, bi, 1, k); \ |
319 | f_rl(bo, bi, 2, k); \ | 301 | f_rn(bo, bi, 2, k); \ |
320 | f_rl(bo, bi, 3, k) | 302 | f_rn(bo, bi, 3, k); \ |
303 | k += 4; \ | ||
304 | } while (0) | ||
305 | |||
306 | #define f_rl(bo, bi, n, k) do { \ | ||
307 | bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^ \ | ||
308 | crypto_fl_tab[1][byte(bi[(n + 1) & 3], 1)] ^ \ | ||
309 | crypto_fl_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ | ||
310 | crypto_fl_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \ | ||
311 | } while (0) | ||
312 | |||
313 | #define f_lround(bo, bi, k) do {\ | ||
314 | f_rl(bo, bi, 0, k); \ | ||
315 | f_rl(bo, bi, 1, k); \ | ||
316 | f_rl(bo, bi, 2, k); \ | ||
317 | f_rl(bo, bi, 3, k); \ | ||
318 | } while (0) | ||
321 | 319 | ||
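aes_encrypt below runs the cipher fully unrolled: an initial whitening XOR with key_enc[0..3], then 9, 11 or 13 full rounds (f_nround) depending on key length, and a final f_lround that omits MixColumns; the two if (key_len > ...) blocks supply the extra rounds for 192- and 256-bit keys before the common tail. The round count follows the usual AES rule, shown here only for reference:

/* 10 rounds for 128-bit keys, 12 for 192, 14 for 256; the unrolled code
 * executes (rounds - 1) f_nround calls followed by one f_lround */
static int aes_rounds(int key_len_bytes)
{
	return 6 + key_len_bytes / 4;
}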
322 | static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) | 320 | static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) |
323 | { | 321 | { |
324 | const struct aes_ctx *ctx = crypto_tfm_ctx(tfm); | 322 | const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); |
325 | const __le32 *src = (const __le32 *)in; | 323 | const __le32 *src = (const __le32 *)in; |
326 | __le32 *dst = (__le32 *)out; | 324 | __le32 *dst = (__le32 *)out; |
327 | u32 b0[4], b1[4]; | 325 | u32 b0[4], b1[4]; |
328 | const u32 *kp = E_KEY + 4; | 326 | const u32 *kp = ctx->key_enc + 4; |
327 | const int key_len = ctx->key_length; | ||
329 | 328 | ||
330 | b0[0] = le32_to_cpu(src[0]) ^ E_KEY[0]; | 329 | b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0]; |
331 | b0[1] = le32_to_cpu(src[1]) ^ E_KEY[1]; | 330 | b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1]; |
332 | b0[2] = le32_to_cpu(src[2]) ^ E_KEY[2]; | 331 | b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2]; |
333 | b0[3] = le32_to_cpu(src[3]) ^ E_KEY[3]; | 332 | b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3]; |
334 | 333 | ||
335 | if (ctx->key_length > 24) { | 334 | if (key_len > 24) { |
336 | f_nround (b1, b0, kp); | 335 | f_nround(b1, b0, kp); |
337 | f_nround (b0, b1, kp); | 336 | f_nround(b0, b1, kp); |
338 | } | 337 | } |
339 | 338 | ||
340 | if (ctx->key_length > 16) { | 339 | if (key_len > 16) { |
341 | f_nround (b1, b0, kp); | 340 | f_nround(b1, b0, kp); |
342 | f_nround (b0, b1, kp); | 341 | f_nround(b0, b1, kp); |
343 | } | 342 | } |
344 | 343 | ||
345 | f_nround (b1, b0, kp); | 344 | f_nround(b1, b0, kp); |
346 | f_nround (b0, b1, kp); | 345 | f_nround(b0, b1, kp); |
347 | f_nround (b1, b0, kp); | 346 | f_nround(b1, b0, kp); |
348 | f_nround (b0, b1, kp); | 347 | f_nround(b0, b1, kp); |
349 | f_nround (b1, b0, kp); | 348 | f_nround(b1, b0, kp); |
350 | f_nround (b0, b1, kp); | 349 | f_nround(b0, b1, kp); |
351 | f_nround (b1, b0, kp); | 350 | f_nround(b1, b0, kp); |
352 | f_nround (b0, b1, kp); | 351 | f_nround(b0, b1, kp); |
353 | f_nround (b1, b0, kp); | 352 | f_nround(b1, b0, kp); |
354 | f_lround (b0, b1, kp); | 353 | f_lround(b0, b1, kp); |
355 | 354 | ||
356 | dst[0] = cpu_to_le32(b0[0]); | 355 | dst[0] = cpu_to_le32(b0[0]); |
357 | dst[1] = cpu_to_le32(b0[1]); | 356 | dst[1] = cpu_to_le32(b0[1]); |
@@ -361,53 +360,69 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) | |||
361 | 360 | ||
362 | /* decrypt a block of text */ | 361 | /* decrypt a block of text */ |
363 | 362 | ||
364 | #define i_nround(bo, bi, k) \ | 363 | #define i_rn(bo, bi, n, k) do { \ |
365 | i_rn(bo, bi, 0, k); \ | 364 | bo[n] = crypto_it_tab[0][byte(bi[n], 0)] ^ \ |
366 | i_rn(bo, bi, 1, k); \ | 365 | crypto_it_tab[1][byte(bi[(n + 3) & 3], 1)] ^ \ |
367 | i_rn(bo, bi, 2, k); \ | 366 | crypto_it_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ |
368 | i_rn(bo, bi, 3, k); \ | 367 | crypto_it_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \ |
369 | k -= 4 | 368 | } while (0) |
370 | 369 | ||
371 | #define i_lround(bo, bi, k) \ | 370 | #define i_nround(bo, bi, k) do {\ |
372 | i_rl(bo, bi, 0, k); \ | 371 | i_rn(bo, bi, 0, k); \ |
373 | i_rl(bo, bi, 1, k); \ | 372 | i_rn(bo, bi, 1, k); \ |
374 | i_rl(bo, bi, 2, k); \ | 373 | i_rn(bo, bi, 2, k); \ |
375 | i_rl(bo, bi, 3, k) | 374 | i_rn(bo, bi, 3, k); \ |
375 | k += 4; \ | ||
376 | } while (0) | ||
377 | |||
378 | #define i_rl(bo, bi, n, k) do { \ | ||
379 | bo[n] = crypto_il_tab[0][byte(bi[n], 0)] ^ \ | ||
380 | crypto_il_tab[1][byte(bi[(n + 3) & 3], 1)] ^ \ | ||
381 | crypto_il_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \ | ||
382 | crypto_il_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \ | ||
383 | } while (0) | ||
384 | |||
385 | #define i_lround(bo, bi, k) do {\ | ||
386 | i_rl(bo, bi, 0, k); \ | ||
387 | i_rl(bo, bi, 1, k); \ | ||
388 | i_rl(bo, bi, 2, k); \ | ||
389 | i_rl(bo, bi, 3, k); \ | ||
390 | } while (0) | ||
376 | 391 | ||
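The inverse round macros mirror the forward ones but gather state bytes with the (n + 3), (n + 2), (n + 1) pattern to undo ShiftRows, and i_nround now advances the key pointer (k += 4) rather than walking it backwards, which is exactly what the reordered key_dec layout enables. The byte-source pattern, as a standalone sketch:

/* column that supplies row r of output column n in an inverse round,
 * i.e. the (n + 3), (n + 2), (n + 1) indexing in i_rn/i_rl above;
 * forward rounds (f_rn/f_rl) use (n + r) & 3 instead */
static unsigned inv_src_col(unsigned n, unsigned r)
{
	return (n + 4 - r) & 3;
}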
377 | static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) | 392 | static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) |
378 | { | 393 | { |
379 | const struct aes_ctx *ctx = crypto_tfm_ctx(tfm); | 394 | const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); |
380 | const __le32 *src = (const __le32 *)in; | 395 | const __le32 *src = (const __le32 *)in; |
381 | __le32 *dst = (__le32 *)out; | 396 | __le32 *dst = (__le32 *)out; |
382 | u32 b0[4], b1[4]; | 397 | u32 b0[4], b1[4]; |
383 | const int key_len = ctx->key_length; | 398 | const int key_len = ctx->key_length; |
384 | const u32 *kp = D_KEY + key_len + 20; | 399 | const u32 *kp = ctx->key_dec + 4; |
385 | 400 | ||
386 | b0[0] = le32_to_cpu(src[0]) ^ E_KEY[key_len + 24]; | 401 | b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0]; |
387 | b0[1] = le32_to_cpu(src[1]) ^ E_KEY[key_len + 25]; | 402 | b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1]; |
388 | b0[2] = le32_to_cpu(src[2]) ^ E_KEY[key_len + 26]; | 403 | b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2]; |
389 | b0[3] = le32_to_cpu(src[3]) ^ E_KEY[key_len + 27]; | 404 | b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3]; |
390 | 405 | ||
391 | if (key_len > 24) { | 406 | if (key_len > 24) { |
392 | i_nround (b1, b0, kp); | 407 | i_nround(b1, b0, kp); |
393 | i_nround (b0, b1, kp); | 408 | i_nround(b0, b1, kp); |
394 | } | 409 | } |
395 | 410 | ||
396 | if (key_len > 16) { | 411 | if (key_len > 16) { |
397 | i_nround (b1, b0, kp); | 412 | i_nround(b1, b0, kp); |
398 | i_nround (b0, b1, kp); | 413 | i_nround(b0, b1, kp); |
399 | } | 414 | } |
400 | 415 | ||
401 | i_nround (b1, b0, kp); | 416 | i_nround(b1, b0, kp); |
402 | i_nround (b0, b1, kp); | 417 | i_nround(b0, b1, kp); |
403 | i_nround (b1, b0, kp); | 418 | i_nround(b1, b0, kp); |
404 | i_nround (b0, b1, kp); | 419 | i_nround(b0, b1, kp); |
405 | i_nround (b1, b0, kp); | 420 | i_nround(b1, b0, kp); |
406 | i_nround (b0, b1, kp); | 421 | i_nround(b0, b1, kp); |
407 | i_nround (b1, b0, kp); | 422 | i_nround(b1, b0, kp); |
408 | i_nround (b0, b1, kp); | 423 | i_nround(b0, b1, kp); |
409 | i_nround (b1, b0, kp); | 424 | i_nround(b1, b0, kp); |
410 | i_lround (b0, b1, kp); | 425 | i_lround(b0, b1, kp); |
411 | 426 | ||
412 | dst[0] = cpu_to_le32(b0[0]); | 427 | dst[0] = cpu_to_le32(b0[0]); |
413 | dst[1] = cpu_to_le32(b0[1]); | 428 | dst[1] = cpu_to_le32(b0[1]); |
@@ -415,14 +430,13 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) | |||
415 | dst[3] = cpu_to_le32(b0[3]); | 430 | dst[3] = cpu_to_le32(b0[3]); |
416 | } | 431 | } |
417 | 432 | ||
418 | |||
419 | static struct crypto_alg aes_alg = { | 433 | static struct crypto_alg aes_alg = { |
420 | .cra_name = "aes", | 434 | .cra_name = "aes", |
421 | .cra_driver_name = "aes-generic", | 435 | .cra_driver_name = "aes-generic", |
422 | .cra_priority = 100, | 436 | .cra_priority = 100, |
423 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 437 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
424 | .cra_blocksize = AES_BLOCK_SIZE, | 438 | .cra_blocksize = AES_BLOCK_SIZE, |
425 | .cra_ctxsize = sizeof(struct aes_ctx), | 439 | .cra_ctxsize = sizeof(struct crypto_aes_ctx), |
426 | .cra_alignmask = 3, | 440 | .cra_alignmask = 3, |
427 | .cra_module = THIS_MODULE, | 441 | .cra_module = THIS_MODULE, |
428 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), | 442 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), |
@@ -430,9 +444,9 @@ static struct crypto_alg aes_alg = { | |||
430 | .cipher = { | 444 | .cipher = { |
431 | .cia_min_keysize = AES_MIN_KEY_SIZE, | 445 | .cia_min_keysize = AES_MIN_KEY_SIZE, |
432 | .cia_max_keysize = AES_MAX_KEY_SIZE, | 446 | .cia_max_keysize = AES_MAX_KEY_SIZE, |
433 | .cia_setkey = aes_set_key, | 447 | .cia_setkey = crypto_aes_set_key, |
434 | .cia_encrypt = aes_encrypt, | 448 | .cia_encrypt = aes_encrypt, |
435 | .cia_decrypt = aes_decrypt | 449 | .cia_decrypt = aes_decrypt |
436 | } | 450 | } |
437 | } | 451 | } |
438 | }; | 452 | }; |
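The crypto_alg descriptor above is what the module registers with the crypto API; the init/exit functions sit outside the hunks shown here, presumably following the usual pattern (illustrative sketch only, not part of this diff):

static int __init aes_init(void)
{
	gen_tabs();
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);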