summaryrefslogtreecommitdiffstats
path: root/crypto/aes_generic.c
diff options
context:
space:
mode:
Diffstat (limited to 'crypto/aes_generic.c')
-rw-r--r-- crypto/aes_generic.c | 64
1 file changed, 32 insertions(+), 32 deletions(-)
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index 3dd101144a58..ca554d57d01e 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -54,6 +54,7 @@
54#include <linux/errno.h> 54#include <linux/errno.h>
55#include <linux/crypto.h> 55#include <linux/crypto.h>
56#include <asm/byteorder.h> 56#include <asm/byteorder.h>
57#include <asm/unaligned.h>
57 58
58static inline u8 byte(const u32 x, const unsigned n) 59static inline u8 byte(const u32 x, const unsigned n)
59{ 60{
@@ -1216,7 +1217,6 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
1216int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, 1217int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
1217 unsigned int key_len) 1218 unsigned int key_len)
1218{ 1219{
1219 const __le32 *key = (const __le32 *)in_key;
1220 u32 i, t, u, v, w, j; 1220 u32 i, t, u, v, w, j;
1221 1221
1222 if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 && 1222 if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
@@ -1225,10 +1225,15 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
1225 1225
1226 ctx->key_length = key_len; 1226 ctx->key_length = key_len;
1227 1227
1228 ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]); 1228 ctx->key_enc[0] = get_unaligned_le32(in_key);
1229 ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]); 1229 ctx->key_enc[1] = get_unaligned_le32(in_key + 4);
1230 ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]); 1230 ctx->key_enc[2] = get_unaligned_le32(in_key + 8);
1231 ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]); 1231 ctx->key_enc[3] = get_unaligned_le32(in_key + 12);
1232
1233 ctx->key_dec[key_len + 24] = ctx->key_enc[0];
1234 ctx->key_dec[key_len + 25] = ctx->key_enc[1];
1235 ctx->key_dec[key_len + 26] = ctx->key_enc[2];
1236 ctx->key_dec[key_len + 27] = ctx->key_enc[3];
1232 1237
1233 switch (key_len) { 1238 switch (key_len) {
1234 case AES_KEYSIZE_128: 1239 case AES_KEYSIZE_128:
@@ -1238,17 +1243,17 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
1238 break; 1243 break;
1239 1244
1240 case AES_KEYSIZE_192: 1245 case AES_KEYSIZE_192:
1241 ctx->key_enc[4] = le32_to_cpu(key[4]); 1246 ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
1242 t = ctx->key_enc[5] = le32_to_cpu(key[5]); 1247 t = ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
1243 for (i = 0; i < 8; ++i) 1248 for (i = 0; i < 8; ++i)
1244 loop6(i); 1249 loop6(i);
1245 break; 1250 break;
1246 1251
1247 case AES_KEYSIZE_256: 1252 case AES_KEYSIZE_256:
1248 ctx->key_enc[4] = le32_to_cpu(key[4]); 1253 ctx->key_enc[4] = get_unaligned_le32(in_key + 16);
1249 ctx->key_enc[5] = le32_to_cpu(key[5]); 1254 ctx->key_enc[5] = get_unaligned_le32(in_key + 20);
1250 ctx->key_enc[6] = le32_to_cpu(key[6]); 1255 ctx->key_enc[6] = get_unaligned_le32(in_key + 24);
1251 t = ctx->key_enc[7] = le32_to_cpu(key[7]); 1256 t = ctx->key_enc[7] = get_unaligned_le32(in_key + 28);
1252 for (i = 0; i < 6; ++i) 1257 for (i = 0; i < 6; ++i)
1253 loop8(i); 1258 loop8(i);
1254 loop8tophalf(i); 1259 loop8tophalf(i);
@@ -1329,16 +1334,14 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key);
1329static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 1334static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1330{ 1335{
1331 const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); 1336 const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
1332 const __le32 *src = (const __le32 *)in;
1333 __le32 *dst = (__le32 *)out;
1334 u32 b0[4], b1[4]; 1337 u32 b0[4], b1[4];
1335 const u32 *kp = ctx->key_enc + 4; 1338 const u32 *kp = ctx->key_enc + 4;
1336 const int key_len = ctx->key_length; 1339 const int key_len = ctx->key_length;
1337 1340
1338 b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0]; 1341 b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
1339 b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1]; 1342 b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
1340 b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2]; 1343 b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
1341 b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3]; 1344 b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
1342 1345
1343 if (key_len > 24) { 1346 if (key_len > 24) {
1344 f_nround(b1, b0, kp); 1347 f_nround(b1, b0, kp);
@@ -1361,10 +1364,10 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1361 f_nround(b1, b0, kp); 1364 f_nround(b1, b0, kp);
1362 f_lround(b0, b1, kp); 1365 f_lround(b0, b1, kp);
1363 1366
1364 dst[0] = cpu_to_le32(b0[0]); 1367 put_unaligned_le32(b0[0], out);
1365 dst[1] = cpu_to_le32(b0[1]); 1368 put_unaligned_le32(b0[1], out + 4);
1366 dst[2] = cpu_to_le32(b0[2]); 1369 put_unaligned_le32(b0[2], out + 8);
1367 dst[3] = cpu_to_le32(b0[3]); 1370 put_unaligned_le32(b0[3], out + 12);
1368} 1371}
1369 1372
1370/* decrypt a block of text */ 1373/* decrypt a block of text */
@@ -1401,16 +1404,14 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1401static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 1404static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1402{ 1405{
1403 const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); 1406 const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
1404 const __le32 *src = (const __le32 *)in;
1405 __le32 *dst = (__le32 *)out;
1406 u32 b0[4], b1[4]; 1407 u32 b0[4], b1[4];
1407 const int key_len = ctx->key_length; 1408 const int key_len = ctx->key_length;
1408 const u32 *kp = ctx->key_dec + 4; 1409 const u32 *kp = ctx->key_dec + 4;
1409 1410
1410 b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0]; 1411 b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
1411 b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1]; 1412 b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
1412 b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2]; 1413 b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
1413 b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3]; 1414 b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
1414 1415
1415 if (key_len > 24) { 1416 if (key_len > 24) {
1416 i_nround(b1, b0, kp); 1417 i_nround(b1, b0, kp);
@@ -1433,10 +1434,10 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1433 i_nround(b1, b0, kp); 1434 i_nround(b1, b0, kp);
1434 i_lround(b0, b1, kp); 1435 i_lround(b0, b1, kp);
1435 1436
1436 dst[0] = cpu_to_le32(b0[0]); 1437 put_unaligned_le32(b0[0], out);
1437 dst[1] = cpu_to_le32(b0[1]); 1438 put_unaligned_le32(b0[1], out + 4);
1438 dst[2] = cpu_to_le32(b0[2]); 1439 put_unaligned_le32(b0[2], out + 8);
1439 dst[3] = cpu_to_le32(b0[3]); 1440 put_unaligned_le32(b0[3], out + 12);
1440} 1441}
1441 1442
1442static struct crypto_alg aes_alg = { 1443static struct crypto_alg aes_alg = {
@@ -1446,7 +1447,6 @@ static struct crypto_alg aes_alg = {
1446 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 1447 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
1447 .cra_blocksize = AES_BLOCK_SIZE, 1448 .cra_blocksize = AES_BLOCK_SIZE,
1448 .cra_ctxsize = sizeof(struct crypto_aes_ctx), 1449 .cra_ctxsize = sizeof(struct crypto_aes_ctx),
1449 .cra_alignmask = 3,
1450 .cra_module = THIS_MODULE, 1450 .cra_module = THIS_MODULE,
1451 .cra_u = { 1451 .cra_u = {
1452 .cipher = { 1452 .cipher = {