author     Theodore Ts'o <tytso@mit.edu>  2013-09-21 18:06:02 -0400
committer  Theodore Ts'o <tytso@mit.edu>  2013-10-10 14:32:13 -0400
commit     85a1f77716cf546d9b9c42e2848b5712f51ba1ee (patch)
tree       80b598761ad54d8eab82eab20ca59c13b765b8f7
parent     61875f30daf60305712e25b209ef41ced2635bad (diff)
random: mix in architectural randomness earlier in extract_buf()
Previously, if the CPU had a built-in random number generator (i.e., RDRAND on newer x86 chips), we mixed it in at the very end of extract_buf() using an XOR operation.

We now mix it in right after we calculate a hash across the entire pool. This has the advantage that any contribution of entropy from the CPU's HWRNG will get mixed back into the pool. In addition, it means that if the HWRNG has any defects (either accidentally or maliciously introduced), they will be mitigated via the non-linear transform of the SHA-1 hash function before we hand out generated output.

Signed-off-by: "Theodore Ts'o" <tytso@mit.edu>
-rw-r--r--  drivers/char/random.c  24
1 file changed, 12 insertions, 12 deletions
diff --git a/drivers/char/random.c b/drivers/char/random.c
index 2d5daf9b58e9..54d020815b4e 100644
--- a/drivers/char/random.c
+++ b/drivers/char/random.c
@@ -904,7 +904,7 @@ static void extract_buf(struct entropy_store *r, __u8 *out)
         int i;
         union {
                 __u32 w[5];
-                unsigned long l[LONGS(EXTRACT_SIZE)];
+                unsigned long l[LONGS(20)];
         } hash;
         __u32 workspace[SHA_WORKSPACE_WORDS];
         __u8 extract[64];
@@ -917,6 +917,17 @@ static void extract_buf(struct entropy_store *r, __u8 *out)
                 sha_transform(hash.w, (__u8 *)(r->pool + i), workspace);
 
         /*
+         * If we have a architectural hardware random number
+         * generator, mix that in, too.
+         */
+        for (i = 0; i < LONGS(20); i++) {
+                unsigned long v;
+                if (!arch_get_random_long(&v))
+                        break;
+                hash.l[i] ^= v;
+        }
+
+        /*
          * We mix the hash back into the pool to prevent backtracking
          * attacks (where the attacker knows the state of the pool
          * plus the current outputs, and attempts to find previous
@@ -945,17 +956,6 @@ static void extract_buf(struct entropy_store *r, __u8 *out)
         hash.w[1] ^= hash.w[4];
         hash.w[2] ^= rol32(hash.w[2], 16);
 
-        /*
-         * If we have a architectural hardware random number
-         * generator, mix that in, too.
-         */
-        for (i = 0; i < LONGS(EXTRACT_SIZE); i++) {
-                unsigned long v;
-                if (!arch_get_random_long(&v))
-                        break;
-                hash.l[i] ^= v;
-        }
-
         memcpy(out, &hash, EXTRACT_SIZE);
         memset(&hash, 0, sizeof(hash));
 }
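
For illustration only, here is a minimal, self-contained C sketch of the ordering this patch adopts. It is not the kernel's code: toy_transform(), fake_arch_get_random_long(), pool[] and the constants are made-up stand-ins for sha_transform(), arch_get_random_long(), r->pool and the real pool geometry, and the mix-back step is grossly simplified. The only point it demonstrates is that the HWRNG contribution is XORed into the hash state before that state is folded back into the pool and before a further non-linear transform, rather than being XORed onto the finished output.

/* sketch.c - hypothetical illustration, not drivers/char/random.c */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define POOL_WORDS   30   /* toy pool; multiple of HASH_WORDS for simplicity */
#define HASH_WORDS    5   /* SHA-1 state is 5 x 32-bit words                 */
#define EXTRACT_SIZE 10   /* bytes handed out per extraction                 */
#define HASH_LONGS   ((HASH_WORDS * 4 + sizeof(long) - 1) / sizeof(long))

static uint32_t pool[POOL_WORDS];       /* stand-in for r->pool */

/* Toy non-linear mix standing in for sha_transform(). */
static void toy_transform(uint32_t hash[HASH_WORDS], const uint32_t *in)
{
        for (int i = 0; i < HASH_WORDS; i++) {
                hash[i] += in[i] ^ (hash[(i + 1) % HASH_WORDS] * 2654435761u);
                hash[i] = (hash[i] << 7) | (hash[i] >> 25);
        }
}

/* Stand-in for arch_get_random_long(); returns 0 when no HWRNG exists. */
static int fake_arch_get_random_long(unsigned long *v)
{
        *v = 0xdeadbeefUL;              /* pretend RDRAND output */
        return 1;
}

static void extract_buf_sketch(uint8_t *out)
{
        union {
                uint32_t w[HASH_WORDS];
                unsigned long l[HASH_LONGS];
        } hash;
        int i;

        memset(&hash, 0, sizeof(hash));

        /* 1. Hash across the entire pool. */
        for (i = 0; i < POOL_WORDS; i += HASH_WORDS)
                toy_transform(hash.w, &pool[i]);

        /* 2. Mix in architectural randomness right away, so that ... */
        for (i = 0; i < (int)HASH_LONGS; i++) {
                unsigned long v;
                if (!fake_arch_get_random_long(&v))
                        break;
                hash.l[i] ^= v;
        }

        /* 3. ... it is folded back into the pool (backtrack protection,
         *    grossly simplified here) ...                               */
        pool[0] ^= hash.w[0];

        /* 4. ... and passes through one more non-linear transform before
         *    any bytes are handed out.                                  */
        toy_transform(hash.w, pool);

        memcpy(out, &hash, EXTRACT_SIZE);
        memset(&hash, 0, sizeof(hash));    /* wipe the stack copy */
}

int main(void)
{
        uint8_t buf[EXTRACT_SIZE];

        extract_buf_sketch(buf);
        for (int i = 0; i < EXTRACT_SIZE; i++)
                printf("%02x", buf[i]);
        putchar('\n');
        return 0;
}

If built with any C99 compiler (e.g. cc -std=c99 sketch.c), it prints ten hex bytes whose value is meaningless; only the ordering of steps 1-4 matters.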