path: root/drivers/char/random.c
author	Jason A. Donenfeld <Jason@zx2c4.com>	2017-06-14 18:45:26 -0400
committer	Theodore Ts'o <tytso@mit.edu>	2017-06-19 21:42:49 -0400
commit	4a072c71f49b0a0e495ea13423bdb850da73c58c (patch)
tree	6aacee17db1b1619926d0856d7cf74ef22470fdf /drivers/char/random.c
parent	b169c13de473a85b3c859bb36216a4cb5f00a54a (diff)
random: silence compiler warnings and fix race
Odd versions of gcc for the sh4 architecture will actually warn about flags being used while uninitialized, so we set them to zero. Non-crazy gccs will optimize that out again, so it doesn't make a difference.

Next, over-aggressive gccs could inline the expression that defines use_lock, which could then introduce a race resulting in a lock imbalance. By using READ_ONCE, we prevent that fate. We also make that assignment const, so that gcc can still optimize a nice amount.

Finally, we fix a potential deadlock between primary_crng.lock and batched_entropy_reset_lock, where they could be taken in opposite order. Moving the call to invalidate_batched_entropy to outside the lock rectifies this issue.

Fixes: b169c13de473a85b3c859bb36216a4cb5f00a54a
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Signed-off-by: Theodore Ts'o <tytso@mit.edu>
Cc: stable@vger.kernel.org
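For illustration, here is a minimal userspace sketch of the lock-imbalance hazard the READ_ONCE change closes. It is not kernel code: the READ_ONCE macro is hand-rolled, a pthread mutex stands in for the kernel's rwlock, and get_random_u64_model() is a simplified stand-in for get_random_u64(). A second sketch after the diff models the lock-ordering fix.

#include <pthread.h>
#include <stdbool.h>

/* Force exactly one load of x, as the kernel macro does. */
#define READ_ONCE(x) (*(const volatile typeof(x) *)&(x))

static int crng_init;	/* set to 2 by another thread once the crng is seeded */
static pthread_mutex_t batched_entropy_reset_lock = PTHREAD_MUTEX_INITIALIZER;

unsigned long long get_random_u64_model(void)
{
	/* One forced read decides both the lock and the unlock. With a
	 * plain read, the compiler may substitute the comparison at each
	 * use of use_lock and re-read crng_init; if another thread bumps
	 * crng_init between the two uses, we lock without unlocking. */
	bool use_lock = READ_ONCE(crng_init) < 2;
	unsigned long long ret = 0;

	if (use_lock)
		pthread_mutex_lock(&batched_entropy_reset_lock);
	/* ... pull bytes from the per-cpu batch into ret ... */
	if (use_lock)
		pthread_mutex_unlock(&batched_entropy_reset_lock);
	return ret;
}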
Diffstat (limited to 'drivers/char/random.c')
-rw-r--r--	drivers/char/random.c	12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/drivers/char/random.c b/drivers/char/random.c
index e870f329db88..01a260f67437 100644
--- a/drivers/char/random.c
+++ b/drivers/char/random.c
@@ -803,13 +803,13 @@ static int crng_fast_load(const char *cp, size_t len)
 		p[crng_init_cnt % CHACHA20_KEY_SIZE] ^= *cp;
 		cp++; crng_init_cnt++; len--;
 	}
+	spin_unlock_irqrestore(&primary_crng.lock, flags);
 	if (crng_init_cnt >= CRNG_INIT_CNT_THRESH) {
 		invalidate_batched_entropy();
 		crng_init = 1;
 		wake_up_interruptible(&crng_init_wait);
 		pr_notice("random: fast init done\n");
 	}
-	spin_unlock_irqrestore(&primary_crng.lock, flags);
 	return 1;
 }
 
@@ -841,6 +841,7 @@ static void crng_reseed(struct crng_state *crng, struct entropy_store *r)
 	}
 	memzero_explicit(&buf, sizeof(buf));
 	crng->init_time = jiffies;
+	spin_unlock_irqrestore(&primary_crng.lock, flags);
 	if (crng == &primary_crng && crng_init < 2) {
 		invalidate_batched_entropy();
 		crng_init = 2;
@@ -848,7 +849,6 @@ static void crng_reseed(struct crng_state *crng, struct entropy_store *r)
 		wake_up_interruptible(&crng_init_wait);
 		pr_notice("random: crng init done\n");
 	}
-	spin_unlock_irqrestore(&primary_crng.lock, flags);
 }
 
 static inline void crng_wait_ready(void)
@@ -2041,8 +2041,8 @@ static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u64);
 u64 get_random_u64(void)
 {
 	u64 ret;
-	bool use_lock = crng_init < 2;
-	unsigned long flags;
+	bool use_lock = READ_ONCE(crng_init) < 2;
+	unsigned long flags = 0;
 	struct batched_entropy *batch;
 
 #if BITS_PER_LONG == 64
@@ -2073,8 +2073,8 @@ static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u32);
 u32 get_random_u32(void)
 {
 	u32 ret;
-	bool use_lock = crng_init < 2;
-	unsigned long flags;
+	bool use_lock = READ_ONCE(crng_init) < 2;
+	unsigned long flags = 0;
 	struct batched_entropy *batch;
 
 	if (arch_get_random_int(&ret))
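The other half of the patch, moving spin_unlock_irqrestore() above the invalidate_batched_entropy() calls, breaks a classic ABBA deadlock. Below is a hypothetical userspace model (pthread mutexes standing in for the kernel's spinlock and rwlock; the names mirror the kernel's, but the code is illustrative only): path_a models crng_fast_load()/crng_reseed() with the patch applied, and path_b models get_random_u64() refilling its batch while crng_init < 2.

#include <pthread.h>

static pthread_mutex_t primary_crng_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t batched_entropy_reset_lock = PTHREAD_MUTEX_INITIALIZER;

static void invalidate_batched_entropy(void)
{
	pthread_mutex_lock(&batched_entropy_reset_lock);
	/* ... mark the per-cpu entropy batches stale ... */
	pthread_mutex_unlock(&batched_entropy_reset_lock);
}

/* Models crng_fast_load()/crng_reseed() with the patch applied. */
static void *path_a(void *arg)
{
	pthread_mutex_lock(&primary_crng_lock);
	/* ... mix new input into the crng state ... */
	pthread_mutex_unlock(&primary_crng_lock);	/* drop lock A first, */
	invalidate_batched_entropy();			/* then take lock B alone */
	return NULL;
}

/* Models get_random_u64() while crng_init < 2: B is taken, then A. */
static void *path_b(void *arg)
{
	pthread_mutex_lock(&batched_entropy_reset_lock);
	pthread_mutex_lock(&primary_crng_lock);
	/* ... refill the per-cpu batch from the crng ... */
	pthread_mutex_unlock(&primary_crng_lock);
	pthread_mutex_unlock(&batched_entropy_reset_lock);
	return NULL;
}

int main(void)
{
	pthread_t a, b;

	pthread_create(&a, NULL, path_a, NULL);
	pthread_create(&b, NULL, path_b, NULL);
	pthread_join(a, NULL);
	pthread_join(b, NULL);
	return 0;
}

Before the patch, path_a called invalidate_batched_entropy() while still holding primary_crng_lock, taking the locks in A-then-B order while path_b takes them B-then-A; with unlucky timing, each thread blocks forever on the lock the other holds. After the patch, path_a never holds both locks at once, so no ordering conflict is possible.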