Diffstat (limited to 'drivers/md/dm-cache-target.c'):

 drivers/md/dm-cache-target.c | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/drivers/md/dm-cache-target.c b/drivers/md/dm-cache-target.c
index b680da5d7b93..1fe93cfea7d3 100644
--- a/drivers/md/dm-cache-target.c
+++ b/drivers/md/dm-cache-target.c
@@ -424,6 +424,7 @@ static void free_migration(struct dm_cache_migration *mg)
 		wake_up(&cache->migration_wait);
 
 	mempool_free(mg, cache->migration_pool);
+	wake_worker(cache);
 }
 
 static int prealloc_data_structs(struct cache *cache, struct prealloc *p)
@@ -1966,6 +1967,7 @@ static void process_deferred_bios(struct cache *cache)
 		 * this bio might require one, we pause until there are some
 		 * prepared mappings to process.
 		 */
+		prealloc_used = true;
		if (prealloc_data_structs(cache, &structs)) {
 			spin_lock_irqsave(&cache->lock, flags);
 			bio_list_merge(&cache->deferred_bios, &bios);
@@ -1981,7 +1983,6 @@ static void process_deferred_bios(struct cache *cache)
 			process_discard_bio(cache, &structs, bio);
 		else
 			process_bio(cache, &structs, bio);
-		prealloc_used = true;
 	}
 
 	if (prealloc_used)
@@ -2010,6 +2011,7 @@ static void process_deferred_cells(struct cache *cache)
 		 * this bio might require one, we pause until there are some
 		 * prepared mappings to process.
 		 */
+		prealloc_used = true;
 		if (prealloc_data_structs(cache, &structs)) {
 			spin_lock_irqsave(&cache->lock, flags);
 			list_splice(&cells, &cache->deferred_cells);
@@ -2018,7 +2020,6 @@ static void process_deferred_cells(struct cache *cache)
 		}
 
 		process_cell(cache, &structs, cell);
-		prealloc_used = true;
 	}
 
 	if (prealloc_used)
@@ -2080,6 +2081,7 @@ static void writeback_some_dirty_blocks(struct cache *cache)
 		if (policy_writeback_work(cache->policy, &oblock, &cblock, busy))
 			break; /* no work to do */
 
+		prealloc_used = true;
 		if (prealloc_data_structs(cache, &structs) ||
 		    get_cell(cache, oblock, &structs, &old_ocell)) {
 			policy_set_dirty(cache->policy, oblock);
@@ -2087,7 +2089,6 @@ static void writeback_some_dirty_blocks(struct cache *cache)
 		}
 
 		writeback(cache, &structs, oblock, cblock, old_ocell);
-		prealloc_used = true;
 	}
 
 	if (prealloc_used)
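
The three prealloc_used hunks all make the same change: the flag is now set immediately before the call to prealloc_data_structs(), not after the work that consumes the structures. Judging by this fix, prealloc_data_structs() apparently allocates several structures in sequence and can fail after a partial allocation; with the old placement, a failure (or an early break) skipped the final prealloc_free_structs() call and leaked whatever had already been allocated. The free_migration() hunk is a separate tweak, kicking the worker whenever a migration object is returned to the pool, presumably so work that was waiting on a free migration resumes promptly. Below is a minimal stand-alone sketch of the flag-before-allocation pattern, not the kernel code: struct prealloc, its members, and both helpers here are simplified hypothetical stand-ins for the dm-cache versions, and the forced failure is artificial.

/*
 * Stand-alone sketch of the prealloc_used pattern above.  The struct
 * and helpers are simplified stand-ins; only the control flow mirrors
 * the kernel code.
 */
#include <stdbool.h>
#include <stdlib.h>

struct prealloc {
	void *mg;	/* stand-in for the preallocated migration */
	void *cell;	/* stand-in for a preallocated prison cell */
};

/* Fills in any missing member; may fail partway, leaving p->mg live. */
static int prealloc_data_structs(struct prealloc *p, bool fail)
{
	if (!p->mg && !(p->mg = malloc(32)))
		return -1;
	if (fail)
		return -1;	/* simulated -ENOMEM after a partial alloc */
	if (!p->cell && !(p->cell = malloc(32)))
		return -1;
	return 0;
}

static void prealloc_free_structs(struct prealloc *p)
{
	free(p->mg);
	free(p->cell);
	p->mg = p->cell = NULL;
}

static void process_items(int nitems)
{
	struct prealloc structs = { NULL, NULL };
	bool prealloc_used = false;
	int i;

	for (i = 0; i < nitems; i++) {
		/*
		 * Set the flag *before* the allocation attempt, as the
		 * fix does: if prealloc_data_structs() fails after
		 * allocating only p->mg, we still reach the cleanup
		 * below.  Setting it after the work (the old placement)
		 * would skip cleanup on this break and leak p->mg.
		 */
		prealloc_used = true;
		if (prealloc_data_structs(&structs, i == nitems - 1))
			break;

		/* ... the real code consumes the structures here ... */
	}

	if (prealloc_used)
		prealloc_free_structs(&structs);
}

int main(void)
{
	process_items(3);	/* forces a partial-alloc failure on i == 2 */
	return 0;
}

Running this under a leak checker shows the point of the reordering: with the flag set before the allocation attempt, the partially filled struct prealloc is always handed to prealloc_free_structs(); moving the assignment back after the work reintroduces the leak on the failure path.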