author     Xiao Guangrong <xiaoguangrong@cn.fujitsu.com>   2011-04-20 02:44:57 -0400
committer  Chris Mason <chris.mason@oracle.com>            2011-05-23 13:24:41 -0400
commit     8233767a227ac5843f1023b88c7272a7b5058f5f (patch)
tree       e37bb379b90d9754035021ad042de1970a5106fc
parent     b083916638eee513be501f53b42a4be0b9851db0 (diff)
Btrfs: allocate extent state and check the result properly
The extent_io code does not allocate extent_state and check the result properly:

- in set_extent_bit, no extent_state is allocated when the path is not allowed to wait
- in clear_extent_bit, the result of the atomic allocation is not checked, so a failed allocation can crash us
- if the allocation fails, trigger BUG_ON() instead of returning -ENOMEM, since the return value of clear_extent_bit() is ignored by many callers

Signed-off-by: Xiao Guangrong <xiaoguangrong@cn.fujitsu.com>
Signed-off-by: Chris Mason <chris.mason@oracle.com>
-rw-r--r--  fs/btrfs/extent_io.c  34
1 file changed, 26 insertions, 8 deletions
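
For context, the sketch below illustrates the pre-allocation pattern this patch converges on: reuse a node preallocated before taking the lock when waiting was allowed, otherwise fall back to an atomic allocation and fail loudly if that also comes up empty. It is a simplified userspace sketch only, not the kernel code: struct extent_state_stub, alloc_state_atomic() and the calloc()/abort() calls are stand-ins for struct extent_state, alloc_extent_state_atomic(), the GFP_ATOMIC allocation and BUG_ON().

/*
 * Simplified userspace sketch of the pre-allocation pattern (illustration
 * only): calloc()/abort() play the roles of the kernel's GFP_ATOMIC
 * allocation and BUG_ON().
 */
#include <stdlib.h>

struct extent_state_stub {		/* stand-in for struct extent_state */
	unsigned long start, end, state;
};

/* Reuse the caller's preallocated node if there is one, otherwise try a
 * non-sleeping allocation -- mirrors alloc_extent_state_atomic(). */
static struct extent_state_stub *
alloc_state_atomic(struct extent_state_stub *prealloc)
{
	if (!prealloc)
		prealloc = calloc(1, sizeof(*prealloc));
	return prealloc;
}

int main(void)
{
	struct extent_state_stub *prealloc = NULL;
	int can_wait = 1;		/* corresponds to (mask & __GFP_WAIT) */

	/* Before taking the lock: preallocate while sleeping is allowed. */
	if (!prealloc && can_wait)
		prealloc = calloc(1, sizeof(*prealloc));

	/* Under the lock (not shown): fall back to the atomic path and give
	 * up loudly on failure, since callers ignore the return value. */
	prealloc = alloc_state_atomic(prealloc);
	if (!prealloc)
		abort();		/* plays the role of BUG_ON(!prealloc) */

	free(prealloc);
	return 0;
}

The trade-off the commit message points at: because most callers of clear_extent_bit() discard its return value, returning -ENOMEM would silently lose the error, so the patch prefers to fail loudly on an allocation failure inside the locked section.
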
diff --git a/fs/btrfs/extent_io.c b/fs/btrfs/extent_io.c
index ba41da59e31b..9ccea86dd015 100644
--- a/fs/btrfs/extent_io.c
+++ b/fs/btrfs/extent_io.c
@@ -439,6 +439,15 @@ static int clear_state_bit(struct extent_io_tree *tree,
 	return ret;
 }
 
+static struct extent_state *
+alloc_extent_state_atomic(struct extent_state *prealloc)
+{
+	if (!prealloc)
+		prealloc = alloc_extent_state(GFP_ATOMIC);
+
+	return prealloc;
+}
+
 /*
  * clear some bits on a range in the tree.  This may require splitting
  * or inserting elements in the tree, so the gfp mask is used to
@@ -476,8 +485,7 @@ int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
 again:
 	if (!prealloc && (mask & __GFP_WAIT)) {
 		prealloc = alloc_extent_state(mask);
-		if (!prealloc)
-			return -ENOMEM;
+		BUG_ON(!prealloc);
 	}
 
 	spin_lock(&tree->lock);
@@ -529,8 +537,8 @@ hit_next:
 	 */
 
 	if (state->start < start) {
-		if (!prealloc)
-			prealloc = alloc_extent_state(GFP_ATOMIC);
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, start);
 		BUG_ON(err == -EEXIST);
 		prealloc = NULL;
@@ -551,8 +559,8 @@ hit_next:
 	 * on the first half
 	 */
 	if (state->start <= end && state->end > end) {
-		if (!prealloc)
-			prealloc = alloc_extent_state(GFP_ATOMIC);
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, end + 1);
 		BUG_ON(err == -EEXIST);
 		if (wake)
@@ -725,8 +733,7 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
 again:
 	if (!prealloc && (mask & __GFP_WAIT)) {
 		prealloc = alloc_extent_state(mask);
-		if (!prealloc)
-			return -ENOMEM;
+		BUG_ON(!prealloc);
 	}
 
 	spin_lock(&tree->lock);
@@ -743,6 +750,8 @@ again:
 	 */
 	node = tree_search(tree, start);
 	if (!node) {
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = insert_state(tree, prealloc, start, end, &bits);
 		prealloc = NULL;
 		BUG_ON(err == -EEXIST);
@@ -811,6 +820,9 @@ hit_next:
 			err = -EEXIST;
 			goto out;
 		}
+
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, start);
 		BUG_ON(err == -EEXIST);
 		prealloc = NULL;
@@ -841,6 +853,9 @@ hit_next:
 			this_end = end;
 		else
 			this_end = last_start - 1;
+
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = insert_state(tree, prealloc, start, this_end,
 				   &bits);
 		BUG_ON(err == -EEXIST);
@@ -865,6 +880,9 @@ hit_next:
 			err = -EEXIST;
 			goto out;
 		}
+
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, end + 1);
 		BUG_ON(err == -EEXIST);
 