path: root/fs/btrfs/extent_io.c
author     Chris Mason <chris.mason@oracle.com>  2011-05-23 14:37:47 -0400
committer  Chris Mason <chris.mason@oracle.com>  2011-05-23 14:37:47 -0400
commit     d6c0cb379c5198487e4ac124728cbb2346d63b1f (patch)
tree       167a97ac58c7a2fbf7c4e94b6abbfe1e03af014a /fs/btrfs/extent_io.c
parent     8e531cdfeb75269c6c5aae33651cca39707848da (diff)
parent     1f78160ce1b1b8e657e2248118c4d91f881763f0 (diff)
Merge branch 'cleanups_and_fixes' into inode_numbers
Conflicts:
	fs/btrfs/tree-log.c
	fs/btrfs/volumes.c

Signed-off-by: Chris Mason <chris.mason@oracle.com>
Diffstat (limited to 'fs/btrfs/extent_io.c')
-rw-r--r--  fs/btrfs/extent_io.c  56
1 file changed, 40 insertions(+), 16 deletions(-)
diff --git a/fs/btrfs/extent_io.c b/fs/btrfs/extent_io.c
index 64c8b361b539..a90c4a12556b 100644
--- a/fs/btrfs/extent_io.c
+++ b/fs/btrfs/extent_io.c
@@ -439,6 +439,15 @@ static int clear_state_bit(struct extent_io_tree *tree,
 	return ret;
 }
 
+static struct extent_state *
+alloc_extent_state_atomic(struct extent_state *prealloc)
+{
+	if (!prealloc)
+		prealloc = alloc_extent_state(GFP_ATOMIC);
+
+	return prealloc;
+}
+
 /*
  * clear some bits on a range in the tree.  This may require splitting
  * or inserting elements in the tree, so the gfp mask is used to
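The hunk above adds alloc_extent_state_atomic(), which folds the recurring "use the caller's preallocated state, otherwise try a GFP_ATOMIC allocation" fallback into one helper; the remaining hunks switch each split/insert site over to it. As a rough userspace analogue of that pattern (the struct, prealloc_state() and the malloc() fallback below are invented for illustration and are not btrfs code):

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for struct extent_state. */
struct state {
	unsigned long start;
	unsigned long end;
};

/*
 * Analogue of alloc_extent_state_atomic(): reuse the preallocated
 * object if the caller has one, otherwise fall back to an allocation
 * that must not sleep (plain malloc() here).
 */
static struct state *prealloc_state(struct state *prealloc)
{
	if (!prealloc)
		prealloc = malloc(sizeof(*prealloc));
	return prealloc;
}

int main(void)
{
	struct state *prealloc = NULL;

	/* Every call site in the patch collapses to these two lines. */
	prealloc = prealloc_state(prealloc);
	if (!prealloc)		/* the kernel patch uses BUG_ON(!prealloc) */
		return 1;

	prealloc->start = 0;
	prealloc->end = 4095;
	printf("state [%lu, %lu]\n", prealloc->start, prealloc->end);
	free(prealloc);
	return 0;
}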
@@ -476,8 +485,7 @@ int clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
 again:
 	if (!prealloc && (mask & __GFP_WAIT)) {
 		prealloc = alloc_extent_state(mask);
-		if (!prealloc)
-			return -ENOMEM;
+		BUG_ON(!prealloc);
 	}
 
 	spin_lock(&tree->lock);
@@ -529,8 +537,8 @@ hit_next:
 	 */
 
 	if (state->start < start) {
-		if (!prealloc)
-			prealloc = alloc_extent_state(GFP_ATOMIC);
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, start);
 		BUG_ON(err == -EEXIST);
 		prealloc = NULL;
@@ -551,8 +559,8 @@ hit_next:
 	 * on the first half
 	 */
 	if (state->start <= end && state->end > end) {
-		if (!prealloc)
-			prealloc = alloc_extent_state(GFP_ATOMIC);
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, end + 1);
 		BUG_ON(err == -EEXIST);
 		if (wake)
@@ -725,8 +733,7 @@ int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
 again:
 	if (!prealloc && (mask & __GFP_WAIT)) {
 		prealloc = alloc_extent_state(mask);
-		if (!prealloc)
-			return -ENOMEM;
+		BUG_ON(!prealloc);
 	}
 
 	spin_lock(&tree->lock);
@@ -743,6 +750,8 @@ again:
 	 */
 	node = tree_search(tree, start);
 	if (!node) {
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = insert_state(tree, prealloc, start, end, &bits);
 		prealloc = NULL;
 		BUG_ON(err == -EEXIST);
@@ -771,20 +780,18 @@ hit_next:
 		if (err)
 			goto out;
 
+		next_node = rb_next(node);
 		cache_state(state, cached_state);
 		merge_state(tree, state);
 		if (last_end == (u64)-1)
 			goto out;
 
 		start = last_end + 1;
-		if (start < end && prealloc && !need_resched()) {
-			next_node = rb_next(node);
-			if (next_node) {
-				state = rb_entry(next_node, struct extent_state,
-						 rb_node);
-				if (state->start == start)
-					goto hit_next;
-			}
+		if (next_node && start < end && prealloc && !need_resched()) {
+			state = rb_entry(next_node, struct extent_state,
+					 rb_node);
+			if (state->start == start)
+				goto hit_next;
 		}
 		goto search_again;
 	}
@@ -811,6 +818,9 @@ hit_next:
 			err = -EEXIST;
 			goto out;
 		}
+
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, start);
 		BUG_ON(err == -EEXIST);
 		prealloc = NULL;
@@ -841,14 +851,25 @@ hit_next:
 			this_end = end;
 		else
 			this_end = last_start - 1;
+
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
+
+		/*
+		 * Avoid to free 'prealloc' if it can be merged with
+		 * the later extent.
+		 */
+		atomic_inc(&prealloc->refs);
 		err = insert_state(tree, prealloc, start, this_end,
 				   &bits);
 		BUG_ON(err == -EEXIST);
 		if (err) {
+			free_extent_state(prealloc);
 			prealloc = NULL;
 			goto out;
 		}
 		cache_state(prealloc, cached_state);
+		free_extent_state(prealloc);
 		prealloc = NULL;
 		start = this_end + 1;
 		goto search_again;
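The atomic_inc(&prealloc->refs) / free_extent_state() pair in the hunk above pins 'prealloc' across insert_state(): if the insert path merges the new state into a neighbour and drops a reference, cache_state() can still safely dereference it, and the extra reference is released once it is no longer needed. A minimal userspace sketch of that hold-then-release idea, assuming a simple non-atomic refcount (the 'state' type and insert_and_maybe_merge() are invented for illustration):

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical refcounted object standing in for struct extent_state. */
struct state {
	int refs;
	unsigned long start, end;
};

static struct state *state_alloc(unsigned long start, unsigned long end)
{
	struct state *s = malloc(sizeof(*s));
	s->refs = 1;
	s->start = start;
	s->end = end;
	return s;
}

static void state_put(struct state *s)
{
	/* Drop one reference; free on the last one. */
	if (--s->refs == 0)
		free(s);
}

/* Pretend insert: like a merge, it consumes the caller's reference. */
static void insert_and_maybe_merge(struct state *s)
{
	state_put(s);
}

int main(void)
{
	struct state *prealloc = state_alloc(0, 4095);

	prealloc->refs++;		/* like atomic_inc(&prealloc->refs) */
	insert_and_maybe_merge(prealloc);

	/* Still safe to read: we hold our own reference. */
	printf("cached [%lu, %lu]\n", prealloc->start, prealloc->end);

	state_put(prealloc);		/* like free_extent_state(prealloc) */
	return 0;
}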
@@ -865,6 +886,9 @@ hit_next:
 			err = -EEXIST;
 			goto out;
 		}
+
+		prealloc = alloc_extent_state_atomic(prealloc);
+		BUG_ON(!prealloc);
 		err = split_state(tree, state, prealloc, end + 1);
 		BUG_ON(err == -EEXIST);
 