path: root/include/linux/bio.h
author	David Woodhouse <David.Woodhouse@intel.com>	2008-08-09 11:42:20 -0400
committer	Jens Axboe <jens.axboe@oracle.com>	2008-10-09 02:56:02 -0400
commit	e17fc0a1ccf88f6d4dcb363729f3141b0958c325 (patch)
tree	0a7c2dc1c3159c2af14d87c67ca83e158b2c78b5	/include/linux/bio.h
parent	d30a2605be9d5132d95944916e8f578fcfe4f976 (diff)
Allow elevators to sort/merge discard requests
But blkdev_issue_discard() still emits requests which are interpreted as soft barriers, because naïve callers might otherwise issue subsequent writes to those same sectors, which might cross on the queue (if they're reallocated quickly enough).

Callers still _can_ issue non-barrier discard requests, but they have to take care of queue ordering for themselves.

Signed-off-by: David Woodhouse <David.Woodhouse@intel.com>
Signed-off-by: Jens Axboe <jens.axboe@oracle.com>
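To make the ordering concern concrete, here is a minimal, hypothetical sketch of a caller issuing a *non-barrier* discard on the bio API of this era. The helper name and the details of building the bio are illustrative assumptions, not part of this patch; such a caller would have to wait for completion (or otherwise order the queue itself) before writing to the same sectors.

```c
#include <linux/bio.h>
#include <linux/blkdev.h>

/*
 * Hypothetical sketch only (not from this patch): build a payload-less
 * discard bio by hand and submit it WITHOUT BIO_RW_BARRIER, so the
 * elevator is free to reorder it.  Completion handling, error checking
 * and bio_put() are omitted; queue ordering is the caller's problem.
 */
static void issue_unordered_discard(struct block_device *bdev,
				    sector_t sector, unsigned int nr_sects)
{
	struct bio *bio = bio_alloc(GFP_KERNEL, 0);

	bio->bi_bdev   = bdev;
	bio->bi_sector = sector;
	bio->bi_size   = nr_sects << 9;	/* sectors to bytes */

	/* discard bit only; no barrier bit, so no implicit queue ordering */
	submit_bio(1 << BIO_RW_DISCARD, bio);
}
```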
Diffstat (limited to 'include/linux/bio.h')
-rw-r--r--	include/linux/bio.h	2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/include/linux/bio.h b/include/linux/bio.h
index 1fdfc5621c83..33c3947d61e9 100644
--- a/include/linux/bio.h
+++ b/include/linux/bio.h
@@ -188,8 +188,8 @@ struct bio {
 #define bio_failfast(bio)	((bio)->bi_rw & (1 << BIO_RW_FAILFAST))
 #define bio_rw_ahead(bio)	((bio)->bi_rw & (1 << BIO_RW_AHEAD))
 #define bio_rw_meta(bio)	((bio)->bi_rw & (1 << BIO_RW_META))
-#define bio_empty_barrier(bio)	(bio_barrier(bio) && !bio_has_data(bio))
 #define bio_discard(bio)	((bio)->bi_rw & (1 << BIO_RW_DISCARD))
+#define bio_empty_barrier(bio)	(bio_barrier(bio) && !bio_has_data(bio) && !bio_discard(bio))
 
 static inline unsigned int bio_cur_sectors(struct bio *bio)
 {
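Read against the hunk above: a barrier discard reaches the queue with the barrier bit set but no data pages attached, so under the old definition bio_empty_barrier() also matched it. The comparison below restates both forms as plain helpers to show why the extra !bio_discard() test is needed; the function names are made up for illustration, not taken from the patch.

```c
/* Illustrative restatement of the old and new macro bodies. */
static inline bool empty_barrier_old(struct bio *bio)
{
	/* also true for a barrier discard, since it carries no data */
	return bio_barrier(bio) && !bio_has_data(bio);
}

static inline bool empty_barrier_new(struct bio *bio)
{
	/* a data-less barrier discard is a discard, not an empty barrier */
	return bio_barrier(bio) && !bio_has_data(bio) && !bio_discard(bio);
}
```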