about summary refs log tree commit diff stats
path: root/include/drm/drm_mm.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/drm/drm_mm.h')
-rw-r--r--  include/drm/drm_mm.h | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/include/drm/drm_mm.h b/include/drm/drm_mm.h
index d81b0ba9921f..f262da180117 100644
--- a/include/drm/drm_mm.h
+++ b/include/drm/drm_mm.h
@@ -459,10 +459,13 @@ __drm_mm_interval_first(const struct drm_mm *mm, u64 start, u64 last);
  * but using the internal interval tree to accelerate the search for the
  * starting node, and so not safe against removal of elements. It assumes
  * that @end is within (or is the upper limit of) the drm_mm allocator.
+ * If [@start, @end] are beyond the range of the drm_mm, the iterator may walk
+ * over the special _unallocated_ &drm_mm.head_node, and may even continue
+ * indefinitely.
  */
 #define drm_mm_for_each_node_in_range(node__, mm__, start__, end__)	\
 	for (node__ = __drm_mm_interval_first((mm__), (start__), (end__)-1); \
-	     node__ && node__->start < (end__);				\
+	     node__->start < (end__);					\
 	     node__ = list_next_entry(node__, node_list))
 
 void drm_mm_scan_init_with_range(struct drm_mm_scan *scan,