Diffstat (limited to 'lib/dma-debug.c')
-rw-r--r--  lib/dma-debug.c | 43
1 file changed, 28 insertions, 15 deletions
diff --git a/lib/dma-debug.c b/lib/dma-debug.c
index add80cc02dbe..9722bd2dbc9b 100644
--- a/lib/dma-debug.c
+++ b/lib/dma-debug.c
@@ -102,6 +102,14 @@ static DEFINE_SPINLOCK(free_entries_lock);
 /* Global disable flag - will be set in case of an error */
 static u32 global_disable __read_mostly;
 
+/* Early initialization disable flag, set at the end of dma_debug_init */
+static bool dma_debug_initialized __read_mostly;
+
+static inline bool dma_debug_disabled(void)
+{
+	return global_disable || !dma_debug_initialized;
+}
+
 /* Global error count */
 static u32 error_count;
 
@@ -945,7 +953,7 @@ static int dma_debug_device_change(struct notifier_block *nb, unsigned long acti
 	struct dma_debug_entry *uninitialized_var(entry);
 	int count;
 
-	if (global_disable)
+	if (dma_debug_disabled())
 		return 0;
 
 	switch (action) {
@@ -973,7 +981,7 @@ void dma_debug_add_bus(struct bus_type *bus)
 {
 	struct notifier_block *nb;
 
-	if (global_disable)
+	if (dma_debug_disabled())
 		return;
 
 	nb = kzalloc(sizeof(struct notifier_block), GFP_KERNEL);
@@ -994,6 +1002,9 @@ void dma_debug_init(u32 num_entries)
 {
 	int i;
 
+	/* Do not use dma_debug_initialized here, since we really want to be
+	 * called to set dma_debug_initialized
+	 */
 	if (global_disable)
 		return;
 
@@ -1021,6 +1032,8 @@ void dma_debug_init(u32 num_entries)
 
 	nr_total_entries = num_free_entries;
 
+	dma_debug_initialized = true;
+
 	pr_info("DMA-API: debugging enabled by kernel config\n");
 }
 
@@ -1243,7 +1256,7 @@ void debug_dma_map_page(struct device *dev, struct page *page, size_t offset,
 {
 	struct dma_debug_entry *entry;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	if (dma_mapping_error(dev, dma_addr))
@@ -1283,7 +1296,7 @@ void debug_dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
 	struct hash_bucket *bucket;
 	unsigned long flags;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	ref.dev = dev;
@@ -1325,7 +1338,7 @@ void debug_dma_unmap_page(struct device *dev, dma_addr_t addr,
 		.direction = direction,
 	};
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	if (map_single)
@@ -1342,7 +1355,7 @@ void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
 	struct scatterlist *s;
 	int i;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	for_each_sg(sg, s, mapped_ents, i) {
@@ -1395,7 +1408,7 @@ void debug_dma_unmap_sg(struct device *dev, struct scatterlist *sglist,
 	struct scatterlist *s;
 	int mapped_ents = 0, i;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	for_each_sg(sglist, s, nelems, i) {
@@ -1427,7 +1440,7 @@ void debug_dma_alloc_coherent(struct device *dev, size_t size,
 {
 	struct dma_debug_entry *entry;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	if (unlikely(virt == NULL))
@@ -1462,7 +1475,7 @@ void debug_dma_free_coherent(struct device *dev, size_t size,
 		.direction = DMA_BIDIRECTIONAL,
 	};
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	check_unmap(&ref);
@@ -1474,7 +1487,7 @@ void debug_dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
 {
 	struct dma_debug_entry ref;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	ref.type = dma_debug_single;
@@ -1494,7 +1507,7 @@ void debug_dma_sync_single_for_device(struct device *dev,
 {
 	struct dma_debug_entry ref;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	ref.type = dma_debug_single;
@@ -1515,7 +1528,7 @@ void debug_dma_sync_single_range_for_cpu(struct device *dev,
 {
 	struct dma_debug_entry ref;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	ref.type = dma_debug_single;
@@ -1536,7 +1549,7 @@ void debug_dma_sync_single_range_for_device(struct device *dev,
 {
 	struct dma_debug_entry ref;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	ref.type = dma_debug_single;
@@ -1556,7 +1569,7 @@ void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
 	struct scatterlist *s;
 	int mapped_ents = 0, i;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	for_each_sg(sg, s, nelems, i) {
@@ -1589,7 +1602,7 @@ void debug_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
 	struct scatterlist *s;
 	int mapped_ents = 0, i;
 
-	if (unlikely(global_disable))
+	if (unlikely(dma_debug_disabled()))
 		return;
 
 	for_each_sg(sg, s, nelems, i) {
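
The change pattern above reduces to a small amount of logic: every debug_dma_* entry point now bails out not only when global_disable has been set, but also until dma_debug_init() has finished setting up its tracking state. The following is a standalone userspace sketch of that gating, not kernel code: the two flags and the dma_debug_disabled() helper mirror the patch, while debug_dma_hook(), main() and the printf() output are illustrative stand-ins.

#include <stdbool.h>
#include <stdio.h>

static bool global_disable;          /* set when dma-debug hits an error   */
static bool dma_debug_initialized;   /* set at the end of dma_debug_init() */

static inline bool dma_debug_disabled(void)
{
	/* Disabled when explicitly turned off, or before init has completed. */
	return global_disable || !dma_debug_initialized;
}

/* Stand-in for the debug_dma_* entry points touched by this patch. */
static void debug_dma_hook(const char *what)
{
	if (dma_debug_disabled())
		return;		/* call arrived too early, or debugging is off */
	printf("tracking %s\n", what);
}

static void dma_debug_init(void)
{
	/* Checks global_disable directly: this is the function that flips
	 * dma_debug_initialized in the first place. */
	if (global_disable)
		return;
	/* ... entry pool allocation would happen here ... */
	dma_debug_initialized = true;
}

int main(void)
{
	debug_dma_hook("early mapping");	/* ignored: init not done yet */
	dma_debug_init();
	debug_dma_hook("late mapping");		/* now tracked */
	return 0;
}

The effect mirrors the patch: calls into the debug hooks that arrive before dma_debug_init() has completed are silently ignored instead of touching tracking state that has not been allocated yet.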