author      Konsta Holtta <kholtta@nvidia.com>                    2018-03-15 09:34:14 -0400
committer   mobile promotions <svcmobile_promotions@nvidia.com>   2018-03-16 20:12:03 -0400
commit      9f9035d10bdbd56e13697aab44bc36b4bd9275cd (patch)
tree        b43b0ee7fb26fbbedba36e08798d8c8636cfb720
parent      0378bc32e4db87dd7bb58fce2f85cd4ff436506a (diff)
gpu: nvgpu: remove fence param from channel_sync
The fence parameter that gets output from gk20a_channel_sync's wait_syncpt()
and wait_fd() APIs is no longer used for anything. Delete it.

Jira NVGPU-527
Jira NVGPU-528
Bug 200390539

Change-Id: I659504062dc6aee83a0a0d9f5625372b4ae8c0e2
Signed-off-by: Konsta Holtta <kholtta@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/1676734
Reviewed-by: svc-mobile-coverity <svc-mobile-coverity@nvidia.com>
Reviewed-by: Alex Waterman <alexw@nvidia.com>
GVS: Gerrit_Virtual_Submit
Reviewed-by: Terje Bergstrom <tbergstrom@nvidia.com>
Reviewed-by: mobile promotions <svcmobile_promotions@nvidia.com>
Tested-by: mobile promotions <svcmobile_promotions@nvidia.com>
-rw-r--r--   drivers/gpu/nvgpu/common/linux/channel.c        6
-rw-r--r--   drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.c   12
-rw-r--r--   drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.h   16
3 files changed, 11 insertions(+), 23 deletions(-)
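At the interface level, the change simply drops the trailing struct gk20a_fence *
output argument from both wait ops of struct gk20a_channel_sync. Taken directly
from the channel_sync_gk20a.h hunk below (nothing here is new code), the two
declarations go from

        int (*wait_syncpt)(struct gk20a_channel_sync *s, u32 id, u32 thresh,
                        struct priv_cmd_entry *entry,
                        struct gk20a_fence *fence);
        int (*wait_fd)(struct gk20a_channel_sync *s, int fd,
                        struct priv_cmd_entry *entry,
                        struct gk20a_fence *fence);

to

        int (*wait_syncpt)(struct gk20a_channel_sync *s, u32 id, u32 thresh,
                        struct priv_cmd_entry *entry);
        int (*wait_fd)(struct gk20a_channel_sync *s, int fd,
                        struct priv_cmd_entry *entry);

The four backend implementations (syncpoint and semaphore) and the one caller in
common/linux/channel.c are adjusted accordingly.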
diff --git a/drivers/gpu/nvgpu/common/linux/channel.c b/drivers/gpu/nvgpu/common/linux/channel.c
index ea294738..cd9cadf8 100644
--- a/drivers/gpu/nvgpu/common/linux/channel.c
+++ b/drivers/gpu/nvgpu/common/linux/channel.c
@@ -488,11 +488,11 @@ static int gk20a_submit_prepare_syncs(struct channel_gk20a *c,
         if (flags & NVGPU_SUBMIT_GPFIFO_FLAGS_SYNC_FENCE) {
                 wait_fence_fd = fence->id;
                 err = c->sync->wait_fd(c->sync, wait_fence_fd,
-                                job->wait_cmd, job->pre_fence);
+                                job->wait_cmd);
         } else {
                 err = c->sync->wait_syncpt(c->sync, fence->id,
-                                fence->value, job->wait_cmd,
-                                job->pre_fence);
+                                fence->value,
+                                job->wait_cmd);
         }
 
         if (!err) {
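For illustration, here is a condensed sketch of the caller side after this change,
modeled on the gk20a_submit_prepare_syncs() hunk above. The emit_wait_cmd() wrapper
is hypothetical, the struct nvgpu_fence and struct channel_gk20a_job types for the
fence and job arguments are assumed from context, and error handling plus the rest
of the submit path are omitted.

        /* Hypothetical helper: emit the GPU wait command for a submit using the
         * trimmed wait APIs. No gk20a_fence output parameter is involved anymore.
         */
        static int emit_wait_cmd(struct channel_gk20a *c,
                                 struct nvgpu_fence *fence,
                                 struct channel_gk20a_job *job,
                                 u32 flags)
        {
                int err;

                if (flags & NVGPU_SUBMIT_GPFIFO_FLAGS_SYNC_FENCE) {
                        /* fence->id carries a sync fd in this mode. */
                        err = c->sync->wait_fd(c->sync, fence->id, job->wait_cmd);
                } else {
                        /* Otherwise it is a raw syncpoint id/threshold pair. */
                        err = c->sync->wait_syncpt(c->sync, fence->id,
                                                   fence->value, job->wait_cmd);
                }

                return err;
        }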
diff --git a/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.c b/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.c
index 20460c80..3c12147f 100644
--- a/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.c
+++ b/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.c
@@ -51,8 +51,7 @@ struct gk20a_channel_syncpt {
 };
 
 static int gk20a_channel_syncpt_wait_syncpt(struct gk20a_channel_sync *s,
-                u32 id, u32 thresh, struct priv_cmd_entry *wait_cmd,
-                struct gk20a_fence *fence)
+                u32 id, u32 thresh, struct priv_cmd_entry *wait_cmd)
 {
         struct gk20a_channel_syncpt *sp =
                 container_of(s, struct gk20a_channel_syncpt, ops);
@@ -84,8 +83,7 @@ static int gk20a_channel_syncpt_wait_syncpt(struct gk20a_channel_sync *s,
 }
 
 static int gk20a_channel_syncpt_wait_fd(struct gk20a_channel_sync *s, int fd,
-                struct priv_cmd_entry *wait_cmd,
-                struct gk20a_fence *fence)
+                struct priv_cmd_entry *wait_cmd)
 {
 #ifdef CONFIG_SYNC
         int i;
@@ -456,8 +454,7 @@ static void add_sema_cmd(struct gk20a *g, struct channel_gk20a *c,
 
 static int gk20a_channel_semaphore_wait_syncpt(
                 struct gk20a_channel_sync *s, u32 id,
-                u32 thresh, struct priv_cmd_entry *entry,
-                struct gk20a_fence *fence)
+                u32 thresh, struct priv_cmd_entry *entry)
 {
         struct gk20a_channel_semaphore *sema =
                 container_of(s, struct gk20a_channel_semaphore, ops);
@@ -521,8 +518,7 @@ put_fence:
 
 static int gk20a_channel_semaphore_wait_fd(
                 struct gk20a_channel_sync *s, int fd,
-                struct priv_cmd_entry *entry,
-                struct gk20a_fence *fence)
+                struct priv_cmd_entry *entry)
 {
         struct gk20a_channel_semaphore *sema =
                 container_of(s, struct gk20a_channel_semaphore, ops);
diff --git a/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.h b/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.h
index dd0213dc..3f44b27a 100644
--- a/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.h
+++ b/drivers/gpu/nvgpu/gk20a/channel_sync_gk20a.h
@@ -37,24 +37,16 @@ struct gk20a_channel_sync {
         nvgpu_atomic_t refcount;
 
         /* Generate a gpu wait cmdbuf from syncpoint.
-         * Returns
-         *  - a gpu cmdbuf that performs the wait when executed,
-         *  - possibly a helper fence that the caller must hold until the
-         *    cmdbuf is executed.
+         * Returns a gpu cmdbuf that performs the wait when executed
          */
         int (*wait_syncpt)(struct gk20a_channel_sync *s, u32 id, u32 thresh,
-                        struct priv_cmd_entry *entry,
-                        struct gk20a_fence *fence);
+                        struct priv_cmd_entry *entry);
 
         /* Generate a gpu wait cmdbuf from sync fd.
-         * Returns
-         *  - a gpu cmdbuf that performs the wait when executed,
-         *  - possibly a helper fence that the caller must hold until the
-         *    cmdbuf is executed.
+         * Returns a gpu cmdbuf that performs the wait when executed
          */
         int (*wait_fd)(struct gk20a_channel_sync *s, int fd,
-                        struct priv_cmd_entry *entry,
-                        struct gk20a_fence *fence);
+                        struct priv_cmd_entry *entry);
 
         /* Increment syncpoint/semaphore.
          * Returns