author		Matthew Brost <matthew.brost@intel.com>	2024-02-22 15:20:21 -0800
committer	Matthew Brost <matthew.brost@intel.com>	2024-02-23 11:44:59 -0800
commit		a9e483dda3efa5b9aae5d9eef94d2c3a878d9bea (patch)
tree		4207f326a157a9e5f2f5f4f9cb83c4f48ebed219
parent		3121fed0c51beb8ea7b18ab2ceff1ac9e358ac53 (diff)
drm/xe: Don't support execlists in xe_gt_tlb_invalidation layer
The xe_gt_tlb_invalidation layer implements TLB invalidations for a GuC
backend. Simply return if in execlists mode. A follow up may properly
implement the xe_gt_tlb_invalidation layer for both GuC and execlists.

Fixes: a9351846d945 ("drm/xe: Break of TLB invalidation into its own file")
Cc: Rodrigo Vivi <rodrigo.vivi@intel.com>
Signed-off-by: Matthew Brost <matthew.brost@intel.com>
Reviewed-by: Rodrigo Vivi <rodrigo.vivi@intel.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20240222232021.3911545-4-matthew.brost@intel.com
-rw-r--r--	drivers/gpu/drm/xe/xe_gt_tlb_invalidation.c	| 12
1 file changed, 12 insertions(+), 0 deletions(-)
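For readers outside the driver tree, the core of the change is a guard at the top of the GuC-only invalidation paths: when the device is forced into execlists mode, any pending invalidation fence is signalled and the function returns success without issuing a GuC action. The sketch below is a minimal, self-contained illustration of that early-return pattern; the stub types and helper names are simplified stand-ins, not the real xe driver structures (see the actual hunks further down for the real code).

```c
/*
 * Minimal stand-alone sketch of the early-return pattern added by this
 * patch. All types and helpers here are simplified stand-ins for the
 * real xe driver structures; only the control flow mirrors the diff below.
 */
#include <stdbool.h>
#include <stdio.h>

struct fence_stub {
	bool signalled;
};

struct xe_device_stub {
	struct {
		bool force_execlist;	/* set when execlists mode is forced */
	} info;
};

/* Stand-in for __invalidation_fence_signal(): complete the fence early. */
static void invalidation_fence_signal_stub(struct fence_stub *fence)
{
	fence->signalled = true;
}

/* Mirrors xe_gt_tlb_invalidation_vma(): bail out before any GuC action. */
static int tlb_invalidation_stub(struct xe_device_stub *xe,
				 struct fence_stub *fence)
{
	/* Execlists not supported: signal the fence (if any) and succeed. */
	if (xe->info.force_execlist) {
		if (fence)
			invalidation_fence_signal_stub(fence);
		return 0;
	}

	/* ... build and send the GuC TLB invalidation action here ... */
	return 0;
}

int main(void)
{
	struct xe_device_stub xe = { .info = { .force_execlist = true } };
	struct fence_stub fence = { .signalled = false };

	int ret = tlb_invalidation_stub(&xe, &fence);
	printf("ret=%d fence signalled=%d\n", ret, fence.signalled);
	return 0;
}
```

The design choice is to keep the layer GuC-only for now: rather than teaching it about an execlists backend, the functions report success immediately so callers waiting on a fence or a seqno are never left hanging.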
diff --git a/drivers/gpu/drm/xe/xe_gt_tlb_invalidation.c b/drivers/gpu/drm/xe/xe_gt_tlb_invalidation.c
index a7b1f7cfec87..f29ee1ccfa71 100644
--- a/drivers/gpu/drm/xe/xe_gt_tlb_invalidation.c
+++ b/drivers/gpu/drm/xe/xe_gt_tlb_invalidation.c
@@ -287,6 +287,14 @@ int xe_gt_tlb_invalidation_vma(struct xe_gt *gt,
 
 	xe_gt_assert(gt, vma);
 
+	/* Execlists not supported */
+	if (gt_to_xe(gt)->info.force_execlist) {
+		if (fence)
+			__invalidation_fence_signal(fence);
+
+		return 0;
+	}
+
 	action[len++] = XE_GUC_ACTION_TLB_INVALIDATION;
 	action[len++] = 0; /* seqno, replaced in send_tlb_invalidation */
 	if (!xe->info.has_range_tlb_invalidation) {
@@ -355,6 +363,10 @@ int xe_gt_tlb_invalidation_wait(struct xe_gt *gt, int seqno)
 	struct xe_guc *guc = &gt->uc.guc;
 	int ret;
 
+	/* Execlists not supported */
+	if (gt_to_xe(gt)->info.force_execlist)
+		return 0;
+
 	/*
 	 * XXX: See above, this algorithm only works if seqno are always in
 	 * order