if (ce->state) {
                struct drm_i915_gem_object *obj = ce->state->obj;
-               int type = i915_coherent_map_type(ce->engine->i915);
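+               /*
+                * CPU access to the context image wants a coherent view
+                * regardless of platform (always_coherent = true): WB
+                * everywhere except lmem, which must be mapped WC.
+                */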
+               int type = i915_coherent_map_type(ce->engine->i915, obj, true);
                void *map;
 
                if (!i915_gem_object_trylock(obj))
 
        GEM_BUG_ON(!i915_vma_is_pinned(ce->state));
 
        *vaddr = i915_gem_object_pin_map(ce->state->obj,
-                                        i915_coherent_map_type(ce->engine->i915) |
+                                        i915_coherent_map_type(ce->engine->i915,
+                                                               ce->state->obj,
+                                                               false) |
                                         I915_MAP_OVERRIDE);
 
        return PTR_ERR_OR_ZERO(*vaddr);
 
        if (unlikely(ret))
                goto err_unpin;
 
-       if (i915_vma_is_map_and_fenceable(vma))
+       if (i915_vma_is_map_and_fenceable(vma)) {
                addr = (void __force *)i915_vma_pin_iomap(vma);
-       else
-               addr = i915_gem_object_pin_map(vma->obj,
-                                              i915_coherent_map_type(vma->vm->i915));
+       } else {
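+               /* WB on cache-coherent (LLC) platforms, WC otherwise. */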
+               int type = i915_coherent_map_type(vma->vm->i915, vma->obj, false);
+
+               addr = i915_gem_object_pin_map(vma->obj, type);
+       }
+
        if (IS_ERR(addr)) {
                ret = PTR_ERR(addr);
                goto err_ring;
 
                goto err;
 
        vaddr = i915_gem_object_pin_map_unlocked(ce->state->obj,
-                                                i915_coherent_map_type(engine->i915));
+                                                i915_coherent_map_type(engine->i915,
+                                                                       ce->state->obj, false));
        if (IS_ERR(vaddr)) {
                err = PTR_ERR(vaddr);
                intel_context_unpin(ce);
 
        h->seqno = memset(vaddr, 0xff, PAGE_SIZE);
 
        vaddr = i915_gem_object_pin_map_unlocked(h->obj,
-                                                i915_coherent_map_type(gt->i915));
+                                                i915_coherent_map_type(gt->i915, h->obj, false));
        if (IS_ERR(vaddr)) {
                err = PTR_ERR(vaddr);
                goto err_unpin_hws;
                return ERR_CAST(obj);
        }
 
-       vaddr = i915_gem_object_pin_map_unlocked(obj, i915_coherent_map_type(gt->i915));
+       vaddr = i915_gem_object_pin_map_unlocked(obj, i915_coherent_map_type(gt->i915, obj, false));
        if (IS_ERR(vaddr)) {
                i915_gem_object_put(obj);
                i915_vm_put(vm);
 
        }
 
        lrc = i915_gem_object_pin_map_unlocked(ce->state->obj,
-                                     i915_coherent_map_type(engine->i915));
+                                              i915_coherent_map_type(engine->i915,
+                                                                     ce->state->obj,
+                                                                     false));
        if (IS_ERR(lrc)) {
                err = PTR_ERR(lrc);
                goto err_B1;
 
        if (IS_ERR(vma))
                return PTR_ERR(vma);
 
-       vaddr = i915_gem_object_pin_map_unlocked(vma->obj, I915_MAP_WB);
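+       /*
+        * Don't hardcode WB: the object may be placed in lmem, which
+        * must be mapped WC. Keep the coherent WB mapping everywhere
+        * else (always_coherent = true).
+        */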
+       vaddr = i915_gem_object_pin_map_unlocked(vma->obj,
+                                                i915_coherent_map_type(guc_to_gt(guc)->i915,
+                                                                       vma->obj, true));
        if (IS_ERR(vaddr)) {
                i915_vma_unpin_and_release(&vma, 0);
                return PTR_ERR(vaddr);
 
        if (IS_ERR(vma))
                return PTR_ERR(vma);
 
-       vaddr = i915_gem_object_pin_map_unlocked(vma->obj, I915_MAP_WB);
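+       /* WC when the object is placed in lmem, coherent WB otherwise. */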
+       vaddr = i915_gem_object_pin_map_unlocked(vma->obj,
+                                                i915_coherent_map_type(gt->i915,
+                                                                       vma->obj, true));
        if (IS_ERR(vaddr)) {
                i915_vma_unpin_and_release(&vma, 0);
                return PTR_ERR(vaddr);
 
 #include "gem/i915_gem_context_types.h"
 #include "gem/i915_gem_shrinker.h"
 #include "gem/i915_gem_stolen.h"
+#include "gem/i915_gem_lmem.h"
 
 #include "gt/intel_engine.h"
 #include "gt/intel_gt_types.h"
 }
 
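+/*
+ * Select the CPU mapping type for @obj: an object placed in device
+ * local memory (lmem) must be mapped write-combined; a system memory
+ * object is mapped write-back when the platform is cache-coherent
+ * (LLC) or when the caller requires a coherent mapping regardless of
+ * platform (@always_coherent), and write-combined otherwise.
+ */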
 static inline enum i915_map_type
-i915_coherent_map_type(struct drm_i915_private *i915)
+i915_coherent_map_type(struct drm_i915_private *i915,
+                      struct drm_i915_gem_object *obj, bool always_coherent)
 {
-       return HAS_LLC(i915) ? I915_MAP_WB : I915_MAP_WC;
+       if (i915_gem_object_is_lmem(obj))
+               return I915_MAP_WC;
+       if (HAS_LLC(i915) || always_coherent)
+               return I915_MAP_WB;
+
+       return I915_MAP_WC;
 }
 
 #endif
 
        }
 
        if (!spin->batch) {
-               unsigned int mode =
-                       i915_coherent_map_type(spin->gt->i915);
+               unsigned int mode;
 
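+               /* WB on coherent (LLC) platforms, WC otherwise (and for lmem). */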
+               mode = i915_coherent_map_type(spin->gt->i915, spin->obj, false);
                vaddr = igt_spinner_pin_obj(ce, ww, spin->obj, mode, &spin->batch_vma);
                if (IS_ERR(vaddr))
                        return PTR_ERR(vaddr);