                 obj->cache_level == I915_CACHE_WT);
 }
 
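+/**
+ * i915_gem_cpu_write_needs_clflush - check whether CPU writes need flushing
+ * @obj: the GEM object being written by the CPU
+ *
+ * Returns true if CPU writes to @obj must be flushed from the CPU caches
+ * before they are visible to non-coherent readers. If @obj is already
+ * marked cache_dirty, a flush is pending and no further clflush is needed.
+ */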
+bool i915_gem_cpu_write_needs_clflush(struct drm_i915_gem_object *obj)
+{
+       if (obj->cache_dirty)
+               return false;
+
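+       /* Writes are not coherent for the GPU; they must be flushed manually. */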
+       if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE))
+               return true;
+
+       /* Currently in use by HW (display engine)? Keep flushed. */
+       return i915_gem_object_is_framebuffer(obj);
+}
+
 static void
 flush_write_domain(struct drm_i915_gem_object *obj, unsigned int flush_domains)
 {
 
 bool i915_gem_object_can_bypass_llc(struct drm_i915_gem_object *obj);
 void i915_gem_object_flush_if_display(struct drm_i915_gem_object *obj);
 void i915_gem_object_flush_if_display_locked(struct drm_i915_gem_object *obj);
+bool i915_gem_cpu_write_needs_clflush(struct drm_i915_gem_object *obj);
 
 int __must_check
 i915_gem_object_set_to_wc_domain(struct drm_i915_gem_object *obj, bool write);
 void __i915_gem_object_make_purgeable(struct drm_i915_gem_object *obj);
 void i915_gem_object_make_purgeable(struct drm_i915_gem_object *obj);
 
-static inline bool cpu_write_needs_clflush(struct drm_i915_gem_object *obj)
-{
-       if (obj->cache_dirty)
-               return false;
-
-       if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE))
-               return true;
-
-       /* Currently in use by HW (display engine)? Keep flushed. */
-       return i915_gem_object_is_framebuffer(obj);
-}
-
 static inline void __start_cpu_write(struct drm_i915_gem_object *obj)
 {
        obj->read_domains = I915_GEM_DOMAIN_CPU;
        obj->write_domain = I915_GEM_DOMAIN_CPU;
-       if (cpu_write_needs_clflush(obj))
+       if (i915_gem_cpu_write_needs_clflush(obj))
                obj->cache_dirty = true;
 }
 
 
         * perspective, requiring manual detiling by the client.
         */
        if (!i915_gem_object_has_struct_page(obj) ||
-           cpu_write_needs_clflush(obj))
+           i915_gem_cpu_write_needs_clflush(obj))
                /* Note that the gtt paths might fail with non-page-backed user
                 * pointers (e.g. gtt mappings when moving data between
                 * textures). Fallback to the shmem path in that case.