{
	struct xe_pt_stage_bind_walk *xe_walk =
		container_of(walk, typeof(*xe_walk), base);
-	u16 pat_index = xe_walk->vma->pat_index;
+	u16 pat_index = xe_walk->vma->attr.pat_index;
	struct xe_pt *xe_parent = container_of(parent, typeof(*xe_parent), base);
	struct xe_vm *vm = xe_walk->vm;
	struct xe_pt *xe_child;

	if (vm->xe->info.has_atomic_enable_pte_bit)
		vma->gpuva.flags |= XE_VMA_ATOMIC_PTE_BIT;

-	vma->pat_index = pat_index;
+	vma->attr.pat_index = pat_index;

	if (bo) {
		struct drm_gpuvm_bo *vm_bo;
	if (op->base.remap.prev) {
		vma = new_vma(vm, op->base.remap.prev,
-			      old->pat_index, flags);
+			      old->attr.pat_index, flags);
		if (IS_ERR(vma))
			return PTR_ERR(vma);

	if (op->base.remap.next) {
		vma = new_vma(vm, op->base.remap.next,
-			      old->pat_index, flags);
+			      old->attr.pat_index, flags);
		if (IS_ERR(vma))
			return PTR_ERR(vma);
	 * values. These are defined in uapi/drm/xe_drm.h.
	 */
	u32 atomic_access;
+
+	/**
+	 * @pat_index: The pat index to use when encoding the PTEs for this vma.
+	 */
+	u16 pat_index;
};
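With the index grouped under the memory-attributes struct, callers dereference
vma->attr.pat_index rather than a loose member on struct xe_vma. A minimal
sketch of the access pattern after this change (field layout as above; the
xe_vma_pat_index() helper is hypothetical and not part of this patch):

	/* Hypothetical accessor, for illustration only. */
	static inline u16 xe_vma_pat_index(const struct xe_vma *vma)
	{
		return vma->attr.pat_index;	/* was: vma->pat_index */
	}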
struct xe_vma {
	/** @tile_staged: bind is staged for this VMA */
	u8 tile_staged;

-	/**
-	 * @pat_index: The pat index to use when encoding the PTEs for this vma.
-	 */
-	u16 pat_index;
-
	/**
	 * @ufence: The user fence that was provided with MAP.
	 * Needs to be signalled before UNMAP can be processed.