                vma->ufence = NULL;
        }
 
-       if (vma->ufence) {
-               xe_sync_ufence_put(vma->ufence);
-               vma->ufence = NULL;
-       }
-
        if (xe_vma_is_userptr(vma)) {
                struct xe_userptr_vma *uvma = to_userptr_vma(vma);
                struct xe_userptr *userptr = &uvma->userptr;
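
The hunk above appears to drop a put-and-clear block that simply duplicates the one ending just before it: once vma->ufence has been released via xe_sync_ufence_put() and the pointer cleared, a second "if (vma->ufence)" test can never be true, so the repeated block is dead code. A minimal sketch of the put-and-clear idiom, using hypothetical stand-in types and helpers rather than the driver's own:

#include <stddef.h>

/* Hypothetical refcounted object standing in for the user fence. */
struct fence { int refcount; };

static void fence_put(struct fence *f)
{
        f->refcount--;
        /* the object would be freed here once the count reaches zero */
}

static void release_fence_once(struct fence **slot)
{
        /* Put the reference and clear the pointer in one place... */
        if (*slot) {
                fence_put(*slot);
                *slot = NULL;
        }
        /* ...so any later identical check sees NULL and does nothing. */
}
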
                struct xe_vma_op *op = gpuva_op_to_vma_op(__op);
 
                if (__op->op == DRM_GPUVA_OP_MAP) {
+                       op->map.immediate =
+                               flags & DRM_XE_VM_BIND_FLAG_IMMEDIATE;
+                       op->map.read_only =
+                               flags & DRM_XE_VM_BIND_FLAG_READONLY;
                        op->map.is_null = flags & DRM_XE_VM_BIND_FLAG_NULL;
                        op->map.dumpable = flags & DRM_XE_VM_BIND_FLAG_DUMPABLE;
                        op->map.pat_index = pat_index;
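
This hunk appears to be in the path that builds per-VMA operations from a bind request: for a MAP op, the DRM_XE_VM_BIND_FLAG_IMMEDIATE and DRM_XE_VM_BIND_FLAG_READONLY ioctl flags are now latched into the op's map state next to the existing is_null, dumpable and pat_index fields; the next hunk then turns the recorded read_only into VMA_CREATE_FLAG_READ_ONLY when the VMA itself is created. A minimal sketch of the capture step, with hypothetical flag values and struct layout (only the identifiers visible in the diff are the driver's own):

#include <stdbool.h>
#include <stdint.h>

#define BIND_FLAG_IMMEDIATE (1u << 0)   /* hypothetical bit assignments */
#define BIND_FLAG_READONLY  (1u << 1)

struct map_op {
        bool immediate;
        bool read_only;
};

static void capture_bind_flags(struct map_op *op, uint32_t flags)
{
        /* Assigning a masked value to a bool yields true for any nonzero result. */
        op->immediate = flags & BIND_FLAG_IMMEDIATE;
        op->read_only = flags & BIND_FLAG_READONLY;
}
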
                switch (op->base.op) {
                case DRM_GPUVA_OP_MAP:
                {
+                       flags |= op->map.read_only ?
+                               VMA_CREATE_FLAG_READ_ONLY : 0;
                        flags |= op->map.is_null ?
                                VMA_CREATE_FLAG_IS_NULL : 0;
                        flags |= op->map.dumpable ?
        case DRM_GPUVA_OP_MAP:
                err = xe_vm_bind(vm, vma, op->q, xe_vma_bo(vma),
                                 op->syncs, op->num_syncs,
-                                !xe_vm_in_fault_mode(vm),
+                                op->map.immediate || !xe_vm_in_fault_mode(vm),
                                 op->flags & XE_VMA_OP_FIRST,
                                 op->flags & XE_VMA_OP_LAST);
                break;
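
The final hunk changes what appears to be the immediate-bind argument passed to xe_vm_bind(). Previously a bind was performed immediately only when the VM was not in fault mode; with this change an explicit DRM_XE_VM_BIND_FLAG_IMMEDIATE request also forces an immediate bind on a fault-mode VM, while non-fault-mode VMs keep binding immediately as before. A sketch of just the predicate, under a hypothetical helper name (not the driver's API):

#include <stdbool.h>

/*
 * immediate requested | VM in fault mode | bind now?
 *        no           |        no        |   yes  (as before)
 *        no           |        yes       |   no   (deferred)
 *        yes          |        no        |   yes
 *        yes          |        yes       |   yes  (new with this change)
 */
static bool bind_immediately(bool immediate_requested, bool vm_in_fault_mode)
{
        return immediate_requested || !vm_in_fault_mode;
}
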