flush_delayed_work(&adev->delayed_init_work);
        adev->shutdown = true;
 
+       /* Make sure the IB test has finished before entering exclusive
+        * mode to avoid preemption during the IB test.
+        */
+       if (amdgpu_sriov_vf(adev))
+               amdgpu_virt_request_full_gpu(adev, false);
+
        /* disable all interrupts */
        amdgpu_irq_disable_all(adev);
        if (adev->mode_info.mode_config_initialized){
 
        if (adev->rmmio == NULL)
                goto done_free;
 
-       if (amdgpu_sriov_vf(adev))
-               amdgpu_virt_request_full_gpu(adev, false);
-
        if (adev->runpm) {
                pm_runtime_get_sync(dev->dev);
                pm_runtime_forbid(dev->dev);
 
                if (flags & AMDGPU_IB_PREEMPTED)
                        control |= INDIRECT_BUFFER_PRE_RESUME(1);
 
-               if (!(ib->flags & AMDGPU_IB_FLAG_CE))
+               if (!(ib->flags & AMDGPU_IB_FLAG_CE) && vmid)
                        gfx_v10_0_ring_emit_de_meta(ring,
                                    (!amdgpu_sriov_vf(ring->adev) && flags & AMDGPU_IB_PREEMPTED) ? true : false);
        }
 
        if (amdgpu_sriov_vf(ring->adev) && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
                control |= INDIRECT_BUFFER_PRE_ENB(1);
 
-               if (!(ib->flags & AMDGPU_IB_FLAG_CE))
+               if (!(ib->flags & AMDGPU_IB_FLAG_CE) && vmid)
                        gfx_v8_0_ring_emit_de_meta(ring);
        }
 
 
        if (amdgpu_sriov_vf(ring->adev) && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
                control |= INDIRECT_BUFFER_PRE_ENB(1);
 
-               if (!(ib->flags & AMDGPU_IB_FLAG_CE))
+               if (!(ib->flags & AMDGPU_IB_FLAG_CE) && vmid)
                        gfx_v9_0_ring_emit_de_meta(ring);
        }