        return 0;
 }
 
+static int vcn_v3_0_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx)
+{
+       int ret_code = 0;
+       uint32_t tmp;
+
+       /* Wait for power status to be 1 */
+       SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_POWER_STATUS, 1,
+               UVD_POWER_STATUS__UVD_POWER_STATUS_MASK, ret_code);
+
+       /* wait for read ptr to be equal to write ptr */
+       tmp = RREG32_SOC15(VCN, inst_idx, mmUVD_RB_WPTR);
+       SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_RB_RPTR, tmp, 0xFFFFFFFF, ret_code);
+
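+       /* second encode ring */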
+       tmp = RREG32_SOC15(VCN, inst_idx, mmUVD_RB_WPTR2);
+       SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_RB_RPTR2, tmp, 0xFFFFFFFF, ret_code);
+
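+       /* decode ring (the top bit of the write ptr is masked off) */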
+       tmp = RREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_WPTR) & 0x7FFFFFFF;
+       SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_RBC_RB_RPTR, tmp, 0xFFFFFFFF, ret_code);
+
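+       /* Wait for power status to be 1 again before disabling dynamic power gating */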
+       SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_POWER_STATUS, 1,
+               UVD_POWER_STATUS__UVD_POWER_STATUS_MASK, ret_code);
+
+       /* disable dynamic power gating mode */
+       WREG32_P(SOC15_REG_OFFSET(VCN, inst_idx, mmUVD_POWER_STATUS), 0,
+               ~UVD_POWER_STATUS__UVD_PG_MODE_MASK);
+
+       return 0;
+}
+
 static int vcn_v3_0_stop(struct amdgpu_device *adev)
 {
        uint32_t tmp;
        int i, r = 0;
 
        for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
                if (adev->vcn.harvest_config & (1 << i))
                        continue;
 
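+               /* instances in dynamic power gating mode take the dedicated stop path */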
+               if (adev->pg_flags & AMD_PG_SUPPORT_VCN_DPG) {
+                       r = vcn_v3_0_stop_dpg_mode(adev, i);
+                       continue;
+               }
+
                /* wait for vcn idle */
                SOC15_WAIT_ON_RREG(VCN, i, mmUVD_STATUS, UVD_STATUS__IDLE, 0x7, r);
                if (r)