        return idle_frames;
 }
 
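+/*
+ * Wa_16025596647: treat DC5/DC6 entry as blocked when the current DC state
+ * does not allow DC5/DC6, when other non-PSR pipes are still active, or when
+ * vblank interrupts are enabled on the PSR pipe.
+ */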
+static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
+{
+       struct intel_display *display = to_intel_display(intel_dp);
+       u32 current_dc_state = intel_display_power_get_current_dc_state(display);
+       struct drm_vblank_crtc *vblank = &display->drm->vblank[intel_dp->psr.pipe];
+
+       return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
+               current_dc_state != DC_STATE_EN_UPTO_DC6) ||
+               intel_dp->psr.active_non_psr_pipes ||
+               READ_ONCE(vblank->enabled);
+}
+
 static void hsw_activate_psr1(struct intel_dp *intel_dp)
 {
        struct intel_display *display = to_intel_display(intel_dp);
 
        intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
                     ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
+
+       /* Wa_16025596647 */
+       if ((DISPLAY_VER(display) == 20 ||
+            IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
+           is_dc5_dc6_blocked(intel_dp))
+               intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
+                                                                      intel_dp->psr.pipe,
+                                                                      true);
 }
 
 static void hsw_activate_psr2(struct intel_dp *intel_dp)
 {
        struct intel_display *display = to_intel_display(intel_dp);
        enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
        u32 val = EDP_PSR2_ENABLE;
        u32 psr_val = 0;
+       u8 idle_frames;
 
-       val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
+       /* Wa_16025596647 */
+       if ((DISPLAY_VER(display) == 20 ||
+            IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
+           is_dc5_dc6_blocked(intel_dp))
+               idle_frames = 0;
+       else
+               idle_frames = psr_compute_idle_frames(intel_dp);
+       val |= EDP_PSR2_IDLE_FRAMES(idle_frames);
 
        if (DISPLAY_VER(display) < 14 && !display->platform.alderlake_p)
                val |= EDP_SU_TRACK_ENABLE;
 
                drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
        } else {
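+               /* Wa_16025596647 */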
+               if (DISPLAY_VER(display) == 20 ||
+                   IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
+                       intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
+                                                                      intel_dp->psr.pipe,
+                                                                      false);
+
                val = intel_de_rmw(display,
                                   psr_ctl_reg(display, cpu_transcoder),
                                   EDP_PSR_ENABLE, 0);
 bool intel_psr_needs_block_dc_vblank(const struct intel_crtc_state *crtc_state)
 {
        struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
+       struct intel_display *display = to_intel_display(crtc_state);
        struct intel_encoder *encoder;
 
        for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
                struct intel_dp *intel_dp;
 
                if (!intel_encoder_is_dp(encoder))
                        continue;
 
                intel_dp = enc_to_intel_dp(encoder);
 
-               if (intel_dp_is_edp(intel_dp) &&
-                   CAN_PANEL_REPLAY(intel_dp))
+               if (!intel_dp_is_edp(intel_dp))
+                       continue;
+
+               if (CAN_PANEL_REPLAY(intel_dp))
+                       return true;
+
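+               /* Wa_16025596647 */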
+               if ((DISPLAY_VER(display) == 20 ||
+                    IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
+                   CAN_PSR(intel_dp))
                        return true;
        }
 
        }
 }
 
-/* Wa_16025596647 */
-static void psr1_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp,
-                                                 bool dc5_dc6_blocked)
-{
-       struct intel_display *display = to_intel_display(intel_dp);
-       u32 val;
-
-       if (dc5_dc6_blocked)
-               val = DMC_EVT_CTL_ENABLE | DMC_EVT_CTL_RECURRING |
-                       REG_FIELD_PREP(DMC_EVT_CTL_TYPE_MASK,
-                                      DMC_EVT_CTL_TYPE_EDGE_0_1) |
-                       REG_FIELD_PREP(DMC_EVT_CTL_EVENT_ID_MASK,
-                                      DMC_EVT_CTL_EVENT_ID_VBLANK_A);
-       else
-               val = REG_FIELD_PREP(DMC_EVT_CTL_EVENT_ID_MASK,
-                                    DMC_EVT_CTL_EVENT_ID_FALSE) |
-                       REG_FIELD_PREP(DMC_EVT_CTL_TYPE_MASK,
-                                      DMC_EVT_CTL_TYPE_EDGE_0_1);
-
-       intel_de_write(display, MTL_PIPEDMC_EVT_CTL_4(intel_dp->psr.pipe),
-                      val);
-}
-
-/* Wa_16025596647 */
-static bool is_dc5_dc6_blocked(struct intel_dp *intel_dp)
-{
-       struct intel_display *display = to_intel_display(intel_dp);
-       u32 current_dc_state = intel_display_power_get_current_dc_state(display);
-       struct drm_vblank_crtc *vblank = &display->drm->vblank[intel_dp->psr.pipe];
-
-       return (current_dc_state != DC_STATE_EN_UPTO_DC5 &&
-               current_dc_state != DC_STATE_EN_UPTO_DC6) ||
-               intel_dp->psr.active_non_psr_pipes ||
-               READ_ONCE(vblank->enabled);
-}
-
 /* Wa_16025596647 */
 static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp)
 {
+       struct intel_display *display = to_intel_display(intel_dp);
        bool dc5_dc6_blocked;
 
        if (!intel_dp->psr.active)
                return;
 
        dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);
 
        if (intel_dp->psr.sel_update_enabled)
                psr2_program_idle_frames(intel_dp, dc5_dc6_blocked ? 0 :
                                         psr_compute_idle_frames(intel_dp));
        else
-               psr1_apply_underrun_on_idle_wa_locked(intel_dp, dc5_dc6_blocked);
+               intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
+                                                                      intel_dp->psr.pipe,
+                                                                      dc5_dc6_blocked);
 }
 
 static void psr_dc5_dc6_wa_work(struct work_struct *work)