sumo_power_level_enable(rdev, i, false);
 }
 
+static void sumo_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev)
+{
+       struct sumo_ps *new_ps = sumo_get_ps(rdev->pm.dpm.requested_ps);
+       struct sumo_ps *current_ps = sumo_get_ps(rdev->pm.dpm.current_ps);
+
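+       /* nothing to do if the UVD clocks are not changing */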
+       if ((rdev->pm.dpm.requested_ps->vclk == rdev->pm.dpm.current_ps->vclk) &&
+           (rdev->pm.dpm.requested_ps->dclk == rdev->pm.dpm.current_ps->dclk))
+               return;
+
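+       /* reprogram UVD before the sclk change only when the engine clock is being lowered */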
+       if (new_ps->levels[new_ps->num_levels - 1].sclk >=
+           current_ps->levels[current_ps->num_levels - 1].sclk)
+               return;
+
+       radeon_set_uvd_clocks(rdev, rdev->pm.dpm.requested_ps->vclk,
+                             rdev->pm.dpm.requested_ps->dclk);
+}
+
+static void sumo_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev)
+{
+       struct sumo_ps *new_ps = sumo_get_ps(rdev->pm.dpm.requested_ps);
+       struct sumo_ps *current_ps = sumo_get_ps(rdev->pm.dpm.current_ps);
+
+       if ((rdev->pm.dpm.requested_ps->vclk == rdev->pm.dpm.current_ps->vclk) &&
+           (rdev->pm.dpm.requested_ps->dclk == rdev->pm.dpm.current_ps->dclk))
+               return;
+
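+       /* reprogram UVD after the sclk change only when the engine clock was raised (or unchanged) */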
+       if (new_ps->levels[new_ps->num_levels - 1].sclk <
+           current_ps->levels[current_ps->num_levels - 1].sclk)
+               return;
+
+       radeon_set_uvd_clocks(rdev, rdev->pm.dpm.requested_ps->vclk,
+                             rdev->pm.dpm.requested_ps->dclk);
+}
+
 void sumo_take_smu_control(struct radeon_device *rdev, bool enable)
 {
 /* This bit selects who handles display phy powergating.
        sumo_take_smu_control(rdev, false);
 }
 
+static void sumo_uvd_init(struct radeon_device *rdev)
+{
+       u32 tmp;
+
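+       /* disable direct control (DIR_CNTL) of the VCLK and DCLK dividers */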
+       tmp = RREG32(CG_VCLK_CNTL);
+       tmp &= ~VCLK_DIR_CNTL_EN;
+       WREG32(CG_VCLK_CNTL, tmp);
+
+       tmp = RREG32(CG_DCLK_CNTL);
+       tmp &= ~DCLK_DIR_CNTL_EN;
+       WREG32(CG_DCLK_CNTL, tmp);
+
+       /* 100 MHz (clock values are in 10 kHz units) */
+       radeon_set_uvd_clocks(rdev, 10000, 10000);
+}
+
 static int sumo_set_thermal_temperature_range(struct radeon_device *rdev,
                                              int min_temp, int max_temp)
 {
 
        if (pi->enable_dynamic_patch_ps)
                sumo_apply_state_adjust_rules(rdev);
+       if (pi->enable_dpm)
+               sumo_set_uvd_clock_before_set_eng_clock(rdev);
        sumo_update_current_power_levels(rdev);
        if (pi->enable_boost) {
                sumo_enable_boost(rdev, false);
        }
        if (pi->enable_boost)
                sumo_enable_boost(rdev, true);
+       if (pi->enable_dpm)
+               sumo_set_uvd_clock_after_set_eng_clock(rdev);
 
        return 0;
 }
        sumo_program_acpi_power_level(rdev);
        sumo_enable_acpi_pm(rdev);
        sumo_take_smu_control(rdev, true);
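+       /* put the UVD clocks into a default state (100 MHz) */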
+       sumo_uvd_init(rdev);
 }
 
 void sumo_dpm_display_configuration_changed(struct radeon_device *rdev)
 
 #define CG_SCLK_STATUS                                  0x604
 #       define SCLK_OVERCLK_DETECT                      (1 << 2)
 
+#define CG_DCLK_CNTL                                    0x610
+#       define DCLK_DIVIDER_MASK                        0x7f
+#       define DCLK_DIR_CNTL_EN                         (1 << 8)
+#define CG_DCLK_STATUS                                  0x614
+#       define DCLK_STATUS                              (1 << 0)
+#define CG_VCLK_CNTL                                    0x618
+#       define VCLK_DIVIDER_MASK                        0x7f
+#       define VCLK_DIR_CNTL_EN                         (1 << 8)
+#define CG_VCLK_STATUS                                  0x61c
+
 #define GENERAL_PWRMGT                                  0x63c
 #       define STATIC_PM_EN                             (1 << 1)