a6xx_flush(gpu, ring);
 }
 
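+/* For a612, reused by the a610 family */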
+const struct adreno_reglist a612_hwcg[] = {
+       {REG_A6XX_RBBM_CLOCK_CNTL_SP0, 0x22222222},
+       {REG_A6XX_RBBM_CLOCK_CNTL2_SP0, 0x02222220},
+       {REG_A6XX_RBBM_CLOCK_DELAY_SP0, 0x00000081},
+       {REG_A6XX_RBBM_CLOCK_HYST_SP0, 0x0000f3cf},
+       {REG_A6XX_RBBM_CLOCK_CNTL_TP0, 0x22222222},
+       {REG_A6XX_RBBM_CLOCK_CNTL2_TP0, 0x22222222},
+       {REG_A6XX_RBBM_CLOCK_CNTL3_TP0, 0x22222222},
+       {REG_A6XX_RBBM_CLOCK_CNTL4_TP0, 0x00022222},
+       {REG_A6XX_RBBM_CLOCK_DELAY_TP0, 0x11111111},
+       {REG_A6XX_RBBM_CLOCK_DELAY2_TP0, 0x11111111},
+       {REG_A6XX_RBBM_CLOCK_DELAY3_TP0, 0x11111111},
+       {REG_A6XX_RBBM_CLOCK_DELAY4_TP0, 0x00011111},
+       {REG_A6XX_RBBM_CLOCK_HYST_TP0, 0x77777777},
+       {REG_A6XX_RBBM_CLOCK_HYST2_TP0, 0x77777777},
+       {REG_A6XX_RBBM_CLOCK_HYST3_TP0, 0x77777777},
+       {REG_A6XX_RBBM_CLOCK_HYST4_TP0, 0x00077777},
+       {REG_A6XX_RBBM_CLOCK_CNTL_RB0, 0x22222222},
+       {REG_A6XX_RBBM_CLOCK_CNTL2_RB0, 0x01202222},
+       {REG_A6XX_RBBM_CLOCK_CNTL_CCU0, 0x00002220},
+       {REG_A6XX_RBBM_CLOCK_HYST_RB_CCU0, 0x00040f00},
+       {REG_A6XX_RBBM_CLOCK_CNTL_RAC, 0x05522022},
+       {REG_A6XX_RBBM_CLOCK_CNTL2_RAC, 0x00005555},
+       {REG_A6XX_RBBM_CLOCK_DELAY_RAC, 0x00000011},
+       {REG_A6XX_RBBM_CLOCK_HYST_RAC, 0x00445044},
+       {REG_A6XX_RBBM_CLOCK_CNTL_TSE_RAS_RBBM, 0x04222222},
+       {REG_A6XX_RBBM_CLOCK_MODE_VFD, 0x00002222},
+       {REG_A6XX_RBBM_CLOCK_MODE_GPC, 0x02222222},
+       {REG_A6XX_RBBM_CLOCK_DELAY_HLSQ_2, 0x00000002},
+       {REG_A6XX_RBBM_CLOCK_MODE_HLSQ, 0x00002222},
+       {REG_A6XX_RBBM_CLOCK_DELAY_TSE_RAS_RBBM, 0x00004000},
+       {REG_A6XX_RBBM_CLOCK_DELAY_VFD, 0x00002222},
+       {REG_A6XX_RBBM_CLOCK_DELAY_GPC, 0x00000200},
+       {REG_A6XX_RBBM_CLOCK_DELAY_HLSQ, 0x00000000},
+       {REG_A6XX_RBBM_CLOCK_HYST_TSE_RAS_RBBM, 0x00000000},
+       {REG_A6XX_RBBM_CLOCK_HYST_VFD, 0x00000000},
+       {REG_A6XX_RBBM_CLOCK_HYST_GPC, 0x04104004},
+       {REG_A6XX_RBBM_CLOCK_HYST_HLSQ, 0x00000000},
+       {REG_A6XX_RBBM_CLOCK_CNTL_UCHE, 0x22222222},
+       {REG_A6XX_RBBM_CLOCK_HYST_UCHE, 0x00000004},
+       {REG_A6XX_RBBM_CLOCK_DELAY_UCHE, 0x00000002},
+       {REG_A6XX_RBBM_ISDB_CNT, 0x00000182},
+       {REG_A6XX_RBBM_RAC_THRESHOLD_CNT, 0x00000000},
+       {REG_A6XX_RBBM_SP_HYST_CNT, 0x00000000},
+       {REG_A6XX_RBBM_CLOCK_CNTL_GMU_GX, 0x00000222},
+       {REG_A6XX_RBBM_CLOCK_DELAY_GMU_GX, 0x00000111},
+       {REG_A6XX_RBBM_CLOCK_HYST_GMU_GX, 0x00000555},
+       {},
+};
+
 /* For a615 family (a615, a616, a618 and a619) */
 const struct adreno_reglist a615_hwcg[] = {
        {REG_A6XX_RBBM_CLOCK_CNTL_SP0,  0x02222222},
 
        if (adreno_is_a630(adreno_gpu))
                clock_cntl_on = 0x8aa8aa02;
+       else if (adreno_is_a610(adreno_gpu))
+               clock_cntl_on = 0xaaa8aa82;
        else
                clock_cntl_on = 0x8aa8aa82;
 
                return;
 
        /* Disable SP clock before programming HWCG registers */
-       gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0);
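+       /* Skip on the a610, which has no SPTPRAC */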
+       if (!adreno_is_a610(adreno_gpu))
+               gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0);
 
        for (i = 0; (reg = &adreno_gpu->info->hwcg[i], reg->offset); i++)
                gpu_write(gpu, reg->offset, state ? reg->value : 0);
 
        /* Enable SP clock */
-       gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 0, 1);
+       if (!adreno_is_a610(adreno_gpu))
+               gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 0, 1);
 
        gpu_write(gpu, REG_A6XX_RBBM_CLOCK_CNTL, state ? clock_cntl_on : 0);
 }
        /* Unknown, introduced with A640/680 */
        u32 amsbc = 0;
 
+       if (adreno_is_a610(adreno_gpu)) {
+               /* HBB = 14 (fields encode highest_bank_bit - 13) */
+               hbb_lo = 1;
+               min_acc_len = 1;
+               ubwc_mode = 1;
+       }
+
        /* a618 is using the hw default values */
        if (adreno_is_a618(adreno_gpu))
                return;
        a6xx_set_hwcg(gpu, true);
 
        /* VBIF/GBIF start*/
-       if (adreno_is_a640_family(adreno_gpu) ||
+       if (adreno_is_a610(adreno_gpu) ||
+           adreno_is_a640_family(adreno_gpu) ||
            adreno_is_a650_family(adreno_gpu)) {
                gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE0, 0x00071620);
                gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE1, 0x00071620);
                gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE2, 0x00071620);
                gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE3, 0x00071620);
                gpu_write(gpu, REG_A6XX_RBBM_GBIF_CLIENT_QOS_CNTL, 0x3);
        } else {
                gpu_write(gpu, REG_A6XX_RBBM_VBIF_CLIENT_QOS_CNTL, 0x3);
        gpu_write(gpu, REG_A6XX_UCHE_FILTER_CNTL, 0x804);
        gpu_write(gpu, REG_A6XX_UCHE_CACHE_WAYS, 0x4);
 
-       if (adreno_is_a640_family(adreno_gpu) ||
-           adreno_is_a650_family(adreno_gpu))
+       if (adreno_is_a640_family(adreno_gpu) || adreno_is_a650_family(adreno_gpu)) {
                gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x02000140);
-       else
+               gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c);
+       } else if (adreno_is_a610(adreno_gpu)) {
+               gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x00800060);
+               gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x40201b16);
+       } else {
                gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x010000c0);
-       gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c);
+               gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c);
+       }
 
        if (adreno_is_a660_family(adreno_gpu))
                gpu_write(gpu, REG_A6XX_CP_LPAC_PROG_FIFO_SIZE, 0x00000020);
 
        /* Setting the mem pool size */
-       gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 128);
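+       /* The a610 uses a smaller CP mem pool */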
+       if (adreno_is_a610(adreno_gpu)) {
+               gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 48);
+               gpu_write(gpu, REG_A6XX_CP_MEM_POOL_DBG_ADDR, 47);
+       } else {
+               gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 128);
+       }
 
        /* Setting the primFifo thresholds default values,
         * and vccCacheSkipDis=1 bit (0x200) for A640 and newer
                gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00200200);
        else if (adreno_is_a650(adreno_gpu) || adreno_is_a660(adreno_gpu))
                gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00300200);
+       else if (adreno_is_a610(adreno_gpu))
+               gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00080000);
        else
                gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00180000);
 
        a6xx_set_ubwc_config(gpu);
 
        /* Enable fault detection */
-       gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL,
-               (1 << 30) | 0x1fffff);
+       if (adreno_is_a610(adreno_gpu))
+               gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0x3ffff);
+       else
+               gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, (1 << 30) | 0x1fffff);
 
        gpu_write(gpu, REG_A6XX_UCHE_CLIENT_PF, 1);
 
 
 void a6xx_gpu_sw_reset(struct msm_gpu *gpu, bool assert)
 {
+       /* 11nm chips (e.g. ones with A610) have hw issues with the reset line! */
+       if (adreno_is_a610(to_adreno_gpu(gpu)))
+               return;
+
        gpu_write(gpu, REG_A6XX_RBBM_SW_RESET_CMD, assert);
        /* Perform a bogus read and add a brief delay to ensure ordering. */
        gpu_read(gpu, REG_A6XX_RBBM_SW_RESET_CMD);