#include <linux/highmem.h>
 
 #include "ivpu_drv.h"
-#include "ivpu_hw_37xx_reg.h"
 #include "ivpu_hw_reg_io.h"
 #include "ivpu_mmu.h"
 #include "ivpu_mmu_context.h"
 #include "ivpu_pm.h"
 
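+/*
+ * MMU registers accessed through the REGV_* helpers. These local definitions
+ * replace the ones previously pulled in from ivpu_hw_37xx_reg.h; the offsets
+ * follow the SMMUv3 register layout, relative to the 0x00200000 MMU base.
+ */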
+#define IVPU_MMU_REG_IDR0                    0x00200000u
+#define IVPU_MMU_REG_IDR1                    0x00200004u
+#define IVPU_MMU_REG_IDR3                    0x0020000cu
+#define IVPU_MMU_REG_IDR5                    0x00200014u
+#define IVPU_MMU_REG_CR0                     0x00200020u
+#define IVPU_MMU_REG_CR0ACK                  0x00200024u
+#define IVPU_MMU_REG_CR1                     0x00200028u
+#define IVPU_MMU_REG_CR2                     0x0020002cu
+#define IVPU_MMU_REG_IRQ_CTRL                0x00200050u
+#define IVPU_MMU_REG_IRQ_CTRLACK             0x00200054u
+
+#define IVPU_MMU_REG_GERROR                  0x00200060u
+#define IVPU_MMU_REG_GERROR_CMDQ_MASK         BIT_MASK(0)
+#define IVPU_MMU_REG_GERROR_EVTQ_ABT_MASK     BIT_MASK(2)
+#define IVPU_MMU_REG_GERROR_PRIQ_ABT_MASK     BIT_MASK(3)
+#define IVPU_MMU_REG_GERROR_MSI_CMDQ_ABT_MASK BIT_MASK(4)
+#define IVPU_MMU_REG_GERROR_MSI_EVTQ_ABT_MASK BIT_MASK(5)
+#define IVPU_MMU_REG_GERROR_MSI_PRIQ_ABT_MASK BIT_MASK(6)
+#define IVPU_MMU_REG_GERROR_MSI_ABT_MASK      BIT_MASK(7)
+
+#define IVPU_MMU_REG_GERRORN                 0x00200064u
+
+#define IVPU_MMU_REG_STRTAB_BASE             0x00200080u
+#define IVPU_MMU_REG_STRTAB_BASE_CFG         0x00200088u
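+/*
+ * Command and event queue registers. The *_SEC defines match the SMMUv3
+ * Page 1 aliases of the EVTQ pointers, one 64 KiB page above the base.
+ */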
+#define IVPU_MMU_REG_CMDQ_BASE               0x00200090u
+#define IVPU_MMU_REG_CMDQ_PROD               0x00200098u
+#define IVPU_MMU_REG_CMDQ_CONS               0x0020009cu
+#define IVPU_MMU_REG_CMDQ_CONS_ERR_MASK      GENMASK(30, 24)
+#define IVPU_MMU_REG_EVTQ_BASE               0x002000a0u
+#define IVPU_MMU_REG_EVTQ_PROD               0x002000a8u
+#define IVPU_MMU_REG_EVTQ_CONS               0x002000acu
+#define IVPU_MMU_REG_EVTQ_PROD_SEC           (0x002000a8u + SZ_64K)
+#define IVPU_MMU_REG_EVTQ_CONS_SEC           (0x002000acu + SZ_64K)
+
 #define IVPU_MMU_IDR0_REF              0x080f3e0f
 #define IVPU_MMU_IDR0_REF_SIMICS       0x080f3e1f
 #define IVPU_MMU_IDR1_REF              0x0e739d18
 #define IVPU_MMU_REG_TIMEOUT_US                (10 * USEC_PER_MSEC)
 #define IVPU_MMU_QUEUE_TIMEOUT_US      (100 * USEC_PER_MSEC)
 
-#define IVPU_MMU_GERROR_ERR_MASK ((REG_FLD(VPU_37XX_HOST_MMU_GERROR, CMDQ)) | \
-                                 (REG_FLD(VPU_37XX_HOST_MMU_GERROR, EVTQ_ABT)) | \
-                                 (REG_FLD(VPU_37XX_HOST_MMU_GERROR, PRIQ_ABT)) | \
-                                 (REG_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_CMDQ_ABT)) | \
-                                 (REG_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_EVTQ_ABT)) | \
-                                 (REG_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_PRIQ_ABT)) | \
-                                 (REG_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_ABT)))
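+/* All global error conditions checked by the GERROR interrupt handler */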
+#define IVPU_MMU_GERROR_ERR_MASK ((REG_FLD(IVPU_MMU_REG_GERROR, CMDQ)) | \
+                                 (REG_FLD(IVPU_MMU_REG_GERROR, EVTQ_ABT)) | \
+                                 (REG_FLD(IVPU_MMU_REG_GERROR, PRIQ_ABT)) | \
+                                 (REG_FLD(IVPU_MMU_REG_GERROR, MSI_CMDQ_ABT)) | \
+                                 (REG_FLD(IVPU_MMU_REG_GERROR, MSI_EVTQ_ABT)) | \
+                                 (REG_FLD(IVPU_MMU_REG_GERROR, MSI_PRIQ_ABT)) | \
+                                 (REG_FLD(IVPU_MMU_REG_GERROR, MSI_ABT)))
 
 static char *ivpu_mmu_event_to_str(u32 cmd)
 {
        else
                val_ref = IVPU_MMU_IDR0_REF;
 
-       val = REGV_RD32(VPU_37XX_HOST_MMU_IDR0);
+       val = REGV_RD32(IVPU_MMU_REG_IDR0);
        if (val != val_ref)
                ivpu_dbg(vdev, MMU, "IDR0 0x%x != IDR0_REF 0x%x\n", val, val_ref);
 
-       val = REGV_RD32(VPU_37XX_HOST_MMU_IDR1);
+       val = REGV_RD32(IVPU_MMU_REG_IDR1);
        if (val != IVPU_MMU_IDR1_REF)
                ivpu_dbg(vdev, MMU, "IDR1 0x%x != IDR1_REF 0x%x\n", val, IVPU_MMU_IDR1_REF);
 
-       val = REGV_RD32(VPU_37XX_HOST_MMU_IDR3);
+       val = REGV_RD32(IVPU_MMU_REG_IDR3);
        if (val != IVPU_MMU_IDR3_REF)
                ivpu_dbg(vdev, MMU, "IDR3 0x%x != IDR3_REF 0x%x\n", val, IVPU_MMU_IDR3_REF);
 
        else
                val_ref = IVPU_MMU_IDR5_REF;
 
-       val = REGV_RD32(VPU_37XX_HOST_MMU_IDR5);
+       val = REGV_RD32(IVPU_MMU_REG_IDR5);
        if (val != val_ref)
                ivpu_dbg(vdev, MMU, "IDR5 0x%x != IDR5_REF 0x%x\n", val, val_ref);
 }
        u32 irq_ctrl = IVPU_MMU_IRQ_EVTQ_EN | IVPU_MMU_IRQ_GERROR_EN;
        int ret;
 
-       ret = ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_IRQ_CTRL, 0);
+       ret = ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_IRQ_CTRL, 0);
        if (ret)
                return ret;
 
-       return ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_IRQ_CTRL, irq_ctrl);
+       return ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_IRQ_CTRL, irq_ctrl);
 }
 
 static int ivpu_mmu_cmdq_wait_for_cons(struct ivpu_device *vdev)
 {
        struct ivpu_mmu_queue *cmdq = &vdev->mmu->cmdq;
 
-       return REGV_POLL(VPU_37XX_HOST_MMU_CMDQ_CONS, cmdq->cons, (cmdq->prod == cmdq->cons),
+       return REGV_POLL(IVPU_MMU_REG_CMDQ_CONS, cmdq->cons, (cmdq->prod == cmdq->cons),
                         IVPU_MMU_QUEUE_TIMEOUT_US);
 }
 
                return ret;
 
        clflush_cache_range(q->base, IVPU_MMU_CMDQ_SIZE);
-       REGV_WR32(VPU_37XX_HOST_MMU_CMDQ_PROD, q->prod);
+       REGV_WR32(IVPU_MMU_REG_CMDQ_PROD, q->prod);
 
        ret = ivpu_mmu_cmdq_wait_for_cons(vdev);
        if (ret)
        mmu->evtq.prod = 0;
        mmu->evtq.cons = 0;
 
-       ret = ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_CR0, 0);
+       ret = ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_CR0, 0);
        if (ret)
                return ret;
 
              FIELD_PREP(IVPU_MMU_CR1_QUEUE_SH, IVPU_MMU_SH_ISH) |
              FIELD_PREP(IVPU_MMU_CR1_QUEUE_OC, IVPU_MMU_CACHE_WB) |
              FIELD_PREP(IVPU_MMU_CR1_QUEUE_IC, IVPU_MMU_CACHE_WB);
-       REGV_WR32(VPU_37XX_HOST_MMU_CR1, val);
+       REGV_WR32(IVPU_MMU_REG_CR1, val);
 
-       REGV_WR64(VPU_37XX_HOST_MMU_STRTAB_BASE, mmu->strtab.dma_q);
-       REGV_WR32(VPU_37XX_HOST_MMU_STRTAB_BASE_CFG, mmu->strtab.base_cfg);
+       REGV_WR64(IVPU_MMU_REG_STRTAB_BASE, mmu->strtab.dma_q);
+       REGV_WR32(IVPU_MMU_REG_STRTAB_BASE_CFG, mmu->strtab.base_cfg);
 
-       REGV_WR64(VPU_37XX_HOST_MMU_CMDQ_BASE, mmu->cmdq.dma_q);
-       REGV_WR32(VPU_37XX_HOST_MMU_CMDQ_PROD, 0);
-       REGV_WR32(VPU_37XX_HOST_MMU_CMDQ_CONS, 0);
+       REGV_WR64(IVPU_MMU_REG_CMDQ_BASE, mmu->cmdq.dma_q);
+       REGV_WR32(IVPU_MMU_REG_CMDQ_PROD, 0);
+       REGV_WR32(IVPU_MMU_REG_CMDQ_CONS, 0);
 
        val = IVPU_MMU_CR0_CMDQEN;
-       ret = ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_CR0, val);
+       ret = ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_CR0, val);
        if (ret)
                return ret;
 
        if (ret)
                return ret;
 
-       REGV_WR64(VPU_37XX_HOST_MMU_EVTQ_BASE, mmu->evtq.dma_q);
-       REGV_WR32(VPU_37XX_HOST_MMU_EVTQ_PROD_SEC, 0);
-       REGV_WR32(VPU_37XX_HOST_MMU_EVTQ_CONS_SEC, 0);
+       REGV_WR64(IVPU_MMU_REG_EVTQ_BASE, mmu->evtq.dma_q);
+       REGV_WR32(IVPU_MMU_REG_EVTQ_PROD_SEC, 0);
+       REGV_WR32(IVPU_MMU_REG_EVTQ_CONS_SEC, 0);
 
        val |= IVPU_MMU_CR0_EVTQEN;
-       ret = ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_CR0, val);
+       ret = ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_CR0, val);
        if (ret)
                return ret;
 
        val |= IVPU_MMU_CR0_ATSCHK;
-       ret = ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_CR0, val);
+       ret = ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_CR0, val);
        if (ret)
                return ret;
 
                return ret;
 
        val |= IVPU_MMU_CR0_SMMUEN;
-       return ivpu_mmu_reg_write(vdev, VPU_37XX_HOST_MMU_CR0, val);
+       return ivpu_mmu_reg_write(vdev, IVPU_MMU_REG_CR0, val);
 }
 
 static void ivpu_mmu_strtab_link_cd(struct ivpu_device *vdev, u32 sid)
        u32 idx = IVPU_MMU_Q_IDX(evtq->cons);
        u32 *evt = evtq->base + (idx * IVPU_MMU_EVTQ_CMD_SIZE);
 
-       evtq->prod = REGV_RD32(VPU_37XX_HOST_MMU_EVTQ_PROD_SEC);
+       evtq->prod = REGV_RD32(IVPU_MMU_REG_EVTQ_PROD_SEC);
        if (!CIRC_CNT(IVPU_MMU_Q_IDX(evtq->prod), IVPU_MMU_Q_IDX(evtq->cons), IVPU_MMU_Q_COUNT))
                return NULL;
 
        clflush_cache_range(evt, IVPU_MMU_EVTQ_CMD_SIZE);
 
        evtq->cons = (evtq->cons + 1) & IVPU_MMU_Q_WRAP_MASK;
-       REGV_WR32(VPU_37XX_HOST_MMU_EVTQ_CONS_SEC, evtq->cons);
+       REGV_WR32(IVPU_MMU_REG_EVTQ_CONS_SEC, evtq->cons);
 
        return evt;
 }
 
        ivpu_dbg(vdev, IRQ, "MMU error\n");
 
-       gerror_val = REGV_RD32(VPU_37XX_HOST_MMU_GERROR);
-       gerrorn_val = REGV_RD32(VPU_37XX_HOST_MMU_GERRORN);
+       gerror_val = REGV_RD32(IVPU_MMU_REG_GERROR);
+       gerrorn_val = REGV_RD32(IVPU_MMU_REG_GERRORN);
 
        active = gerror_val ^ gerrorn_val;
        if (!(active & IVPU_MMU_GERROR_ERR_MASK))
                return;
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_ABT, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, MSI_ABT, active))
                ivpu_warn_ratelimited(vdev, "MMU MSI ABT write aborted\n");
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_PRIQ_ABT, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, MSI_PRIQ_ABT, active))
                ivpu_warn_ratelimited(vdev, "MMU PRIQ MSI ABT write aborted\n");
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_EVTQ_ABT, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, MSI_EVTQ_ABT, active))
                ivpu_warn_ratelimited(vdev, "MMU EVTQ MSI ABT write aborted\n");
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, MSI_CMDQ_ABT, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, MSI_CMDQ_ABT, active))
                ivpu_warn_ratelimited(vdev, "MMU CMDQ MSI ABT write aborted\n");
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, PRIQ_ABT, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, PRIQ_ABT, active))
                ivpu_err_ratelimited(vdev, "MMU PRIQ write aborted\n");
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, EVTQ_ABT, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, EVTQ_ABT, active))
                ivpu_err_ratelimited(vdev, "MMU EVTQ write aborted\n");
 
-       if (REG_TEST_FLD(VPU_37XX_HOST_MMU_GERROR, CMDQ, active))
+       if (REG_TEST_FLD(IVPU_MMU_REG_GERROR, CMDQ, active))
                ivpu_err_ratelimited(vdev, "MMU CMDQ write aborted\n");
 
-       REGV_WR32(VPU_37XX_HOST_MMU_GERRORN, gerror_val);
+       REGV_WR32(IVPU_MMU_REG_GERRORN, gerror_val);
 }
 
 int ivpu_mmu_set_pgtable(struct ivpu_device *vdev, int ssid, struct ivpu_mmu_pgtable *pgtable)