},
 };
 
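+/*
+ * AMD Family 17h (Zen) generic cache events. As in the other AMD tables,
+ * each nonzero entry encodes the event select in bits [7:0] and the unit
+ * mask in bits [15:8]; e.g. 0xc860 is event 0x60 (requests to L2, group 1)
+ * with unit mask 0xc8. Per the usual hw_cache_event_ids convention, a 0
+ * entry means no matching event (-ENOENT) and -1 marks an op/result
+ * combination that is invalid on this family (-EINVAL).
+ */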
+static __initconst const u64 amd_hw_cache_event_ids_f17h
+                               [PERF_COUNT_HW_CACHE_MAX]
+                               [PERF_COUNT_HW_CACHE_OP_MAX]
+                               [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
+[C(L1D)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0x0040, /* Data Cache Accesses */
+               [C(RESULT_MISS)]   = 0xc860, /* L2$ access from DC Miss */
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = 0xff5a, /* h/w prefetch DC Fills */
+               [C(RESULT_MISS)]   = 0,
+       },
+},
+[C(L1I)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0x0080, /* Instruction cache fetches  */
+               [C(RESULT_MISS)]   = 0x0081, /* Instruction cache misses   */
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+},
+[C(LL)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+},
+[C(DTLB)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0xff45, /* All L2 DTLB accesses */
+               [C(RESULT_MISS)]   = 0xf045, /* L2 DTLB misses (PT walks) */
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+},
+[C(ITLB)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0x0084, /* L1 ITLB misses, L2 ITLB hits */
+               [C(RESULT_MISS)]   = 0xff85, /* L1 ITLB misses, L2 misses */
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+},
+[C(BPU)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0x00c2, /* Retired Branch Instr.      */
+               [C(RESULT_MISS)]   = 0x00c3, /* Retired Mispredicted BI    */
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+},
+[C(NODE)] = {
+       [C(OP_READ)] = {
+               [C(RESULT_ACCESS)] = 0,
+               [C(RESULT_MISS)]   = 0,
+       },
+       [C(OP_WRITE)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+       [C(OP_PREFETCH)] = {
+               [C(RESULT_ACCESS)] = -1,
+               [C(RESULT_MISS)]   = -1,
+       },
+},
+};
+
 /*
  * AMD Performance Monitor K7 and later, up to and including Family 16h:
  */
                x86_pmu.amd_nb_constraints = 0;
        }
 
-       /* Events are common for all AMDs */
-       memcpy(hw_cache_event_ids, amd_hw_cache_event_ids,
-              sizeof(hw_cache_event_ids));
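+       /* Family 17h and later use a different cache event encoding */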
+       if (boot_cpu_data.x86 >= 0x17)
+               memcpy(hw_cache_event_ids, amd_hw_cache_event_ids_f17h,
+                      sizeof(hw_cache_event_ids));
+       else
+               memcpy(hw_cache_event_ids, amd_hw_cache_event_ids,
+                      sizeof(hw_cache_event_ids));
 
        return 0;
 }