dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
 }
 
+/* Run the chip-specific CFO (carrier frequency offset) tracking
+ * initialization, if this chip implements one.
+ */
+static void rtw_phy_cfo_init(struct rtw_dev *rtwdev)
+{
+       struct rtw_chip_info *chip = rtwdev->chip;
+
+       if (chip->ops->cfo_init)
+               chip->ops->cfo_init(rtwdev);
+}
+
 void rtw_phy_init(struct rtw_dev *rtwdev)
 {
        struct rtw_chip_info *chip = rtwdev->chip;
        rtw_phy_cck_pd_init(rtwdev);
 
        dm_info->iqk.done = false;
+       rtw_phy_cfo_init(rtwdev);
 }
 EXPORT_SYMBOL(rtw_phy_init);
 
                chip->ops->dpk_track(rtwdev);
 }
 
+/* Context handed to the per-vif iterator that accumulates CFO samples
+ * from a received frame (see rtw_phy_parsing_cfo_iter()).
+ */
+struct rtw_rx_addr_match_data {
+       struct rtw_dev *rtwdev;
+       struct ieee80211_hdr *hdr;      /* NOTE(review): set by the caller but
+                                        * never read in the iterator below */
+       struct rtw_rx_pkt_stat *pkt_stat;       /* per-packet rx stats, incl. cfo_tail[] */
+       u8 *bssid;                      /* BSSID extracted from the rx frame */
+};
+
+/* Per-vif iterator: when the received frame's BSSID matches this vif's
+ * BSSID, fold the packet's per-RF-path CFO tail reports into the running
+ * accumulators consumed later by the chip's CFO tracking routine.
+ */
+static void rtw_phy_parsing_cfo_iter(void *data, u8 *mac,
+                                    struct ieee80211_vif *vif)
+{
+       struct rtw_rx_addr_match_data *iter_data = data;
+       struct rtw_dev *rtwdev = iter_data->rtwdev;
+       struct rtw_rx_pkt_stat *pkt_stat = iter_data->pkt_stat;
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+       u8 *bssid = iter_data->bssid;
+       u8 i;
+
+       /* Only count frames from the BSS this vif is associated with. */
+       if (!ether_addr_equal(vif->bss_conf.bssid, bssid))
+               return;
+
+       /* Accumulate the raw CFO report and sample count per RF path. */
+       for (i = 0; i < rtwdev->hal.rf_path_num; i++) {
+               cfo->cfo_tail[i] += pkt_stat->cfo_tail[i];
+               cfo->cfo_cnt[i]++;
+       }
+
+       /* Total packets seen since the last tracking pass. */
+       cfo->packet_count++;
+}
+
+/* Entry point from the rx path: gather CFO statistics from one received
+ * packet. Frames that are corrupted (CRC/ICV error), carry no PHY status,
+ * or are control frames are skipped — they have no usable CFO report.
+ */
+void rtw_phy_parsing_cfo(struct rtw_dev *rtwdev,
+                        struct rtw_rx_pkt_stat *pkt_stat)
+{
+       struct ieee80211_hdr *hdr = pkt_stat->hdr;
+       struct rtw_rx_addr_match_data data = {};
+
+       if (pkt_stat->crc_err || pkt_stat->icv_err || !pkt_stat->phy_status ||
+           ieee80211_is_ctl(hdr->frame_control))
+               return;
+
+       data.rtwdev = rtwdev;
+       data.hdr = hdr;
+       data.pkt_stat = pkt_stat;
+       data.bssid = get_hdr_bssid(hdr);
+
+       /* Walk all active vifs; only the one matching the BSSID accumulates. */
+       rtw_iterate_vifs_atomic(rtwdev, rtw_phy_parsing_cfo_iter, &data);
+}
+EXPORT_SYMBOL(rtw_phy_parsing_cfo);
+
+/* Run the chip-specific CFO tracking step from the periodic dynamic
+ * mechanism, if this chip implements one.
+ */
+static void rtw_phy_cfo_track(struct rtw_dev *rtwdev)
+{
+       struct rtw_chip_info *chip = rtwdev->chip;
+
+       if (chip->ops->cfo_track)
+               chip->ops->cfo_track(rtwdev);
+}
+
 #define CCK_PD_FA_LV1_MIN      1000
 #define CCK_PD_FA_LV0_MAX      500
 
        rtw_phy_dig(rtwdev);
        rtw_phy_cck_pd(rtwdev);
        rtw_phy_ra_track(rtwdev);
+       rtw_phy_cfo_track(rtwdev);
        rtw_phy_dpk_track(rtwdev);
        rtw_phy_pwr_track(rtwdev);
 }
 
 #include "util.h"
 #include "bf.h"
 #include "efuse.h"
+#include "coex.h"
 
 #define IQK_DONE_8822C 0xaa
 
 
        efuse->rfe_option = map->rfe_option;
        efuse->rf_board_option = map->rf_board_option;
-       efuse->crystal_cap = map->xtal_k;
+       efuse->crystal_cap = map->xtal_k & XCAP_MASK;
        efuse->channel_plan = map->channel_plan;
        efuse->country_code[0] = map->country_code[0];
        efuse->country_code[1] = map->country_code[1];
                }
                dm_info->rx_evm_dbm[path] = evm_dbm;
        }
+       rtw_phy_parsing_cfo(rtwdev, pkt_stat);
 }
 
 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
 
        hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
                                       pkt_stat->drv_info_sz);
+       pkt_stat->hdr = hdr;
        if (pkt_stat->phy_status) {
                phy_status = rx_desc + desc_sz + pkt_stat->shift;
                query_phy_status(rtwdev, phy_status, pkt_stat);
        }
 }
 
+/* Duplicate the 7-bit crystal cap value into bits [13:7] so the same cap
+ * is programmed into both halves of the register field.
+ * NOTE(review): presumably the two 7-bit halves are the XI/XO crystal
+ * buffer caps of REG_ANAPAR_XTAL_0 — confirm against the register layout.
+ */
+#define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
+/* Write the crystal cap to hardware and remember it in the tracking state. */
+static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+       u32 val = 0;    /* the initializer is redundant; val is assigned below */
+
+       val = XCAP_EXTEND(crystal_cap);
+       cfo->crystal_cap = crystal_cap;
+       rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
+}
+
+/* Program a new crystal cap value, skipping the register write when the
+ * requested value is already in effect.
+ */
+static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+
+       if (cfo->crystal_cap == crystal_cap)
+               return;
+
+       rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
+}
+
+/* Reset CFO tracking: re-enable adjustment and step the crystal cap one
+ * unit back toward the efuse-calibrated default per invocation (this runs
+ * periodically, so repeated calls converge on the default).
+ */
+static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+
+       cfo->is_adjust = true;
+
+       if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
+               rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
+       else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
+               rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
+}
+
+/* Initialize CFO tracking state from the efuse-calibrated crystal cap
+ * and start with adjustment enabled.
+ */
+static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+
+       cfo->crystal_cap = rtwdev->efuse.crystal_cap;
+       cfo->is_adjust = true;
+}
+
+/* Scale a raw CFO report by 2.5 ((v << 1) + (v >> 1)), presumably
+ * converting report units to kHz — confirm against the PHY status spec.
+ * NOTE(review): relies on arithmetic right shift for negative values
+ * (implementation-defined in ISO C, but universally true for kernel
+ * targets).
+ */
+#define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
+/* Average the accumulated CFO over all RF paths and reset the per-path
+ * accumulators. Assumes path_num > 0 (rf_path_num is never 0 in practice;
+ * a zero would divide by zero below).
+ */
+static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+       s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
+       u8 i;
+
+       /* Per-path average, then sum across paths. */
+       for (i = 0; i < path_num; i++) {
+               cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
+
+               if (cfo->cfo_cnt[i])
+                       cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
+               else
+                       cfo_avg = 0;
+
+               cfo_path_sum += cfo_avg;
+       }
+
+       /* Start a fresh accumulation window for the next tracking pass. */
+       for (i = 0; i < path_num; i++) {
+               cfo->cfo_tail[i] = 0;
+               cfo->cfo_cnt[i] = 0;
+       }
+
+       return cfo_path_sum / path_num;
+}
+
+/* Decide whether crystal cap adjustment should run, with hysteresis:
+ * start adjusting once |cfo| exceeds CFO_TRK_ENABLE_TH, and stop once it
+ * drops to CFO_TRK_STOP_TH or below. When BT coexistence is active,
+ * adjustment is forced off and the cap is restored to the efuse default
+ * (presumably because BT shares the crystal — confirm with coex docs).
+ */
+static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+
+       if (!cfo->is_adjust) {
+               if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
+                       cfo->is_adjust = true;
+       } else {
+               if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
+                       cfo->is_adjust = false;
+       }
+
+       if (!rtw_coex_disabled(rtwdev)) {
+               cfo->is_adjust = false;
+               rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
+       }
+}
+
+/* Periodic CFO tracking step: tracking only runs with exactly one
+ * associated station (multiple stations would have conflicting offsets);
+ * otherwise the state is reset toward the efuse default. When new CFO
+ * samples have arrived, nudge the crystal cap by one unit in the
+ * direction that reduces the measured offset.
+ */
+static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
+{
+       struct rtw_dm_info *dm_info = &rtwdev->dm_info;
+       struct rtw_cfo_track *cfo = &dm_info->cfo_track;
+       u8 path_num = rtwdev->hal.rf_path_num;
+       s8 crystal_cap = cfo->crystal_cap;      /* signed so the -- below can't wrap */
+       s32 cfo_avg = 0;
+
+       if (rtwdev->sta_cnt != 1) {
+               rtw8822c_cfo_tracking_reset(rtwdev);
+               return;
+       }
+
+       /* No new packets since the last pass — nothing to average. */
+       if (cfo->packet_count == cfo->packet_count_pre)
+               return;
+
+       cfo->packet_count_pre = cfo->packet_count;
+       cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
+       rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
+
+       if (cfo->is_adjust) {
+               /* Step by one cap unit per pass; dead zone of +-CFO_TRK_ADJ_TH. */
+               if (cfo_avg > CFO_TRK_ADJ_TH)
+                       crystal_cap++;
+               else if (cfo_avg < -CFO_TRK_ADJ_TH)
+                       crystal_cap--;
+
+               crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
+               rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
+       }
+}
+
 static const struct rtw_phy_cck_pd_reg
 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
        {
        .config_bfee            = rtw8822c_bf_config_bfee,
        .set_gid_table          = rtw_bf_set_gid_table,
        .cfg_csi_rate           = rtw_bf_cfg_csi_rate,
+       .cfo_init               = rtw8822c_cfo_init,
+       .cfo_track              = rtw8822c_cfo_track,
 
        .coex_set_init          = rtw8822c_coex_cfg_init,
        .coex_set_ant_switch    = NULL,