chip->ops->cfo_init(rtwdev);
 }
 
+/* Reset TX path-diversity state: start transmitting 1SS frames on the
+ * chip's default path and clear the per-path RSSI accumulators that the
+ * periodic diversity check averages to pick the better antenna path.
+ */
+static void rtw_phy_tx_path_div_init(struct rtw_dev *rtwdev)
+{
+       struct rtw_path_div *path_div = &rtwdev->dm_path_div;
+
+       path_div->current_tx_path = rtwdev->chip->default_1ss_tx_path;
+       path_div->path_a_cnt = 0;
+       path_div->path_a_sum = 0;
+       path_div->path_b_cnt = 0;
+       path_div->path_b_sum = 0;
+}
+
 void rtw_phy_init(struct rtw_dev *rtwdev)
 {
        struct rtw_chip_info *chip = rtwdev->chip;
 
        dm_info->iqk.done = false;
        rtw_phy_cfo_init(rtwdev);
+       /* Reset TX path-diversity bookkeeping along with the other PHY state */
+       rtw_phy_tx_path_div_init(rtwdev);
 }
 EXPORT_SYMBOL(rtw_phy_init);
 
        rtw_phy_dig(rtwdev);
        rtw_phy_cck_pd(rtwdev);
        rtw_phy_ra_track(rtwdev);
+       /* Re-evaluate the 1SS TX path from the RSSI gathered since last run */
+       rtw_phy_tx_path_diversity(rtwdev);
        rtw_phy_cfo_track(rtwdev);
        rtw_phy_dpk_track(rtwdev);
        rtw_phy_pwr_track(rtwdev);
        return false;
 }
 EXPORT_SYMBOL(rtw_phy_pwrtrack_need_iqk);
+
+/* Program the selected 1SS TX path into the baseband via the chip op.
+ * CCK frames follow the same path as 1SS OFDM (tx_path_sel_cck is a copy
+ * of tx_path_sel_1ss).  Returns early when the requested path is already
+ * active, so registers are only touched on an actual switch.
+ */
+static void rtw_phy_set_tx_path_by_reg(struct rtw_dev *rtwdev,
+                                      enum rtw_bb_path tx_path_sel_1ss)
+{
+       struct rtw_path_div *path_div = &rtwdev->dm_path_div;
+       enum rtw_bb_path tx_path_sel_cck = tx_path_sel_1ss;
+       struct rtw_chip_info *chip = rtwdev->chip;
+
+       if (tx_path_sel_1ss == path_div->current_tx_path)
+               return;
+
+       path_div->current_tx_path = tx_path_sel_1ss;
+       rtw_dbg(rtwdev, RTW_DBG_PATH_DIV, "Switch TX path=%s\n",
+               tx_path_sel_1ss == BB_PATH_A ? "A" : "B");
+       /* antenna_tx is kept as-is; only the 1SS/CCK path selection changes */
+       chip->ops->config_tx_path(rtwdev, rtwdev->hal.antenna_tx,
+                                 tx_path_sel_1ss, tx_path_sel_cck, false);
+}
+
+/* Pick the stronger TX path from the RSSI samples accumulated (per RX
+ * path, by the chip's phy-status parsing) since the last invocation.
+ * On a tie — including the no-samples case where both averages are 0 —
+ * the current path is kept.  The accumulators are cleared afterwards so
+ * the next decision window starts fresh.
+ */
+static void rtw_phy_tx_path_div_select(struct rtw_dev *rtwdev)
+{
+       struct rtw_path_div *path_div = &rtwdev->dm_path_div;
+       enum rtw_bb_path path = path_div->current_tx_path;
+       s32 rssi_a = 0, rssi_b = 0;
+
+       /* Guard against divide-by-zero when a path saw no frames */
+       if (path_div->path_a_cnt)
+               rssi_a = path_div->path_a_sum / path_div->path_a_cnt;
+       else
+               rssi_a = 0;
+       if (path_div->path_b_cnt)
+               rssi_b = path_div->path_b_sum / path_div->path_b_cnt;
+       else
+               rssi_b = 0;
+
+       if (rssi_a != rssi_b)
+               path = (rssi_a > rssi_b) ? BB_PATH_A : BB_PATH_B;
+
+       /* Start a new sampling window for the next decision */
+       path_div->path_a_cnt = 0;
+       path_div->path_a_sum = 0;
+       path_div->path_b_cnt = 0;
+       path_div->path_b_sum = 0;
+       rtw_phy_set_tx_path_by_reg(rtwdev, path);
+}
+
+/* Run the diversity decision for a 2-antenna (2SS-capable) chip.  Bails
+ * out unless both RX paths are enabled (we need RSSI from A and B to
+ * compare) and at least one station is connected.
+ */
+static void rtw_phy_tx_path_diversity_2ss(struct rtw_dev *rtwdev)
+{
+       if (rtwdev->hal.antenna_rx != BB_PATH_AB) {
+               rtw_dbg(rtwdev, RTW_DBG_PATH_DIV,
+                       "[Return] tx_Path_en=%d, rx_Path_en=%d\n",
+                       rtwdev->hal.antenna_tx, rtwdev->hal.antenna_rx);
+               return;
+       }
+       if (rtwdev->sta_cnt == 0) {
+               rtw_dbg(rtwdev, RTW_DBG_PATH_DIV, "No Link\n");
+               return;
+       }
+
+       rtw_phy_tx_path_div_select(rtwdev);
+}
+
+/* Periodic TX path diversity entry point (called from the DM watchdog).
+ * No-op on chips that do not advertise path_div_supported.
+ */
+void rtw_phy_tx_path_diversity(struct rtw_dev *rtwdev)
+{
+       struct rtw_chip_info *chip = rtwdev->chip;
+
+       if (!chip->path_div_supported)
+               return;
+
+       rtw_phy_tx_path_diversity_2ss(rtwdev);
+}
 
                rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
 }
 
+/* Pulse the baseband reset: set -> clear -> set BIT_FEN_BB_RSTB so that
+ * freshly written TX/RX path configuration takes effect.
+ */
+static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
+{
+       rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
+       rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
+       rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
+}
+
 static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
                                    struct rtw_backup_info *backup,
                                    struct rtw_backup_info *backup_rf)
                else
                        rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
        }
+       rtw8822c_bb_reset(rtwdev);
 }
 
+/* Map OFDM TX onto the antenna paths.  The second parameter changes from
+ * a bool to the explicit 1SS path selection: for the non-path-A case,
+ * tx_path_sel_1ss picks whether 1SS OFDM frames go out on both paths
+ * (AB), path B, or path A.  Ends with a BB reset to apply the mapping.
+ * NOTE(review): this hunk elides context lines (the BB_PATH_B branch of
+ * the outer if) — verify against the full file when applying.
+ */
 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
-                                        bool is_tx2_path)
+                                        enum rtw_bb_path tx_path_sel_1ss)
 {
        if (tx_path == BB_PATH_A) {
                rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
                rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
                rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
        } else {
-               if (is_tx2_path) {
+               if (tx_path_sel_1ss == BB_PATH_AB) {
                        rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
                        rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
-               } else {
+               } else if (tx_path_sel_1ss == BB_PATH_B) {
+                       rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
+                       rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
+               } else if (tx_path_sel_1ss == BB_PATH_A) {
                        rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
                        rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
                }
        }
+       /* Reset the BB so the new antenna mapping takes effect */
+       rtw8822c_bb_reset(rtwdev);
 }
 
+/* Configure CCK and OFDM TX paths independently: tx_path_cck drives the
+ * CCK mapping, tx_path + tx_path_sel_1ss drive the OFDM mapping, and a
+ * final BB reset applies the combined configuration.
+ */
 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
+                                   enum rtw_bb_path tx_path_sel_1ss,
+                                   enum rtw_bb_path tx_path_cck,
                                    bool is_tx2_path)
 {
-       rtw8822c_config_cck_tx_path(rtwdev, tx_path, is_tx2_path);
-       rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, is_tx2_path);
+       rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
+       rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
+       rtw8822c_bb_reset(rtwdev);
 }
 
 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
                rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
 
        rtw8822c_config_rx_path(rtwdev, rx_path);
-       rtw8822c_config_tx_path(rtwdev, tx_path, is_tx2_path);
+       /* Start on path A for 1SS/CCK; diversity reprograms it at runtime */
+       rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
+                               is_tx2_path);
 
        rtw8822c_toggle_igi(rtwdev);
 }
 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
                                    struct rtw_rx_pkt_stat *pkt_stat)
 {
+       struct rtw_path_div *p_div = &rtwdev->dm_path_div;
        struct rtw_dm_info *dm_info = &rtwdev->dm_info;
        u8 rxsc, bw;
        s8 min_rx_power = -120;
        for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
                rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
                dm_info->rssi[path] = rssi;
+               /* Feed the TX path-diversity accumulators; they are averaged
+                * and cleared in rtw_phy_tx_path_div_select().
+                * NOTE(review): the enclosing loop bound uses
+                * "path <= rf_path_num", which looks one past the last valid
+                * index of rx_power[]/rssi[] — confirm against the array
+                * sizes (upstream uses "<").
+                */
+               if (path == RF_PATH_A) {
+                       p_div->path_a_sum += rssi;
+                       p_div->path_a_cnt++;
+               } else if (path == RF_PATH_B) {
+                       p_div->path_b_sum += rssi;
+                       p_div->path_b_cnt++;
+               }
                dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
                dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
 
        .cfg_csi_rate           = rtw_bf_cfg_csi_rate,
        .cfo_init               = rtw8822c_cfo_init,
        .cfo_track              = rtw8822c_cfo_track,
+       .config_tx_path         = rtw8822c_config_tx_path,
 
        .coex_set_init          = rtw8822c_coex_cfg_init,
        .coex_set_ant_switch    = NULL,
        .band = RTW_BAND_2G | RTW_BAND_5G,
        .page_size = 128,
        .dig_min = 0x20,
+       .default_1ss_tx_path = BB_PATH_A,
+       .path_div_supported = true,
        .ht_supported = true,
        .vht_supported = true,
        .lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),