  1. // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
  2. /* Copyright(c) 2019-2022 Realtek Corporation
  3. */
  4. #include "coex.h"
  5. #include "debug.h"
  6. #include "fw.h"
  7. #include "mac.h"
  8. #include "phy.h"
  9. #include "reg.h"
  10. #include "rtw8852c.h"
  11. #include "rtw8852c_rfk.h"
  12. #include "rtw8852c_table.h"
  13. #include "util.h"
/* HCI flow-control per-channel page quotas for the PCIe interface.
 * One entry per DMA queue; the third field selects the quota group.
 * NOTE(review): field order assumed to match struct rtw89_hfc_ch_cfg
 * declared in the mac header — confirm there.
 */
static const struct rtw89_hfc_ch_cfg rtw8852c_hfc_chcfg_pcie[] = {
	{13, 1614, grp_0}, /* ACH 0 */
	{13, 1614, grp_0}, /* ACH 1 */
	{13, 1614, grp_0}, /* ACH 2 */
	{13, 1614, grp_0}, /* ACH 3 */
	{13, 1614, grp_1}, /* ACH 4 */
	{13, 1614, grp_1}, /* ACH 5 */
	{13, 1614, grp_1}, /* ACH 6 */
	{13, 1614, grp_1}, /* ACH 7 */
	{13, 1614, grp_0}, /* B0MGQ */
	{13, 1614, grp_0}, /* B0HIQ */
	{13, 1614, grp_1}, /* B1MGQ */
	{13, 1614, grp_1}, /* B1HIQ */
	{40, 0, 0} /* FWCMDQ */
};
/* HCI flow-control public page pool for PCIe: the two group quotas
 * (1614 each) sum to the 3228-page public maximum; WP threshold 0.
 */
static const struct rtw89_hfc_pub_cfg rtw8852c_hfc_pubcfg_pcie = {
	1614, /* Group 0 */
	1614, /* Group 1 */
	3228, /* Public Max */
	0 /* WP threshold */
};
/* HFC parameter sets indexed by quota mode. Only SCC carries channel and
 * public configs; DLFW (firmware download) uses the precise config alone.
 */
static const struct rtw89_hfc_param_ini rtw8852c_hfc_param_ini_pcie[] = {
	[RTW89_QTA_SCC] = {rtw8852c_hfc_chcfg_pcie, &rtw8852c_hfc_pubcfg_pcie,
			   &rtw89_mac_size.hfc_preccfg_pcie, RTW89_HCIFC_POH},
	[RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_preccfg_pcie,
			    RTW89_HCIFC_POH},
	[RTW89_QTA_INVALID] = {NULL},
};
/* DLE (data link engine) memory layouts per quota mode: WDE/PLE sizes and
 * the matching quota tables from the shared rtw89_mac_size catalog.
 */
static const struct rtw89_dle_mem rtw8852c_dle_mem_pcie[] = {
	[RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size19,
			   &rtw89_mac_size.ple_size19, &rtw89_mac_size.wde_qt18,
			   &rtw89_mac_size.wde_qt18, &rtw89_mac_size.ple_qt46,
			   &rtw89_mac_size.ple_qt47},
	[RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size18,
			    &rtw89_mac_size.ple_size18, &rtw89_mac_size.wde_qt17,
			    &rtw89_mac_size.wde_qt17, &rtw89_mac_size.ple_qt44,
			    &rtw89_mac_size.ple_qt45},
	[RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
			       NULL},
};
/* Host-to-firmware (H2C) mailbox data registers, V1 layout. */
static const u32 rtw8852c_h2c_regs[RTW89_H2CREG_MAX] = {
	R_AX_H2CREG_DATA0_V1, R_AX_H2CREG_DATA1_V1, R_AX_H2CREG_DATA2_V1,
	R_AX_H2CREG_DATA3_V1
};
  58. static const u32 rtw8852c_c2h_regs[RTW89_H2CREG_MAX] = {
  59. R_AX_C2HREG_DATA0_V1, R_AX_C2HREG_DATA1_V1, R_AX_C2HREG_DATA2_V1,
  60. R_AX_C2HREG_DATA3_V1
  61. };
/* Page/flow-control register map (V1 addresses) consumed by the common
 * MAC page accounting code.
 */
static const struct rtw89_page_regs rtw8852c_page_regs = {
	.hci_fc_ctrl = R_AX_HCI_FC_CTRL_V1,
	.ch_page_ctrl = R_AX_CH_PAGE_CTRL_V1,
	.ach_page_ctrl = R_AX_ACH0_PAGE_CTRL_V1,
	.ach_page_info = R_AX_ACH0_PAGE_INFO_V1,
	.pub_page_info3 = R_AX_PUB_PAGE_INFO3_V1,
	.pub_page_ctrl1 = R_AX_PUB_PAGE_CTRL1_V1,
	.pub_page_ctrl2 = R_AX_PUB_PAGE_CTRL2_V1,
	.pub_page_info1 = R_AX_PUB_PAGE_INFO1_V1,
	.pub_page_info2 = R_AX_PUB_PAGE_INFO2_V1,
	.wp_page_ctrl1 = R_AX_WP_PAGE_CTRL1_V1,
	.wp_page_ctrl2 = R_AX_WP_PAGE_CTRL2_V1,
	.wp_page_info1 = R_AX_WP_PAGE_INFO1_V1,
};
/* DCFO compensation register/mask pair (path S0, V1). */
static const struct rtw89_reg_def rtw8852c_dcfo_comp = {
	R_DCFO_COMP_S0_V1, B_DCFO_COMP_S0_V1_MSK
};
/* Interrupt-mask (IMR) register set/clear catalog for the common MAC IMR
 * configuration code; mostly V1 variants of the generation-shared bits.
 */
static const struct rtw89_imr_info rtw8852c_imr_info = {
	.wdrls_imr_set = B_AX_WDRLS_IMR_SET_V1,
	.wsec_imr_reg = R_AX_SEC_ERROR_FLAG_IMR,
	.wsec_imr_set = B_AX_TX_HANG_IMR | B_AX_RX_HANG_IMR,
	.mpdu_tx_imr_set = B_AX_MPDU_TX_IMR_SET_V1,
	.mpdu_rx_imr_set = B_AX_MPDU_RX_IMR_SET_V1,
	.sta_sch_imr_set = B_AX_STA_SCHEDULER_IMR_SET,
	.txpktctl_imr_b0_reg = R_AX_TXPKTCTL_B0_ERRFLAG_IMR,
	.txpktctl_imr_b0_clr = B_AX_TXPKTCTL_IMR_B0_CLR_V1,
	.txpktctl_imr_b0_set = B_AX_TXPKTCTL_IMR_B0_SET_V1,
	.txpktctl_imr_b1_reg = R_AX_TXPKTCTL_B1_ERRFLAG_IMR,
	.txpktctl_imr_b1_clr = B_AX_TXPKTCTL_IMR_B1_CLR_V1,
	.txpktctl_imr_b1_set = B_AX_TXPKTCTL_IMR_B1_SET_V1,
	.wde_imr_clr = B_AX_WDE_IMR_CLR_V1,
	.wde_imr_set = B_AX_WDE_IMR_SET_V1,
	.ple_imr_clr = B_AX_PLE_IMR_CLR_V1,
	.ple_imr_set = B_AX_PLE_IMR_SET_V1,
	.host_disp_imr_clr = B_AX_HOST_DISP_IMR_CLR_V1,
	.host_disp_imr_set = B_AX_HOST_DISP_IMR_SET_V1,
	.cpu_disp_imr_clr = B_AX_CPU_DISP_IMR_CLR_V1,
	.cpu_disp_imr_set = B_AX_CPU_DISP_IMR_SET_V1,
	.other_disp_imr_clr = B_AX_OTHER_DISP_IMR_CLR_V1,
	.other_disp_imr_set = B_AX_OTHER_DISP_IMR_SET_V1,
	.bbrpt_com_err_imr_reg = R_AX_BBRPT_COM_ERR_IMR,
	.bbrpt_chinfo_err_imr_reg = R_AX_BBRPT_CHINFO_ERR_IMR,
	.bbrpt_err_imr_set = R_AX_BBRPT_CHINFO_IMR_SET_V1,
	.bbrpt_dfs_err_imr_reg = R_AX_BBRPT_DFS_ERR_IMR,
	.ptcl_imr_clr = B_AX_PTCL_IMR_CLR_V1,
	.ptcl_imr_set = B_AX_PTCL_IMR_SET_V1,
	.cdma_imr_0_reg = R_AX_RX_ERR_FLAG_IMR,
	.cdma_imr_0_clr = B_AX_RX_ERR_IMR_CLR_V1,
	.cdma_imr_0_set = B_AX_RX_ERR_IMR_SET_V1,
	.cdma_imr_1_reg = R_AX_TX_ERR_FLAG_IMR,
	.cdma_imr_1_clr = B_AX_TX_ERR_IMR_CLR_V1,
	.cdma_imr_1_set = B_AX_TX_ERR_IMR_SET_V1,
	.phy_intf_imr_reg = R_AX_PHYINFO_ERR_IMR_V1,
	.phy_intf_imr_clr = B_AX_PHYINFO_IMR_CLR_V1,
	.phy_intf_imr_set = B_AX_PHYINFO_IMR_SET_V1,
	.rmac_imr_reg = R_AX_RX_ERR_IMR,
	.rmac_imr_clr = B_AX_RMAC_IMR_CLR_V1,
	.rmac_imr_set = B_AX_RMAC_IMR_SET_V1,
	.tmac_imr_reg = R_AX_TRXPTCL_ERROR_INDICA_MASK,
	.tmac_imr_clr = B_AX_TMAC_IMR_CLR_V1,
	.tmac_imr_set = B_AX_TMAC_IMR_SET_V1,
};
/* Response-rate selection (RRSR) register fields; last initializer is the
 * field offset used by the common configuration helper.
 */
static const struct rtw89_rrsr_cfgs rtw8852c_rrsr_cfgs = {
	.ref_rate = {R_AX_TRXPTCL_RRSR_CTL_0, B_AX_WMAC_RESP_REF_RATE_SEL, 0},
	.rsc = {R_AX_PTCL_RRSR1, B_AX_RSC_MASK, 2},
};
/* DIG (dynamic initial gain) BB register map: per-path LNA/TIA/RXB initial
 * index fields plus the PAGC-follow enables for P20/S20 segments.
 */
static const struct rtw89_dig_regs rtw8852c_dig_regs = {
	.seg0_pd_reg = R_SEG0R_PD,
	.pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
	.pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK,
	.p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
	.p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
	.p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
	.p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
	.p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
	.p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
	.p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V1,
			      B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V1,
			      B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V1,
			      B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V1,
			      B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
};
  147. static void rtw8852c_ctrl_btg(struct rtw89_dev *rtwdev, bool btg);
  148. static void rtw8852c_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, u8 tx_path,
  149. enum rtw89_mac_idx mac_idx);
/* Power-on sequence for the 8852C MAC/AFE. The register writes follow the
 * vendor-specified bring-up order; do not reorder. Returns 0 on success or
 * a negative timeout/xtal-SI error.
 */
static int rtw8852c_pwr_on_func(struct rtw89_dev *rtwdev)
{
	u32 val32;
	u32 ret;

	/* On PCIe/USB pad selection, keep the AON LDO regulator powered down. */
	val32 = rtw89_read32_mask(rtwdev, R_AX_SYS_STATUS1, B_AX_PAD_HCI_SEL_V2_MASK);
	if (val32 == MAC_AX_HCI_SEL_PCIE_USB)
		rtw89_write32_set(rtwdev, R_AX_LDO_AON_CTRL0, B_AX_PD_REGU_L);

	/* Disable auto-suspend state machines, then leave low-power states. */
	rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_AFSM_WLSUS_EN |
						    B_AX_AFSM_PCIE_SUS_EN);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_DIS_WLBT_PDNSUSEN_SOPC);
	rtw89_write32_set(rtwdev, R_AX_WLLPS_CTRL, B_AX_DIS_WLBT_LPSEN_LOPC);
	rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APDM_HPDN);
	rtw89_write32_clr(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);

	/* Wait for system power ready (1 ms poll, 20 ms timeout). */
	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_AX_RDY_SYSPWR,
				1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
	if (ret)
		return ret;

	/* Trigger MAC power-on and wait for the bit to self-clear. */
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFN_ONMAC);
	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFN_ONMAC),
				1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
	if (ret)
		return ret;

	/* Toggle the platform enable twice, ending enabled (vendor sequence). */
	rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_clr(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);
	rtw89_write8_set(rtwdev, R_AX_PLATFORM_ENABLE, B_AX_PLATFORM_EN);

	rtw89_write32_clr(rtwdev, R_AX_SYS_SDIO_CTRL, B_AX_PCIE_CALIB_EN_V1);

	/* Keep CMAC1 disabled and isolated for now; power down its domains. */
	rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND, B_AX_CMAC1_FEN);
	rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND, B_AX_R_SYM_ISO_CMAC12PP);
	rtw89_write32_clr(rtwdev, R_AX_AFE_CTRL1, B_AX_R_SYM_WLCMAC1_P4_PC_EN |
						  B_AX_R_SYM_WLCMAC1_P3_PC_EN |
						  B_AX_R_SYM_WLCMAC1_P2_PC_EN |
						  B_AX_R_SYM_WLCMAC1_P1_PC_EN |
						  B_AX_R_SYM_WLCMAC1_PC_EN);

	/* Power up PTA/RFC pads, then release a-die shutdowns via XTAL SI. */
	rtw89_write32_set(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_PTA_1P3);
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL,
				      XTAL_SI_GND_SHDN_WL, XTAL_SI_GND_SHDN_WL);
	if (ret)
		return ret;
	rtw89_write32_set(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_RFC_1P3);
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL,
				      XTAL_SI_SHDN_WL, XTAL_SI_SHDN_WL);
	if (ret)
		return ret;

	/* Sequence the analog parameter enables: off-WEI/EI, then power-on. */
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_WEI,
				      XTAL_SI_OFF_WEI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_OFF_EI,
				      XTAL_SI_OFF_EI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_RFC2RF);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_WEI,
				      XTAL_SI_PON_WEI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_PON_EI,
				      XTAL_SI_PON_EI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SRAM2RFC);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_2, 0, XTAL_SI_LDO_LPS);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XTAL_XMD_4, 0, XTAL_SI_LPS_CAP);
	if (ret)
		return ret;

	/* eFuse power-cut release, with PMC write protection around it. */
	rtw89_write32_set(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
	rtw89_write32_set(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_ISO_EB2CORE);
	rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B15);

	fsleep(1000);

	rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL, B_AX_PWC_EV2EF_B14);
	rtw89_write32_clr(rtwdev, R_AX_PMC_DBG_CTRL2, B_AX_SYSON_DIS_PMCR_AX_WRMSK);
	rtw89_write32_set(rtwdev, R_AX_GPIO0_15_EECS_EESK_LED1_PULL_LOW_EN,
			  B_AX_EECS_PULL_LOW_EN | B_AX_EESK_PULL_LOW_EN |
			  B_AX_LED1_PULL_LOW_EN);

	/* Finally enable the DMAC and CMAC functional blocks. */
	rtw89_write32_set(rtwdev, R_AX_DMAC_FUNC_EN,
			  B_AX_MAC_FUNC_EN | B_AX_DMAC_FUNC_EN | B_AX_MPDU_PROC_EN |
			  B_AX_WD_RLS_EN | B_AX_DLE_WDE_EN | B_AX_TXPKT_CTRL_EN |
			  B_AX_STA_SCH_EN | B_AX_DLE_PLE_EN | B_AX_PKT_BUF_EN |
			  B_AX_DMAC_TBL_EN | B_AX_PKT_IN_EN | B_AX_DLE_CPUIO_EN |
			  B_AX_DISPATCHER_EN | B_AX_BBRPT_EN | B_AX_MAC_SEC_EN |
			  B_AX_MAC_UN_EN | B_AX_H_AXIDMA_EN);

	rtw89_write32_set(rtwdev, R_AX_CMAC_FUNC_EN,
			  B_AX_CMAC_EN | B_AX_CMAC_TXEN | B_AX_CMAC_RXEN |
			  B_AX_FORCE_CMACREG_GCKEN | B_AX_PHYINTF_EN |
			  B_AX_CMAC_DMA_EN | B_AX_PTCLTOP_EN | B_AX_SCHEDULER_EN |
			  B_AX_TMAC_EN | B_AX_RMAC_EN);

	return 0;
}
/* Power-off sequence: mirror of rtw8852c_pwr_on_func in reverse order.
 * Register order follows the vendor sequence; do not reorder. Returns 0 on
 * success or a negative timeout/xtal-SI error.
 */
static int rtw8852c_pwr_off_func(struct rtw89_dev *rtwdev)
{
	u32 val32;
	u32 ret;

	/* Undo the analog parameter enables set during power-on. */
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_RFC2RF,
				      XTAL_SI_RFC2RF);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_EI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_OFF_WEI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0, XTAL_SI_RF00);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0, XTAL_SI_RF10);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, XTAL_SI_SRAM2RFC,
				      XTAL_SI_SRAM2RFC);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_EI);
	if (ret)
		return ret;
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_PON_WEI);
	if (ret)
		return ret;

	/* Shut down BB and the a-die, dropping the pad power enables. */
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_EN_WLON);
	rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN, B_AX_FEN_BB_GLB_RSTN | B_AX_FEN_BBRSTB);
	rtw89_write32_clr(rtwdev, R_AX_SYS_ISO_CTRL_EXTEND,
			  B_AX_R_SYM_FEN_WLBBGLB_1 | B_AX_R_SYM_FEN_WLBBFUN_1);
	rtw89_write32_clr(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_RFC_1P3);
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_SHDN_WL);
	if (ret)
		return ret;
	rtw89_write32_clr(rtwdev, R_AX_SYS_ADIE_PAD_PWR_CTRL, B_AX_SYM_PADPDN_WL_PTA_1P3);
	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, XTAL_SI_GND_SHDN_WL);
	if (ret)
		return ret;

	/* Trigger MAC power-off and wait for the bit to self-clear. */
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_OFFMAC);
	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_AX_APFM_OFFMAC),
				1000, 20000, false, rtwdev, R_AX_SYS_PW_CTRL);
	if (ret)
		return ret;

	/* Magic LPS option value from the vendor sequence — TODO confirm. */
	rtw89_write32(rtwdev, R_AX_WLLPS_CTRL, 0x0001A0B0);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_XTAL_OFF_A_DIE);
	rtw89_write32_set(rtwdev, R_AX_SYS_PW_CTRL, B_AX_APFM_SWLPS);

	return 0;
}
  299. static void rtw8852c_e_efuse_parsing(struct rtw89_efuse *efuse,
  300. struct rtw8852c_efuse *map)
  301. {
  302. ether_addr_copy(efuse->addr, map->e.mac_addr);
  303. efuse->rfe_type = map->rfe_type;
  304. efuse->xtal_cap = map->xtal_k;
  305. }
  306. static void rtw8852c_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
  307. struct rtw8852c_efuse *map)
  308. {
  309. struct rtw89_tssi_info *tssi = &rtwdev->tssi;
  310. struct rtw8852c_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
  311. u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
  312. u8 i, j;
  313. tssi->thermal[RF_PATH_A] = map->path_a_therm;
  314. tssi->thermal[RF_PATH_B] = map->path_b_therm;
  315. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  316. memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
  317. sizeof(ofst[i]->cck_tssi));
  318. for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
  319. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  320. "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
  321. i, j, tssi->tssi_cck[i][j]);
  322. memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
  323. sizeof(ofst[i]->bw40_tssi));
  324. memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
  325. ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
  326. memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
  327. sizeof(tssi->tssi_6g_mcs[i]));
  328. for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
  329. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  330. "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
  331. i, j, tssi->tssi_mcs[i][j]);
  332. }
  333. }
  334. static bool _decode_efuse_gain(u8 data, s8 *high, s8 *low)
  335. {
  336. if (high)
  337. *high = sign_extend32(FIELD_GET(GENMASK(7, 4), data), 3);
  338. if (low)
  339. *low = sign_extend32(FIELD_GET(GENMASK(3, 0), data), 3);
  340. return data != 0xff;
  341. }
  342. static void rtw8852c_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
  343. struct rtw8852c_efuse *map)
  344. {
  345. struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
  346. bool valid = false;
  347. valid |= _decode_efuse_gain(map->rx_gain_2g_cck,
  348. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK],
  349. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK]);
  350. valid |= _decode_efuse_gain(map->rx_gain_2g_ofdm,
  351. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM],
  352. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM]);
  353. valid |= _decode_efuse_gain(map->rx_gain_5g_low,
  354. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW],
  355. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW]);
  356. valid |= _decode_efuse_gain(map->rx_gain_5g_mid,
  357. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID],
  358. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID]);
  359. valid |= _decode_efuse_gain(map->rx_gain_5g_high,
  360. &gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH],
  361. &gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH]);
  362. gain->offset_valid = valid;
  363. }
  364. static int rtw8852c_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map)
  365. {
  366. struct rtw89_efuse *efuse = &rtwdev->efuse;
  367. struct rtw8852c_efuse *map;
  368. map = (struct rtw8852c_efuse *)log_map;
  369. efuse->country_code[0] = map->country_code[0];
  370. efuse->country_code[1] = map->country_code[1];
  371. rtw8852c_efuse_parsing_tssi(rtwdev, map);
  372. rtw8852c_efuse_parsing_gain_offset(rtwdev, map);
  373. switch (rtwdev->hci.type) {
  374. case RTW89_HCI_TYPE_PCIE:
  375. rtw8852c_e_efuse_parsing(efuse, map);
  376. break;
  377. default:
  378. return -ENOTSUPP;
  379. }
  380. rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
  381. return 0;
  382. }
  383. static void rtw8852c_phycap_parsing_tssi(struct rtw89_dev *rtwdev, u8 *phycap_map)
  384. {
  385. struct rtw89_tssi_info *tssi = &rtwdev->tssi;
  386. static const u32 tssi_trim_addr[RF_PATH_NUM_8852C] = {0x5D6, 0x5AB};
  387. static const u32 tssi_trim_addr_6g[RF_PATH_NUM_8852C] = {0x5CE, 0x5A3};
  388. u32 addr = rtwdev->chip->phycap_addr;
  389. bool pg = false;
  390. u32 ofst;
  391. u8 i, j;
  392. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  393. for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++) {
  394. /* addrs are in decreasing order */
  395. ofst = tssi_trim_addr[i] - addr - j;
  396. tssi->tssi_trim[i][j] = phycap_map[ofst];
  397. if (phycap_map[ofst] != 0xff)
  398. pg = true;
  399. }
  400. for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM_6G; j++) {
  401. /* addrs are in decreasing order */
  402. ofst = tssi_trim_addr_6g[i] - addr - j;
  403. tssi->tssi_trim_6g[i][j] = phycap_map[ofst];
  404. if (phycap_map[ofst] != 0xff)
  405. pg = true;
  406. }
  407. }
  408. if (!pg) {
  409. memset(tssi->tssi_trim, 0, sizeof(tssi->tssi_trim));
  410. memset(tssi->tssi_trim_6g, 0, sizeof(tssi->tssi_trim_6g));
  411. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  412. "[TSSI][TRIM] no PG, set all trim info to 0\n");
  413. }
  414. for (i = 0; i < RF_PATH_NUM_8852C; i++)
  415. for (j = 0; j < TSSI_TRIM_CH_GROUP_NUM; j++)
  416. rtw89_debug(rtwdev, RTW89_DBG_TSSI,
  417. "[TSSI] path=%d idx=%d trim=0x%x addr=0x%x\n",
  418. i, j, tssi->tssi_trim[i][j],
  419. tssi_trim_addr[i] - j);
  420. }
  421. static void rtw8852c_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
  422. u8 *phycap_map)
  423. {
  424. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  425. static const u32 thm_trim_addr[RF_PATH_NUM_8852C] = {0x5DF, 0x5DC};
  426. u32 addr = rtwdev->chip->phycap_addr;
  427. u8 i;
  428. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  429. info->thermal_trim[i] = phycap_map[thm_trim_addr[i] - addr];
  430. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  431. "[THERMAL][TRIM] path=%d thermal_trim=0x%x\n",
  432. i, info->thermal_trim[i]);
  433. if (info->thermal_trim[i] != 0xff)
  434. info->pg_thermal_trim = true;
  435. }
  436. }
  437. static void rtw8852c_thermal_trim(struct rtw89_dev *rtwdev)
  438. {
  439. #define __thm_setting(raw) \
  440. ({ \
  441. u8 __v = (raw); \
  442. ((__v & 0x1) << 3) | ((__v & 0x1f) >> 1); \
  443. })
  444. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  445. u8 i, val;
  446. if (!info->pg_thermal_trim) {
  447. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  448. "[THERMAL][TRIM] no PG, do nothing\n");
  449. return;
  450. }
  451. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  452. val = __thm_setting(info->thermal_trim[i]);
  453. rtw89_write_rf(rtwdev, i, RR_TM2, RR_TM2_OFF, val);
  454. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  455. "[THERMAL][TRIM] path=%d thermal_setting=0x%x\n",
  456. i, val);
  457. }
  458. #undef __thm_setting
  459. }
  460. static void rtw8852c_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
  461. u8 *phycap_map)
  462. {
  463. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  464. static const u32 pabias_trim_addr[RF_PATH_NUM_8852C] = {0x5DE, 0x5DB};
  465. u32 addr = rtwdev->chip->phycap_addr;
  466. u8 i;
  467. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  468. info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
  469. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  470. "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
  471. i, info->pa_bias_trim[i]);
  472. if (info->pa_bias_trim[i] != 0xff)
  473. info->pg_pa_bias_trim = true;
  474. }
  475. }
  476. static void rtw8852c_pa_bias_trim(struct rtw89_dev *rtwdev)
  477. {
  478. struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
  479. u8 pabias_2g, pabias_5g;
  480. u8 i;
  481. if (!info->pg_pa_bias_trim) {
  482. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  483. "[PA_BIAS][TRIM] no PG, do nothing\n");
  484. return;
  485. }
  486. for (i = 0; i < RF_PATH_NUM_8852C; i++) {
  487. pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
  488. pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
  489. rtw89_debug(rtwdev, RTW89_DBG_RFK,
  490. "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
  491. i, pabias_2g, pabias_5g);
  492. rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG, pabias_2g);
  493. rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA, pabias_5g);
  494. }
  495. }
/* Parse all calibration data carried in the PHY capability (efuse) map:
 * TSSI trim, thermal trim, and PA bias trim. Always succeeds.
 */
static int rtw8852c_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
{
	rtw8852c_phycap_parsing_tssi(rtwdev, phycap_map);
	rtw8852c_phycap_parsing_thermal_trim(rtwdev, phycap_map);
	rtw8852c_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
	return 0;
}
/* Apply the power-trim values previously parsed by rtw8852c_read_phycap()
 * (thermal trim and PA bias trim) to the RF registers.
 */
static void rtw8852c_power_trim(struct rtw89_dev *rtwdev)
{
	rtw8852c_thermal_trim(rtwdev);
	rtw8852c_pa_bias_trim(rtwdev);
}
/* Program the MAC-side channel configuration for one MAC block: the RF
 * bandwidth mode, the TX sub-carrier (primary channel) indices, and the
 * band-dependent TX rate-check bits.
 */
static void rtw8852c_set_channel_mac(struct rtw89_dev *rtwdev,
				     const struct rtw89_chan *chan,
				     u8 mac_idx)
{
	u32 rf_mod = rtw89_mac_reg_by_idx(R_AX_WMAC_RFMOD, mac_idx);
	u32 sub_carr = rtw89_mac_reg_by_idx(R_AX_TX_SUB_CARRIER_VALUE,
					    mac_idx);
	u32 chk_rate = rtw89_mac_reg_by_idx(R_AX_TXRATE_CHK, mac_idx);
	u8 txsc20 = 0, txsc40 = 0, txsc80 = 0;
	u8 rf_mod_val = 0, chk_rate_mask = 0;
	u32 txsc;

	/* Wider bandwidths need the sub-carrier index at every narrower
	 * level as well, hence the deliberate fallthrough chain.
	 */
	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_160:
		txsc80 = rtw89_phy_get_txsc(rtwdev, chan,
					    RTW89_CHANNEL_WIDTH_80);
		fallthrough;
	case RTW89_CHANNEL_WIDTH_80:
		txsc40 = rtw89_phy_get_txsc(rtwdev, chan,
					    RTW89_CHANNEL_WIDTH_40);
		fallthrough;
	case RTW89_CHANNEL_WIDTH_40:
		txsc20 = rtw89_phy_get_txsc(rtwdev, chan,
					    RTW89_CHANNEL_WIDTH_20);
		break;
	default:
		break;
	}

	/* Combine the collected indices into the sub-carrier register value
	 * and pick the matching RF bandwidth mode.
	 */
	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_160:
		rf_mod_val = AX_WMAC_RFMOD_160M;
		txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20) |
		       FIELD_PREP(B_AX_TXSC_40M_MASK, txsc40) |
		       FIELD_PREP(B_AX_TXSC_80M_MASK, txsc80);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		rf_mod_val = AX_WMAC_RFMOD_80M;
		txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20) |
		       FIELD_PREP(B_AX_TXSC_40M_MASK, txsc40);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		rf_mod_val = AX_WMAC_RFMOD_40M;
		txsc = FIELD_PREP(B_AX_TXSC_20M_MASK, txsc20);
		break;
	case RTW89_CHANNEL_WIDTH_20:
	default:
		rf_mod_val = AX_WMAC_RFMOD_20M;
		txsc = 0;
		break;
	}

	rtw89_write8_mask(rtwdev, rf_mod, B_AX_WMAC_RFMOD_MASK, rf_mod_val);
	rtw89_write32(rtwdev, sub_carr, txsc);

	/* Rate-check bits: 2 GHz restricts band mode; 5/6 GHz forbids CCK
	 * and caps RTS at OFDM 6M.
	 */
	switch (chan->band_type) {
	case RTW89_BAND_2G:
		chk_rate_mask = B_AX_BAND_MODE;
		break;
	case RTW89_BAND_5G:
	case RTW89_BAND_6G:
		chk_rate_mask = B_AX_CHECK_CCK_EN | B_AX_RTS_LIMIT_IN_OFDM6;
		break;
	default:
		rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
		return;
	}

	/* Clear all three bits first, then set only the ones for this band */
	rtw89_write8_clr(rtwdev, chk_rate, B_AX_BAND_MODE | B_AX_CHECK_CCK_EN |
					   B_AX_RTS_LIMIT_IN_OFDM6);
	rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);
}
/* Per-channel (2.4 GHz ch 1-14) SCO compensation thresholds used by
 * rtw8852c_ctrl_sco_cck() for CCK Barker and CCK demodulation.
 * NOTE(review): values taken from vendor tables; unit/scaling not
 * documented here — confirm against Realtek reference code.
 */
static const u32 rtw8852c_sco_barker_threshold[14] = {
	0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
	0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
};

static const u32 rtw8852c_sco_cck_threshold[14] = {
	0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
	0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
};
/* Configure CCK SCO (sample clock offset) compensation for a 2.4 GHz
 * channel by loading the per-channel Barker/CCK thresholds. Only 20 and
 * 40 MHz are valid for CCK; other bandwidths return -EINVAL.
 */
static int rtw8852c_ctrl_sco_cck(struct rtw89_dev *rtwdev, u8 central_ch,
				 u8 primary_ch, enum rtw89_bandwidth bw)
{
	u8 ch_element;

	if (bw == RTW89_CHANNEL_WIDTH_20) {
		ch_element = central_ch - 1;
	} else if (bw == RTW89_CHANNEL_WIDTH_40) {
		/* NOTE(review): this compares the primary channel number
		 * against the literal 1, which selects the "+2" table entry
		 * only for primary channel 1 — verify whether a primary
		 * channel *position* (upper/lower) check was intended here.
		 */
		if (primary_ch == 1)
			ch_element = central_ch - 1 + 2;
		else
			ch_element = central_ch - 1 - 2;
	} else {
		rtw89_warn(rtwdev, "Invalid BW:%d for CCK\n", bw);
		return -EINVAL;
	}

	rtw89_phy_write32_mask(rtwdev, R_BK_FC0_INV_V1, B_BK_FC0_INV_MSK_V1,
			       rtw8852c_sco_barker_threshold[ch_element]);
	rtw89_phy_write32_mask(rtwdev, R_CCK_FC0_INV_V1, B_CCK_FC0_INV_MSK_V1,
			       rtw8852c_sco_cck_threshold[ch_element]);
	return 0;
}
/* One gain-table entry: register address per BB path for 2 GHz (gain_g)
 * and non-2 GHz (gain_a), plus the byte mask selecting the field within
 * the 32-bit register.
 */
struct rtw8852c_bb_gain {
	u32 gain_g[BB_PATH_NUM_8852C];	/* register address, 2 GHz band */
	u32 gain_a[BB_PATH_NUM_8852C];	/* register address, 5/6 GHz bands */
	u32 gain_mask;			/* byte field within the register */
};
/* LNA gain register map, one entry per LNA gain index. Consecutive
 * indices pack four byte-wide fields into each 32-bit register before
 * moving to the next address.
 */
static const struct rtw8852c_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
	{ .gain_g = {0x4678, 0x475C}, .gain_a = {0x45DC, 0x4740},
	  .gain_mask = 0x00ff0000 },
	{ .gain_g = {0x4678, 0x475C}, .gain_a = {0x45DC, 0x4740},
	  .gain_mask = 0xff000000 },
	{ .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
	  .gain_mask = 0x000000ff },
	{ .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
	  .gain_mask = 0x0000ff00 },
	{ .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
	  .gain_mask = 0x00ff0000 },
	{ .gain_g = {0x467C, 0x4760}, .gain_a = {0x4660, 0x4744},
	  .gain_mask = 0xff000000 },
	{ .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
	  .gain_mask = 0x000000ff },
};
/* TIA gain register map, one entry per TIA gain index; same layout
 * convention as bb_gain_lna.
 */
static const struct rtw8852c_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
	{ .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
	  .gain_mask = 0x00ff0000 },
	{ .gain_g = {0x4680, 0x4764}, .gain_a = {0x4664, 0x4748},
	  .gain_mask = 0xff000000 },
};
/* LNA bypass-gain entry. Unlike struct rtw8852c_bb_gain, the 2 GHz and
 * non-2 GHz register sets use different byte offsets, so each carries its
 * own mask.
 */
struct rtw8852c_bb_gain_bypass {
	u32 gain_g[BB_PATH_NUM_8852C];	/* register address, 2 GHz band */
	u32 gain_a[BB_PATH_NUM_8852C];	/* register address, 5/6 GHz bands */
	u32 gain_mask_g;		/* byte field for the 2 GHz registers */
	u32 gain_mask_a;		/* byte field for the 5/6 GHz registers */
};
/* LNA bypass-gain register map, one entry per LNA gain index. */
static
const struct rtw8852c_bb_gain_bypass bb_gain_bypass_lna[LNA_GAIN_NUM] = {
	{ .gain_g = {0x4BB8, 0x4C7C}, .gain_a = {0x4BB4, 0x4C78},
	  .gain_mask_g = 0xff000000, .gain_mask_a = 0xff},
	{ .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
	  .gain_mask_g = 0xff, .gain_mask_a = 0xff00},
	{ .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
	  .gain_mask_g = 0xff00, .gain_mask_a = 0xff0000},
	{ .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB4, 0x4C78},
	  .gain_mask_g = 0xff0000, .gain_mask_a = 0xff000000},
	{ .gain_g = {0x4BBC, 0x4C80}, .gain_a = {0x4BB8, 0x4C7C},
	  .gain_mask_g = 0xff000000, .gain_mask_a = 0xff},
	{ .gain_g = {0x4BC0, 0x4C84}, .gain_a = {0x4BB8, 0x4C7C},
	  .gain_mask_g = 0xff, .gain_mask_a = 0xff00},
	{ .gain_g = {0x4BC0, 0x4C84}, .gain_a = {0x4BB8, 0x4C7C},
	  .gain_mask_g = 0xff00, .gain_mask_a = 0xff0000},
};
/* op1dB (output 1 dB compression point) register map: per-LNA-index LNA
 * and TIA+LNA registers sharing one mask, plus the standalone TIA0/LNA6
 * field that lives outside the indexed set.
 */
struct rtw8852c_bb_gain_op1db {
	struct {
		u32 lna[BB_PATH_NUM_8852C];	/* LNA op1dB register */
		u32 tia_lna[BB_PATH_NUM_8852C];	/* TIA+LNA op1dB register */
		u32 mask;			/* byte field for both */
	} reg[LNA_GAIN_NUM];
	u32 reg_tia0_lna6[BB_PATH_NUM_8852C];	/* TIA0/LNA6 op1dB register */
	u32 mask_tia0_lna6;			/* byte field for it */
};
/* op1dB register map for the A (non-2 GHz) register set; used by
 * rtw8852c_set_gain_error() only when subband != RTW89_CH_2G.
 */
static const struct rtw8852c_bb_gain_op1db bb_gain_op1db_a = {
	.reg = {
		{ .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
		  .mask = 0xff},
		{ .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
		  .mask = 0xff00},
		{ .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
		  .mask = 0xff0000},
		{ .lna = {0x4668, 0x474c}, .tia_lna = {0x4670, 0x4754},
		  .mask = 0xff000000},
		{ .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
		  .mask = 0xff},
		{ .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
		  .mask = 0xff00},
		{ .lna = {0x466c, 0x4750}, .tia_lna = {0x4674, 0x4758},
		  .mask = 0xff0000},
	},
	.reg_tia0_lna6 = {0x4674, 0x4758},
	.mask_tia0_lna6 = 0xff000000,
};
  683. static enum rtw89_phy_bb_gain_band
  684. rtw8852c_mapping_gain_band(enum rtw89_subband subband)
  685. {
  686. switch (subband) {
  687. default:
  688. case RTW89_CH_2G:
  689. return RTW89_BB_GAIN_BAND_2G;
  690. case RTW89_CH_5G_BAND_1:
  691. return RTW89_BB_GAIN_BAND_5G_L;
  692. case RTW89_CH_5G_BAND_3:
  693. return RTW89_BB_GAIN_BAND_5G_M;
  694. case RTW89_CH_5G_BAND_4:
  695. return RTW89_BB_GAIN_BAND_5G_H;
  696. case RTW89_CH_6G_BAND_IDX0:
  697. case RTW89_CH_6G_BAND_IDX1:
  698. return RTW89_BB_GAIN_BAND_6G_L;
  699. case RTW89_CH_6G_BAND_IDX2:
  700. case RTW89_CH_6G_BAND_IDX3:
  701. return RTW89_BB_GAIN_BAND_6G_M;
  702. case RTW89_CH_6G_BAND_IDX4:
  703. case RTW89_CH_6G_BAND_IDX5:
  704. return RTW89_BB_GAIN_BAND_6G_H;
  705. case RTW89_CH_6G_BAND_IDX6:
  706. case RTW89_CH_6G_BAND_IDX7:
  707. return RTW89_BB_GAIN_BAND_6G_UH;
  708. }
  709. }
/* Program per-path gain-error compensation into the BB registers for the
 * given subband: LNA gain, LNA bypass gain, op1dB points (non-2 GHz only)
 * and TIA gain, all taken from the parsed bb_gain tables. 2 GHz uses the
 * gain_g register set, all other bands the gain_a set.
 */
static void rtw8852c_set_gain_error(struct rtw89_dev *rtwdev,
				    enum rtw89_subband subband,
				    enum rtw89_rf_path path)
{
	const struct rtw89_phy_bb_gain_info *gain = &rtwdev->bb_gain;
	u8 gain_band = rtw8852c_mapping_gain_band(subband);
	s32 val;
	u32 reg;
	u32 mask;
	int i;

	for (i = 0; i < LNA_GAIN_NUM; i++) {
		/* LNA gain */
		if (subband == RTW89_CH_2G)
			reg = bb_gain_lna[i].gain_g[path];
		else
			reg = bb_gain_lna[i].gain_a[path];
		mask = bb_gain_lna[i].gain_mask;
		val = gain->lna_gain[gain_band][path][i];
		rtw89_phy_write32_mask(rtwdev, reg, mask, val);

		/* LNA bypass gain */
		if (subband == RTW89_CH_2G) {
			reg = bb_gain_bypass_lna[i].gain_g[path];
			mask = bb_gain_bypass_lna[i].gain_mask_g;
		} else {
			reg = bb_gain_bypass_lna[i].gain_a[path];
			mask = bb_gain_bypass_lna[i].gain_mask_a;
		}
		val = gain->lna_gain_bypass[gain_band][path][i];
		rtw89_phy_write32_mask(rtwdev, reg, mask, val);

		/* op1dB points exist only in the A (non-2 GHz) register set */
		if (subband != RTW89_CH_2G) {
			reg = bb_gain_op1db_a.reg[i].lna[path];
			mask = bb_gain_op1db_a.reg[i].mask;
			val = gain->lna_op1db[gain_band][path][i];
			rtw89_phy_write32_mask(rtwdev, reg, mask, val);
			reg = bb_gain_op1db_a.reg[i].tia_lna[path];
			mask = bb_gain_op1db_a.reg[i].mask;
			val = gain->tia_lna_op1db[gain_band][path][i];
			rtw89_phy_write32_mask(rtwdev, reg, mask, val);
		}
	}

	if (subband != RTW89_CH_2G) {
		/* TIA0/LNA6 op1dB (table index 7) has its own register field */
		reg = bb_gain_op1db_a.reg_tia0_lna6[path];
		mask = bb_gain_op1db_a.mask_tia0_lna6;
		val = gain->tia_lna_op1db[gain_band][path][7];
		rtw89_phy_write32_mask(rtwdev, reg, mask, val);
	}

	for (i = 0; i < TIA_GAIN_NUM; i++) {
		/* TIA gain */
		if (subband == RTW89_CH_2G)
			reg = bb_gain_tia[i].gain_g[path];
		else
			reg = bb_gain_tia[i].gain_a[path];
		mask = bb_gain_tia[i].gain_mask;
		val = gain->tia_gain[gain_band][path][i];
		rtw89_phy_write32_mask(rtwdev, reg, mask, val);
	}
}
/* Base channels for the encoded channel index: entry 0 is the 2 GHz base,
 * entries 2-5 the 5 GHz segment starts, entries 7-14 the 6 GHz segment
 * starts; 0xff entries are segment terminators. An encoded index packs
 * the table index in bits [7:4] and (channel - base) / 2 in bits [3:0].
 */
static
const u8 rtw8852c_ch_base_table[16] = {1, 0xff,
				       36, 100, 132, 149, 0xff,
				       1, 33, 65, 97, 129, 161, 193, 225, 0xff};
#define RTW8852C_CH_BASE_IDX_2G		0
#define RTW8852C_CH_BASE_IDX_5G_FIRST	2
#define RTW8852C_CH_BASE_IDX_5G_LAST	5
#define RTW8852C_CH_BASE_IDX_6G_FIRST	7
#define RTW8852C_CH_BASE_IDX_6G_LAST	14
#define RTW8852C_CH_BASE_IDX_MASK	GENMASK(7, 4)
#define RTW8852C_CH_OFFSET_MASK		GENMASK(3, 0)
  775. static u8 rtw8852c_encode_chan_idx(struct rtw89_dev *rtwdev, u8 central_ch, u8 band)
  776. {
  777. u8 chan_idx;
  778. u8 last, first;
  779. u8 idx;
  780. switch (band) {
  781. case RTW89_BAND_2G:
  782. chan_idx = FIELD_PREP(RTW8852C_CH_BASE_IDX_MASK, RTW8852C_CH_BASE_IDX_2G) |
  783. FIELD_PREP(RTW8852C_CH_OFFSET_MASK, central_ch);
  784. return chan_idx;
  785. case RTW89_BAND_5G:
  786. first = RTW8852C_CH_BASE_IDX_5G_FIRST;
  787. last = RTW8852C_CH_BASE_IDX_5G_LAST;
  788. break;
  789. case RTW89_BAND_6G:
  790. first = RTW8852C_CH_BASE_IDX_6G_FIRST;
  791. last = RTW8852C_CH_BASE_IDX_6G_LAST;
  792. break;
  793. default:
  794. rtw89_warn(rtwdev, "Unsupported band %d\n", band);
  795. return 0;
  796. }
  797. for (idx = last; idx >= first; idx--)
  798. if (central_ch >= rtw8852c_ch_base_table[idx])
  799. break;
  800. if (idx < first) {
  801. rtw89_warn(rtwdev, "Unknown band %d channel %d\n", band, central_ch);
  802. return 0;
  803. }
  804. chan_idx = FIELD_PREP(RTW8852C_CH_BASE_IDX_MASK, idx) |
  805. FIELD_PREP(RTW8852C_CH_OFFSET_MASK,
  806. (central_ch - rtw8852c_ch_base_table[idx]) >> 1);
  807. return chan_idx;
  808. }
  809. static void rtw8852c_decode_chan_idx(struct rtw89_dev *rtwdev, u8 chan_idx,
  810. u8 *ch, enum nl80211_band *band)
  811. {
  812. u8 idx, offset;
  813. idx = FIELD_GET(RTW8852C_CH_BASE_IDX_MASK, chan_idx);
  814. offset = FIELD_GET(RTW8852C_CH_OFFSET_MASK, chan_idx);
  815. if (idx == RTW8852C_CH_BASE_IDX_2G) {
  816. *band = NL80211_BAND_2GHZ;
  817. *ch = offset;
  818. return;
  819. }
  820. *band = idx <= RTW8852C_CH_BASE_IDX_5G_LAST ? NL80211_BAND_5GHZ : NL80211_BAND_6GHZ;
  821. *ch = rtw8852c_ch_base_table[idx] + (offset << 1);
  822. }
/* Apply efuse RSSI gain offsets for one RF path: a CCK report offset on
 * 2 GHz, and band-dependent OFDM offsets into the per-path gain and RSSI
 * report registers. No-op when the efuse carries no valid offsets.
 */
static void rtw8852c_set_gain_offset(struct rtw89_dev *rtwdev,
				     const struct rtw89_chan *chan,
				     enum rtw89_phy_idx phy_idx,
				     enum rtw89_rf_path path)
{
	static const u32 rssi_ofst_addr[2] = {R_PATH0_G_TIA0_LNA6_OP1DB_V1,
					      R_PATH1_G_TIA0_LNA6_OP1DB_V1};
	static const u32 rpl_mask[2] = {B_RPL_PATHA_MASK, B_RPL_PATHB_MASK};
	static const u32 rpl_tb_mask[2] = {B_RSSI_M_PATHA_MASK, B_RSSI_M_PATHB_MASK};
	struct rtw89_phy_efuse_gain *efuse_gain = &rtwdev->efuse_gain;
	enum rtw89_gain_offset gain_band;
	s32 offset_q0, offset_base_q4;
	s32 tmp = 0;

	if (!efuse_gain->offset_valid)
		return;

	/* Under DBCC, path B belongs to PHY 1 regardless of the caller */
	if (rtwdev->dbcc_en && path == RF_PATH_B)
		phy_idx = RTW89_PHY_1;

	if (chan->band_type == RTW89_BAND_2G) {
		offset_q0 = efuse_gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK];
		offset_base_q4 = efuse_gain->offset_base[phy_idx];

		/* Combine Q0 offset and Q4 base into the register's
		 * fixed-point format, clamped to its signed 7-bit range.
		 * NOTE(review): exact Q-format of B_RPL_OFST_MASK inferred
		 * from the shifts — confirm against register documentation.
		 */
		tmp = clamp_t(s32, (-offset_q0 << 3) + (offset_base_q4 >> 1),
			      S8_MIN >> 1, S8_MAX >> 1);
		rtw89_phy_write32_mask(rtwdev, R_RPL_OFST, B_RPL_OFST_MASK, tmp & 0x7f);
	}

	switch (chan->subband_type) {
	default:
	case RTW89_CH_2G:
		gain_band = RTW89_GAIN_OFFSET_2G_OFDM;
		break;
	case RTW89_CH_5G_BAND_1:
		gain_band = RTW89_GAIN_OFFSET_5G_LOW;
		break;
	case RTW89_CH_5G_BAND_3:
		gain_band = RTW89_GAIN_OFFSET_5G_MID;
		break;
	case RTW89_CH_5G_BAND_4:
		gain_band = RTW89_GAIN_OFFSET_5G_HIGH;
		break;
	}

	offset_q0 = -efuse_gain->offset[path][gain_band];
	offset_base_q4 = efuse_gain->offset_base[phy_idx];

	/* Per-path gain register: clamped signed byte */
	tmp = (offset_q0 << 2) + (offset_base_q4 >> 2);
	tmp = clamp_t(s32, -tmp, S8_MIN, S8_MAX);
	rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], B_PATH0_R_G_OFST_MASK, tmp & 0xff);

	/* RSSI report registers use the offset alone, scaled by 16 */
	tmp = clamp_t(s32, offset_q0 << 4, S8_MIN, S8_MAX);
	rtw89_phy_write32_idx(rtwdev, R_RPL_PATHAB, rpl_mask[path], tmp & 0xff, phy_idx);
	rtw89_phy_write32_idx(rtwdev, R_RSSI_M_PATHAB, rpl_tb_mask[path], tmp & 0xff, phy_idx);
}
/* Per-channel BB programming for one PHY: gain error/offset per path,
 * 2 GHz band selection, SCO frequency compensation, CCK filter
 * coefficients (2 GHz only) and the encoded channel index. PHY 0 also
 * covers path B when DBCC is disabled.
 */
static void rtw8852c_ctrl_ch(struct rtw89_dev *rtwdev,
			     const struct rtw89_chan *chan,
			     enum rtw89_phy_idx phy_idx)
{
	u8 sco;
	u16 central_freq = chan->freq;
	u8 central_ch = chan->channel;
	u8 band = chan->band_type;
	u8 subband = chan->subband_type;
	bool is_2g = band == RTW89_BAND_2G;
	u8 chan_idx;

	if (!central_freq) {
		rtw89_warn(rtwdev, "Invalid central_freq\n");
		return;
	}

	if (phy_idx == RTW89_PHY_0) {
		/* Path A */
		rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_A);
		rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_A);
		if (is_2g)
			rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
					      B_PATH0_BAND_SEL_MSK_V1, 1,
					      phy_idx);
		else
			rtw89_phy_write32_idx(rtwdev, R_PATH0_BAND_SEL_V1,
					      B_PATH0_BAND_SEL_MSK_V1, 0,
					      phy_idx);
		/* Path B */
		if (!rtwdev->dbcc_en) {
			/* single-PHY mode: PHY 0 also owns path B */
			rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_B);
			rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_B);
			if (is_2g)
				rtw89_phy_write32_idx(rtwdev,
						      R_PATH1_BAND_SEL_V1,
						      B_PATH1_BAND_SEL_MSK_V1,
						      1, phy_idx);
			else
				rtw89_phy_write32_idx(rtwdev,
						      R_PATH1_BAND_SEL_V1,
						      B_PATH1_BAND_SEL_MSK_V1,
						      0, phy_idx);
			rtw89_phy_write32_clr(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
		} else {
			if (is_2g)
				rtw89_phy_write32_clr(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
			else
				rtw89_phy_write32_set(rtwdev, R_2P4G_BAND, B_2P4G_BAND_SEL);
		}
		/* SCO compensate FC setting */
		rtw89_phy_write32_idx(rtwdev, R_FC0_V1, B_FC0_MSK_V1,
				      central_freq, phy_idx);
		/* round_up((1/fc0)*pow(2,18)) */
		sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_INV, sco,
				      phy_idx);
	} else {
		/* Path B */
		rtw8852c_set_gain_error(rtwdev, subband, RF_PATH_B);
		rtw8852c_set_gain_offset(rtwdev, chan, phy_idx, RF_PATH_B);
		if (is_2g)
			rtw89_phy_write32_idx(rtwdev, R_PATH1_BAND_SEL_V1,
					      B_PATH1_BAND_SEL_MSK_V1,
					      1, phy_idx);
		else
			rtw89_phy_write32_idx(rtwdev, R_PATH1_BAND_SEL_V1,
					      B_PATH1_BAND_SEL_MSK_V1,
					      0, phy_idx);
		/* SCO compensate FC setting */
		rtw89_phy_write32_idx(rtwdev, R_FC0_V1, B_FC0_MSK_V1,
				      central_freq, phy_idx);
		/* round_up((1/fc0)*pow(2,18)) */
		sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_INV, sco,
				      phy_idx);
	}

	/* CCK parameters: channel 14 uses a dedicated filter coefficient
	 * set, all other 2 GHz channels share a common one.
	 */
	if (band == RTW89_BAND_2G) {
		if (central_ch == 14) {
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF0_V1,
					       B_PCOEFF01_MSK_V1, 0x3b13ff);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF2_V1,
					       B_PCOEFF23_MSK_V1, 0x1c42de);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF4_V1,
					       B_PCOEFF45_MSK_V1, 0xfdb0ad);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF6_V1,
					       B_PCOEFF67_MSK_V1, 0xf60f6e);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF8_V1,
					       B_PCOEFF89_MSK_V1, 0xfd8f92);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFFA_V1,
					       B_PCOEFFAB_MSK_V1, 0x2d011);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFFC_V1,
					       B_PCOEFFCD_MSK_V1, 0x1c02c);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFFE_V1,
					       B_PCOEFFEF_MSK_V1, 0xfff00a);
		} else {
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF0_V1,
					       B_PCOEFF01_MSK_V1, 0x3d23ff);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF2_V1,
					       B_PCOEFF23_MSK_V1, 0x29b354);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF4_V1,
					       B_PCOEFF45_MSK_V1, 0xfc1c8);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF6_V1,
					       B_PCOEFF67_MSK_V1, 0xfdb053);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFF8_V1,
					       B_PCOEFF89_MSK_V1, 0xf86f9a);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFFA_V1,
					       B_PCOEFFAB_MSK_V1, 0xfaef92);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFFC_V1,
					       B_PCOEFFCD_MSK_V1, 0xfe5fcc);
			rtw89_phy_write32_mask(rtwdev, R_PCOEFFE_V1,
					       B_PCOEFFEF_MSK_V1, 0xffdff5);
		}
	}

	chan_idx = rtw8852c_encode_chan_idx(rtwdev, chan->primary_channel, band);
	rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
}
  987. static void rtw8852c_bw_setting(struct rtw89_dev *rtwdev, u8 bw, u8 path)
  988. {
  989. static const u32 adc_sel[2] = {0xC0EC, 0xC1EC};
  990. static const u32 wbadc_sel[2] = {0xC0E4, 0xC1E4};
  991. switch (bw) {
  992. case RTW89_CHANNEL_WIDTH_5:
  993. rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x1);
  994. rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x0);
  995. break;
  996. case RTW89_CHANNEL_WIDTH_10:
  997. rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x2);
  998. rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x1);
  999. break;
  1000. case RTW89_CHANNEL_WIDTH_20:
  1001. case RTW89_CHANNEL_WIDTH_40:
  1002. case RTW89_CHANNEL_WIDTH_80:
  1003. case RTW89_CHANNEL_WIDTH_160:
  1004. rtw89_phy_write32_mask(rtwdev, adc_sel[path], 0x6000, 0x0);
  1005. rtw89_phy_write32_mask(rtwdev, wbadc_sel[path], 0x30, 0x2);
  1006. break;
  1007. default:
  1008. rtw89_warn(rtwdev, "Fail to set ADC\n");
  1009. }
  1010. }
  1011. static void rtw8852c_edcca_per20_bitmap_sifs(struct rtw89_dev *rtwdev, u8 bw,
  1012. enum rtw89_phy_idx phy_idx)
  1013. {
  1014. if (bw == RTW89_CHANNEL_WIDTH_20) {
  1015. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A1, B_SNDCCA_A1_EN, 0xff, phy_idx);
  1016. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A2, B_SNDCCA_A2_VAL, 0, phy_idx);
  1017. } else {
  1018. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A1, B_SNDCCA_A1_EN, 0, phy_idx);
  1019. rtw89_phy_write32_idx(rtwdev, R_SNDCCA_A2, B_SNDCCA_A2_VAL, 0, phy_idx);
  1020. }
  1021. }
  1022. static void
  1023. rtw8852c_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_ch, u8 bw,
  1024. enum rtw89_phy_idx phy_idx)
  1025. {
  1026. u8 mod_sbw = 0;
  1027. switch (bw) {
  1028. case RTW89_CHANNEL_WIDTH_5:
  1029. case RTW89_CHANNEL_WIDTH_10:
  1030. case RTW89_CHANNEL_WIDTH_20:
  1031. if (bw == RTW89_CHANNEL_WIDTH_5)
  1032. mod_sbw = 0x1;
  1033. else if (bw == RTW89_CHANNEL_WIDTH_10)
  1034. mod_sbw = 0x2;
  1035. else if (bw == RTW89_CHANNEL_WIDTH_20)
  1036. mod_sbw = 0x0;
  1037. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x0,
  1038. phy_idx);
  1039. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW,
  1040. mod_sbw, phy_idx);
  1041. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH, 0x0,
  1042. phy_idx);
  1043. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  1044. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x3);
  1045. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  1046. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x3);
  1047. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  1048. B_PATH0_BW_SEL_MSK_V1, 0xf);
  1049. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  1050. B_PATH1_BW_SEL_MSK_V1, 0xf);
  1051. break;
  1052. case RTW89_CHANNEL_WIDTH_40:
  1053. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x1,
  1054. phy_idx);
  1055. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
  1056. phy_idx);
  1057. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
  1058. pri_ch,
  1059. phy_idx);
  1060. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  1061. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x3);
  1062. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  1063. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x3);
  1064. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  1065. B_PATH0_BW_SEL_MSK_V1, 0xf);
  1066. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  1067. B_PATH1_BW_SEL_MSK_V1, 0xf);
  1068. break;
  1069. case RTW89_CHANNEL_WIDTH_80:
  1070. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x2,
  1071. phy_idx);
  1072. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
  1073. phy_idx);
  1074. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
  1075. pri_ch,
  1076. phy_idx);
  1077. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  1078. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x2);
  1079. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  1080. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x2);
  1081. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  1082. B_PATH0_BW_SEL_MSK_V1, 0xd);
  1083. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  1084. B_PATH1_BW_SEL_MSK_V1, 0xd);
  1085. break;
  1086. case RTW89_CHANNEL_WIDTH_160:
  1087. rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_FC0_BW_SET, 0x3,
  1088. phy_idx);
  1089. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_SBW, 0x0,
  1090. phy_idx);
  1091. rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_CHBW_MOD_PRICH,
  1092. pri_ch,
  1093. phy_idx);
  1094. rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1,
  1095. B_PATH0_SAMPL_DLY_T_MSK_V1, 0x1);
  1096. rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1,
  1097. B_PATH1_SAMPL_DLY_T_MSK_V1, 0x1);
  1098. rtw89_phy_write32_mask(rtwdev, R_PATH0_BW_SEL_V1,
  1099. B_PATH0_BW_SEL_MSK_V1, 0xb);
  1100. rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1,
  1101. B_PATH1_BW_SEL_MSK_V1, 0xb);
  1102. break;
  1103. default:
  1104. rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri ch:%d)\n", bw,
  1105. pri_ch);
  1106. }
  1107. if (bw == RTW89_CHANNEL_WIDTH_40) {
  1108. rtw89_phy_write32_idx(rtwdev, R_RX_BW40_2XFFT_EN_V1,
  1109. B_RX_BW40_2XFFT_EN_MSK_V1, 0x1, phy_idx);
  1110. rtw89_phy_write32_idx(rtwdev, R_T2F_GI_COMB, B_T2F_GI_COMB_EN, 1, phy_idx);
  1111. } else {
  1112. rtw89_phy_write32_idx(rtwdev, R_RX_BW40_2XFFT_EN_V1,
  1113. B_RX_BW40_2XFFT_EN_MSK_V1, 0x0, phy_idx);
  1114. rtw89_phy_write32_idx(rtwdev, R_T2F_GI_COMB, B_T2F_GI_COMB_EN, 0, phy_idx);
  1115. }
  1116. if (phy_idx == RTW89_PHY_0) {
  1117. rtw8852c_bw_setting(rtwdev, bw, RF_PATH_A);
  1118. if (!rtwdev->dbcc_en)
  1119. rtw8852c_bw_setting(rtwdev, bw, RF_PATH_B);
  1120. } else {
  1121. rtw8852c_bw_setting(rtwdev, bw, RF_PATH_B);
  1122. }
  1123. rtw8852c_edcca_per20_bitmap_sifs(rtwdev, bw, phy_idx);
  1124. }
  1125. static u32 rtw8852c_spur_freq(struct rtw89_dev *rtwdev,
  1126. const struct rtw89_chan *chan)
  1127. {
  1128. u8 center_chan = chan->channel;
  1129. u8 bw = chan->band_width;
  1130. switch (chan->band_type) {
  1131. case RTW89_BAND_2G:
  1132. if (bw == RTW89_CHANNEL_WIDTH_20) {
  1133. if (center_chan >= 5 && center_chan <= 8)
  1134. return 2440;
  1135. if (center_chan == 13)
  1136. return 2480;
  1137. } else if (bw == RTW89_CHANNEL_WIDTH_40) {
  1138. if (center_chan >= 3 && center_chan <= 10)
  1139. return 2440;
  1140. }
  1141. break;
  1142. case RTW89_BAND_5G:
  1143. if (center_chan == 151 || center_chan == 153 ||
  1144. center_chan == 155 || center_chan == 163)
  1145. return 5760;
  1146. break;
  1147. case RTW89_BAND_6G:
  1148. if (center_chan == 195 || center_chan == 197 ||
  1149. center_chan == 199 || center_chan == 207)
  1150. return 6920;
  1151. break;
  1152. default:
  1153. break;
  1154. }
  1155. return 0;
  1156. }
/* Sub-carrier spacings in Hz and the CSI tone-index wrap modulus */
#define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
#define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
#define MAX_TONE_NUM 2048
  1160. static void rtw8852c_set_csi_tone_idx(struct rtw89_dev *rtwdev,
  1161. const struct rtw89_chan *chan,
  1162. enum rtw89_phy_idx phy_idx)
  1163. {
  1164. u32 spur_freq;
  1165. s32 freq_diff, csi_idx, csi_tone_idx;
  1166. spur_freq = rtw8852c_spur_freq(rtwdev, chan);
  1167. if (spur_freq == 0) {
  1168. rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN, B_SEG0CSI_EN, 0, phy_idx);
  1169. return;
  1170. }
  1171. freq_diff = (spur_freq - chan->freq) * 1000000;
  1172. csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
  1173. s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);
  1174. rtw89_phy_write32_idx(rtwdev, R_SEG0CSI, B_SEG0CSI_IDX, csi_tone_idx, phy_idx);
  1175. rtw89_phy_write32_idx(rtwdev, R_SEG0CSI_EN, B_SEG0CSI_EN, 1, phy_idx);
  1176. }
/* Per-path narrow-band interference (NBI) notch filter registers: tone
 * index, fractional index and enable bit for the two notch filters.
 */
static const struct rtw89_nbi_reg_def rtw8852c_nbi_reg_def[] = {
	[RF_PATH_A] = {
		.notch1_idx = {0x4C14, 0xFF},
		.notch1_frac_idx = {0x4C14, 0xC00},
		.notch1_en = {0x4C14, 0x1000},
		.notch2_idx = {0x4C20, 0xFF},
		.notch2_frac_idx = {0x4C20, 0xC00},
		.notch2_en = {0x4C20, 0x1000},
	},
	[RF_PATH_B] = {
		.notch1_idx = {0x4CD8, 0xFF},
		.notch1_frac_idx = {0x4CD8, 0xC00},
		.notch1_en = {0x4CD8, 0x1000},
		.notch2_idx = {0x4CE4, 0xFF},
		.notch2_frac_idx = {0x4CE4, 0xC00},
		.notch2_en = {0x4CE4, 0x1000},
	},
};
  1195. static void rtw8852c_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
  1196. const struct rtw89_chan *chan,
  1197. enum rtw89_rf_path path)
  1198. {
  1199. const struct rtw89_nbi_reg_def *nbi = &rtw8852c_nbi_reg_def[path];
  1200. u32 spur_freq, fc;
  1201. s32 freq_diff;
  1202. s32 nbi_idx, nbi_tone_idx;
  1203. s32 nbi_frac_idx, nbi_frac_tone_idx;
  1204. bool notch2_chk = false;
  1205. spur_freq = rtw8852c_spur_freq(rtwdev, chan);
  1206. if (spur_freq == 0) {
  1207. rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
  1208. rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
  1209. return;
  1210. }
  1211. fc = chan->freq;
  1212. if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
  1213. fc = (spur_freq > fc) ? fc + 40 : fc - 40;
  1214. if ((fc > spur_freq &&
  1215. chan->channel < chan->primary_channel) ||
  1216. (fc < spur_freq &&
  1217. chan->channel > chan->primary_channel))
  1218. notch2_chk = true;
  1219. }
  1220. freq_diff = (spur_freq - fc) * 1000000;
  1221. nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5, &nbi_frac_idx);
  1222. if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
  1223. s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
  1224. } else {
  1225. u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
  1226. 128 : 256;
  1227. s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
  1228. }
  1229. nbi_frac_tone_idx = s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125);
  1230. if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
  1231. rtw89_phy_write32_mask(rtwdev, nbi->notch2_idx.addr,
  1232. nbi->notch2_idx.mask, nbi_tone_idx);
  1233. rtw89_phy_write32_mask(rtwdev, nbi->notch2_frac_idx.addr,
  1234. nbi->notch2_frac_idx.mask, nbi_frac_tone_idx);
  1235. rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
  1236. rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 1);
  1237. rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
  1238. } else {
  1239. rtw89_phy_write32_mask(rtwdev, nbi->notch1_idx.addr,
  1240. nbi->notch1_idx.mask, nbi_tone_idx);
  1241. rtw89_phy_write32_mask(rtwdev, nbi->notch1_frac_idx.addr,
  1242. nbi->notch1_frac_idx.mask, nbi_frac_tone_idx);
  1243. rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 0);
  1244. rtw89_phy_write32_mask(rtwdev, nbi->notch1_en.addr, nbi->notch1_en.mask, 1);
  1245. rtw89_phy_write32_mask(rtwdev, nbi->notch2_en.addr, nbi->notch2_en.mask, 0);
  1246. }
  1247. }
  1248. static void rtw8852c_spur_notch(struct rtw89_dev *rtwdev, u32 val,
  1249. enum rtw89_phy_idx phy_idx)
  1250. {
  1251. u32 notch;
  1252. u32 notch2;
  1253. if (phy_idx == RTW89_PHY_0) {
  1254. notch = R_PATH0_NOTCH;
  1255. notch2 = R_PATH0_NOTCH2;
  1256. } else {
  1257. notch = R_PATH1_NOTCH;
  1258. notch2 = R_PATH1_NOTCH2;
  1259. }
  1260. rtw89_phy_write32_mask(rtwdev, notch,
  1261. B_PATH0_NOTCH_VAL | B_PATH0_NOTCH_EN, val);
  1262. rtw89_phy_write32_set(rtwdev, notch, B_PATH0_NOTCH_EN);
  1263. rtw89_phy_write32_mask(rtwdev, notch2,
  1264. B_PATH0_NOTCH2_VAL | B_PATH0_NOTCH2_EN, val);
  1265. rtw89_phy_write32_set(rtwdev, notch2, B_PATH0_NOTCH2_EN);
  1266. }
/* Configure spur suppression for the new channel: CSI tone masking first,
 * then either fixed notch codewords (160 MHz with the primary 20 MHz at a
 * band edge) or channel-derived NBI tone indexes. When DBCC is off, PHY0
 * also programs PHY1/path B so both chains stay consistent.
 */
static void rtw8852c_spur_elimination(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan,
				      u8 pri_ch_idx,
				      enum rtw89_phy_idx phy_idx)
{
	rtw8852c_set_csi_tone_idx(rtwdev, chan, phy_idx);

	if (phy_idx == RTW89_PHY_0) {
		if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
		    (pri_ch_idx == RTW89_SC_20_LOWER ||
		     pri_ch_idx == RTW89_SC_20_UP3X)) {
			/* fixed notch codeword for lower-side primaries */
			rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_0);
			if (!rtwdev->dbcc_en)
				rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_1);
		} else if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
			   (pri_ch_idx == RTW89_SC_20_UPPER ||
			    pri_ch_idx == RTW89_SC_20_LOW3X)) {
			/* fixed notch codeword for upper-side primaries */
			rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_0);
			if (!rtwdev->dbcc_en)
				rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_1);
		} else {
			rtw8852c_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A);
			if (!rtwdev->dbcc_en)
				rtw8852c_set_nbi_tone_idx(rtwdev, chan,
							  RF_PATH_B);
		}
	} else {
		if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
		    (pri_ch_idx == RTW89_SC_20_LOWER ||
		     pri_ch_idx == RTW89_SC_20_UP3X)) {
			rtw8852c_spur_notch(rtwdev, 0xe7f, RTW89_PHY_1);
		} else if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
			   (pri_ch_idx == RTW89_SC_20_UPPER ||
			    pri_ch_idx == RTW89_SC_20_LOW3X)) {
			rtw8852c_spur_notch(rtwdev, 0x280, RTW89_PHY_1);
		} else {
			rtw8852c_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B);
		}
	}

	/* PD boost is disabled only for the 3x sub-channel primaries */
	if (pri_ch_idx == RTW89_SC_20_UP3X || pri_ch_idx == RTW89_SC_20_LOW3X)
		rtw89_phy_write32_idx(rtwdev, R_PD_BOOST_EN, B_PD_BOOST_EN, 0, phy_idx);
	else
		rtw89_phy_write32_idx(rtwdev, R_PD_BOOST_EN, B_PD_BOOST_EN, 1, phy_idx);
}
  1310. static void rtw8852c_5m_mask(struct rtw89_dev *rtwdev,
  1311. const struct rtw89_chan *chan,
  1312. enum rtw89_phy_idx phy_idx)
  1313. {
  1314. u8 pri_ch = chan->pri_ch_idx;
  1315. bool mask_5m_low;
  1316. bool mask_5m_en;
  1317. switch (chan->band_width) {
  1318. case RTW89_CHANNEL_WIDTH_40:
  1319. mask_5m_en = true;
  1320. mask_5m_low = pri_ch == RTW89_SC_20_LOWER;
  1321. break;
  1322. case RTW89_CHANNEL_WIDTH_80:
  1323. mask_5m_en = pri_ch == RTW89_SC_20_UPMOST ||
  1324. pri_ch == RTW89_SC_20_LOWEST;
  1325. mask_5m_low = pri_ch == RTW89_SC_20_LOWEST;
  1326. break;
  1327. default:
  1328. mask_5m_en = false;
  1329. mask_5m_low = false;
  1330. break;
  1331. }
  1332. if (!mask_5m_en) {
  1333. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x0);
  1334. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x0);
  1335. rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT,
  1336. B_ASSIGN_SBD_OPT_EN, 0x0, phy_idx);
  1337. } else {
  1338. if (mask_5m_low) {
  1339. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_TH, 0x4);
  1340. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x1);
  1341. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB2, 0x0);
  1342. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB0, 0x1);
  1343. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_TH, 0x4);
  1344. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x1);
  1345. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB2, 0x0);
  1346. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB0, 0x1);
  1347. } else {
  1348. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_TH, 0x4);
  1349. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_EN, 0x1);
  1350. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB2, 0x1);
  1351. rtw89_phy_write32_mask(rtwdev, R_PATH0_5MDET, B_PATH0_5MDET_SB0, 0x0);
  1352. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_TH, 0x4);
  1353. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_EN, 0x1);
  1354. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB2, 0x1);
  1355. rtw89_phy_write32_mask(rtwdev, R_PATH1_5MDET, B_PATH1_5MDET_SB0, 0x0);
  1356. }
  1357. rtw89_phy_write32_idx(rtwdev, R_ASSIGN_SBD_OPT, B_ASSIGN_SBD_OPT_EN, 0x1, phy_idx);
  1358. }
  1359. }
/* Full BB reset: gate the HW serial interface (SI) on both paths, pulse
 * the async reset, then release everything again. Keep the write order
 * and the delay between SI gating and the reset pulse as-is.
 */
static void rtw8852c_bb_reset_all(struct rtw89_dev *rtwdev,
				  enum rtw89_phy_idx phy_idx)
{
	/*HW SI reset*/
	rtw89_phy_write32_mask(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG,
			       0x7);
	rtw89_phy_write32_mask(rtwdev, R_S1_HW_SI_DIS, B_S1_HW_SI_DIS_W_R_TRIG,
			       0x7);
	/* let the SI gating settle before toggling the reset */
	udelay(1);
	rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
			      phy_idx);
	rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0,
			      phy_idx);
	/*HW SI reset*/
	rtw89_phy_write32_mask(rtwdev, R_S0_HW_SI_DIS, B_S0_HW_SI_DIS_W_R_TRIG,
			       0x0);
	rtw89_phy_write32_mask(rtwdev, R_S1_HW_SI_DIS, B_S1_HW_SI_DIS_W_R_TRIG,
			       0x0);
	/* leave the BB running (reset de-asserted) */
	rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
			      phy_idx);
}
/* Enable or quiesce the BB around a channel switch.
 * en=true: release SI gating, de-assert reset, re-enable CCA (2 GHz only)
 * and PD hit detection. en=false: disable CCA/PD first, gate the SI,
 * wait briefly, then hold the BB in reset. The two paths are deliberately
 * asymmetric mirrors of each other.
 */
static void rtw8852c_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
				 enum rtw89_phy_idx phy_idx, bool en)
{
	if (en) {
		rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
				      B_S0_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_S1_HW_SI_DIS,
				      B_S1_HW_SI_DIS_W_R_TRIG, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1,
				      phy_idx);
		/* CCA is only re-enabled on 2 GHz; other bands leave it off */
		if (band == RTW89_BAND_2G)
			rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1);
		rtw89_phy_write32_idx(rtwdev, R_S0_HW_SI_DIS,
				      B_S0_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_S1_HW_SI_DIS,
				      B_S1_HW_SI_DIS_W_R_TRIG, 0x7, phy_idx);
		/* give in-flight SI accesses time to drain before reset */
		fsleep(1);
		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0,
				      phy_idx);
	}
}
/* Chip-ops entry point for a BB reset; delegates to the full reset. */
static void rtw8852c_bb_reset(struct rtw89_dev *rtwdev,
			      enum rtw89_phy_idx phy_idx)
{
	rtw8852c_bb_reset_all(rtwdev, phy_idx);
}
  1411. static
  1412. void rtw8852c_bb_gpio_trsw(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
  1413. u8 tx_path_en, u8 trsw_tx,
  1414. u8 trsw_rx, u8 trsw, u8 trsw_b)
  1415. {
  1416. static const u32 path_cr_bases[] = {0x5868, 0x7868};
  1417. u32 mask_ofst = 16;
  1418. u32 cr;
  1419. u32 val;
  1420. if (path >= ARRAY_SIZE(path_cr_bases))
  1421. return;
  1422. cr = path_cr_bases[path];
  1423. mask_ofst += (tx_path_en * 4 + trsw_tx * 2 + trsw_rx) * 2;
  1424. val = FIELD_PREP(B_P0_TRSW_A, trsw) | FIELD_PREP(B_P0_TRSW_B, trsw_b);
  1425. rtw89_phy_write32_mask(rtwdev, cr, (B_P0_TRSW_A | B_P0_TRSW_B) << mask_ofst, val);
  1426. }
/* Source selector for the RF-mode (RFM) GPIO control word. Each source
 * occupies its own byte lane in the 0x5894/0x7894 registers (offsets 0,
 * 8 and 16 in rtw8852c_bb_gpio_rfm()).
 */
enum rtw8852c_rfe_src {
	PAPE_RFM,
	TRSW_RFM,
	LNAON_RFM,
};
  1432. static
  1433. void rtw8852c_bb_gpio_rfm(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
  1434. enum rtw8852c_rfe_src src, u8 dis_tx_gnt_wl,
  1435. u8 active_tx_opt, u8 act_bt_en, u8 rfm_output_val)
  1436. {
  1437. static const u32 path_cr_bases[] = {0x5894, 0x7894};
  1438. static const u32 masks[] = {0, 8, 16};
  1439. u32 mask, mask_ofst;
  1440. u32 cr;
  1441. u32 val;
  1442. if (src >= ARRAY_SIZE(masks) || path >= ARRAY_SIZE(path_cr_bases))
  1443. return;
  1444. mask_ofst = masks[src];
  1445. cr = path_cr_bases[path];
  1446. val = FIELD_PREP(B_P0_RFM_DIS_WL, dis_tx_gnt_wl) |
  1447. FIELD_PREP(B_P0_RFM_TX_OPT, active_tx_opt) |
  1448. FIELD_PREP(B_P0_RFM_BT_EN, act_bt_en) |
  1449. FIELD_PREP(B_P0_RFM_OUT, rfm_output_val);
  1450. mask = 0xff << mask_ofst;
  1451. rtw89_phy_write32_mask(rtwdev, cr, mask, val);
  1452. }
/* One-time GPIO/RFE initialization for both RF paths: default TRSW pin
 * state, output-enable patterns, then the full TRSW truth table and the
 * RFM source defaults. The table entries enumerate every
 * (tx_path_en, trsw_tx, trsw_rx) combination for each path.
 */
static void rtw8852c_bb_gpio_init(struct rtw89_dev *rtwdev)
{
	static const u32 cr_bases[] = {0x5800, 0x7800};
	u32 addr;
	u8 i;

	for (i = 0; i < ARRAY_SIZE(cr_bases); i++) {
		addr = cr_bases[i];
		rtw89_phy_write32_set(rtwdev, (addr | 0x68), B_P0_TRSW_A);
		rtw89_phy_write32_clr(rtwdev, (addr | 0x68), B_P0_TRSW_X);
		rtw89_phy_write32_clr(rtwdev, (addr | 0x68), B_P0_TRSW_SO_A2);
		rtw89_phy_write32(rtwdev, (addr | 0x80), 0x77777777);
		rtw89_phy_write32(rtwdev, (addr | 0x84), 0x77777777);
	}

	rtw89_phy_write32(rtwdev, R_RFE_E_A2, 0xffffffff);
	rtw89_phy_write32(rtwdev, R_RFE_O_SEL_A2, 0);
	rtw89_phy_write32(rtwdev, R_RFE_SEL0_A2, 0);
	rtw89_phy_write32(rtwdev, R_RFE_SEL32_A2, 0);

	/* TRSW truth table: args are (tx_path_en, trsw_tx, trsw_rx, trsw, trsw_b) */
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 0, 0, 1);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 0, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 0, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 0, 1, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 0, 0, 1);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 0, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 0, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_A, 1, 1, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 0, 0, 0, 1);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 0, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 1, 0, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 0, 1, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 0, 0, 0, 1);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 0, 1, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 1, 0, 1, 0);
	rtw8852c_bb_gpio_trsw(rtwdev, RF_PATH_B, 1, 1, 1, 1, 0);

	/* default RFM output codewords per source */
	rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, PAPE_RFM, 0, 0, 0, 0x0);
	rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, TRSW_RFM, 0, 0, 0, 0x4);
	rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_A, LNAON_RFM, 0, 0, 0, 0x8);
	rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, PAPE_RFM, 0, 0, 0, 0x0);
	rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, TRSW_RFM, 0, 0, 0, 0x4);
	rtw8852c_bb_gpio_rfm(rtwdev, RF_PATH_B, LNAON_RFM, 0, 0, 0, 0x8);
}
  1493. static void rtw8852c_bb_macid_ctrl_init(struct rtw89_dev *rtwdev,
  1494. enum rtw89_phy_idx phy_idx)
  1495. {
  1496. u32 addr;
  1497. for (addr = R_AX_PWR_MACID_LMT_TABLE0;
  1498. addr <= R_AX_PWR_MACID_LMT_TABLE127; addr += 4)
  1499. rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);
  1500. }
/* Post-parameter-load BB setup: enable EVM report selection for
 * DBCC/80+80, clear the MACID power limit table, initialize GPIO/RFE,
 * and cache the RPL bias compensation bases for later gain math.
 */
static void rtw8852c_bb_sethw(struct rtw89_dev *rtwdev)
{
	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;

	rtw89_phy_write32_set(rtwdev, R_DBCC_80P80_SEL_EVM_RPT,
			      B_DBCC_80P80_SEL_EVM_RPT_EN);
	rtw89_phy_write32_set(rtwdev, R_DBCC_80P80_SEL_EVM_RPT2,
			      B_DBCC_80P80_SEL_EVM_RPT2_EN);

	rtw8852c_bb_macid_ctrl_init(rtwdev, RTW89_PHY_0);
	rtw8852c_bb_gpio_init(rtwdev);

	/* read these registers after loading BB parameters */
	gain->offset_base[RTW89_PHY_0] =
		rtw89_phy_read32_mask(rtwdev, R_RPL_BIAS_COMP, B_RPL_BIAS_COMP_MASK);
	gain->offset_base[RTW89_PHY_1] =
		rtw89_phy_read32_mask(rtwdev, R_RPL_BIAS_COMP1, B_RPL_BIAS_COMP1_MASK);
}
/* Apply all BB-side settings for a channel switch: SCO/CCK compensation,
 * channel and bandwidth programming, CCK enablement, spur elimination,
 * BTG, 5 MHz masking, 160 MHz primary-position fixups, TX power reset
 * strobes and the TX path selection, ending with a full BB reset.
 */
static void rtw8852c_set_channel_bb(struct rtw89_dev *rtwdev,
				    const struct rtw89_chan *chan,
				    enum rtw89_phy_idx phy_idx)
{
	struct rtw89_hal *hal = &rtwdev->hal;
	bool cck_en = chan->band_type == RTW89_BAND_2G;
	u8 pri_ch_idx = chan->pri_ch_idx;
	u32 mask, reg;
	u32 ru_alloc_msk[2] = {B_P80_AT_HIGH_FREQ_RU_ALLOC_PHY0,
			       B_P80_AT_HIGH_FREQ_RU_ALLOC_PHY1};
	u8 ntx_path;

	/* CCK SCO compensation only applies on 2 GHz */
	if (chan->band_type == RTW89_BAND_2G)
		rtw8852c_ctrl_sco_cck(rtwdev, chan->channel,
				      chan->primary_channel,
				      chan->band_width);

	rtw8852c_ctrl_ch(rtwdev, chan, phy_idx);
	rtw8852c_ctrl_bw(rtwdev, pri_ch_idx, chan->band_width, phy_idx);
	if (cck_en) {
		rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1);
		rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 0);
		rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF,
				      B_PD_ARBITER_OFF, 0x0, phy_idx);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0);
		rtw89_phy_write32_mask(rtwdev, R_RXCCA_V1, B_RXCCA_DIS_V1, 1);
		rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF,
				      B_PD_ARBITER_OFF, 0x1, phy_idx);
	}

	rtw8852c_spur_elimination(rtwdev, chan, pri_ch_idx, phy_idx);
	rtw8852c_ctrl_btg(rtwdev, chan->band_type == RTW89_BAND_2G);
	rtw8852c_5m_mask(rtwdev, chan, phy_idx);

	/* 160 MHz primary-position fixups; skipped on CAV silicon */
	if (chan->band_width == RTW89_CHANNEL_WIDTH_160 &&
	    rtwdev->hal.cv != CHIP_CAV) {
		rtw89_phy_write32_idx(rtwdev, R_P80_AT_HIGH_FREQ,
				      B_P80_AT_HIGH_FREQ, 0x0, phy_idx);
		reg = rtw89_mac_reg_by_idx(R_P80_AT_HIGH_FREQ_BB_WRP,
					   phy_idx);
		if (chan->primary_channel > chan->channel) {
			rtw89_phy_write32_mask(rtwdev,
					       R_P80_AT_HIGH_FREQ_RU_ALLOC,
					       ru_alloc_msk[phy_idx], 1);
			rtw89_write32_mask(rtwdev, reg,
					   B_P80_AT_HIGH_FREQ_BB_WRP, 1);
		} else {
			rtw89_phy_write32_mask(rtwdev,
					       R_P80_AT_HIGH_FREQ_RU_ALLOC,
					       ru_alloc_msk[phy_idx], 0);
			rtw89_write32_mask(rtwdev, reg,
					   B_P80_AT_HIGH_FREQ_BB_WRP, 0);
		}
	}

	/* CDD EVM check is disabled only for 6 GHz @ 160 MHz */
	if (chan->band_type == RTW89_BAND_6G &&
	    chan->band_width == RTW89_CHANNEL_WIDTH_160)
		rtw89_phy_write32_idx(rtwdev, R_CDD_EVM_CHK_EN,
				      B_CDD_EVM_CHK_EN, 0, phy_idx);
	else
		rtw89_phy_write32_idx(rtwdev, R_CDD_EVM_CHK_EN,
				      B_CDD_EVM_CHK_EN, 1, phy_idx);

	/* strobe the TXPW reset bits (0x1 then 0x3) on the paths this PHY owns */
	if (!rtwdev->dbcc_en) {
		mask = B_P0_TXPW_RSTB_TSSI | B_P0_TXPW_RSTB_MANON;
		rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x3);
		mask = B_P1_TXPW_RSTB_TSSI | B_P1_TXPW_RSTB_MANON;
		rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x3);
	} else {
		if (phy_idx == RTW89_PHY_0) {
			mask = B_P0_TXPW_RSTB_TSSI | B_P0_TXPW_RSTB_MANON;
			rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x1);
			rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, mask, 0x3);
		} else {
			mask = B_P1_TXPW_RSTB_TSSI | B_P1_TXPW_RSTB_MANON;
			rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x1);
			rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, mask, 0x3);
		}
	}

	if (chan->band_type == RTW89_BAND_6G)
		rtw89_phy_write32_set(rtwdev, R_MUIC, B_MUIC_EN);
	else
		rtw89_phy_write32_clr(rtwdev, R_MUIC, B_MUIC_EN);

	/* honor a user-forced TX antenna; otherwise 6 GHz defaults to path B */
	if (hal->antenna_tx)
		ntx_path = hal->antenna_tx;
	else
		ntx_path = chan->band_type == RTW89_BAND_6G ? RF_B : RF_AB;

	rtw8852c_ctrl_tx_path_tmac(rtwdev, ntx_path, (enum rtw89_mac_idx)phy_idx);

	rtw8852c_bb_reset_all(rtwdev, phy_idx);
}
/* Channel switch entry point: program MAC, then BB, then RF — the
 * layers are applied in that order.
 */
static void rtw8852c_set_channel(struct rtw89_dev *rtwdev,
				 const struct rtw89_chan *chan,
				 enum rtw89_mac_idx mac_idx,
				 enum rtw89_phy_idx phy_idx)
{
	rtw8852c_set_channel_mac(rtwdev, chan, mac_idx);
	rtw8852c_set_channel_bb(rtwdev, chan, phy_idx);
	rtw8852c_set_channel_rf(rtwdev, chan, phy_idx);
}
  1612. static void rtw8852c_dfs_en(struct rtw89_dev *rtwdev, bool en)
  1613. {
  1614. if (en)
  1615. rtw89_phy_write32_mask(rtwdev, R_UPD_P0, B_UPD_P0_EN, 1);
  1616. else
  1617. rtw89_phy_write32_mask(rtwdev, R_UPD_P0, B_UPD_P0_EN, 0);
  1618. }
  1619. static void rtw8852c_adc_en(struct rtw89_dev *rtwdev, bool en)
  1620. {
  1621. if (en)
  1622. rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST,
  1623. 0x0);
  1624. else
  1625. rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RST,
  1626. 0xf);
  1627. }
/* Bracket a channel switch. enter=true stops scheduled TX, disables PPDU
 * status / DFS / TSSI / ADC, waits for the pipe to drain, and holds the
 * BB in reset; enter=false re-enables everything in (roughly) the reverse
 * order and resumes TX with the state saved in @p.
 */
static void rtw8852c_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
				      struct rtw89_channel_help_params *p,
				      const struct rtw89_chan *chan,
				      enum rtw89_mac_idx mac_idx,
				      enum rtw89_phy_idx phy_idx)
{
	if (enter) {
		rtw89_chip_stop_sch_tx(rtwdev, mac_idx, &p->tx_en,
				       RTW89_SCH_TX_SEL_ALL);
		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
		rtw8852c_dfs_en(rtwdev, false);
		rtw8852c_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
		rtw8852c_adc_en(rtwdev, false);
		/* allow in-flight RX/TX to drain before the BB is quiesced */
		fsleep(40);
		rtw8852c_bb_reset_en(rtwdev, chan->band_type, phy_idx, false);
	} else {
		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
		rtw8852c_adc_en(rtwdev, true);
		rtw8852c_dfs_en(rtwdev, true);
		rtw8852c_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
		rtw8852c_bb_reset_en(rtwdev, chan->band_type, phy_idx, true);
		rtw89_chip_resume_sch_tx(rtwdev, mac_idx, p->tx_en);
	}
}
/* One-time RF calibration at bring-up: clear TSSI/MCC bookkeeping, then
 * run LCK init, RCK, DACK and an initial RX DCK on PHY0.
 */
static void rtw8852c_rfk_init(struct rtw89_dev *rtwdev)
{
	struct rtw89_mcc_info *mcc_info = &rtwdev->mcc;

	rtwdev->is_tssi_mode[RF_PATH_A] = false;
	rtwdev->is_tssi_mode[RF_PATH_B] = false;
	memset(mcc_info, 0, sizeof(*mcc_info));
	rtw8852c_lck_init(rtwdev);

	rtw8852c_rck(rtwdev);
	rtw8852c_dack(rtwdev);
	rtw8852c_rx_dck(rtwdev, RTW89_PHY_0, false);
}
/* Per-channel RF calibrations (RX DCK, IQK, TSSI, DPK) on PHY0, then
 * notify the firmware of the MCC channel info.
 */
static void rtw8852c_rfk_channel(struct rtw89_dev *rtwdev)
{
	enum rtw89_phy_idx phy_idx = RTW89_PHY_0;

	rtw8852c_mcc_get_ch_info(rtwdev, phy_idx);
	rtw8852c_rx_dck(rtwdev, phy_idx, false);
	rtw8852c_iqk(rtwdev, phy_idx);
	rtw8852c_tssi(rtwdev, phy_idx);
	rtw8852c_dpk(rtwdev, phy_idx);
	rtw89_fw_h2c_rf_ntfy_mcc(rtwdev);
}
/* Lightweight recalibration on a band change during scan: TSSI only. */
static void rtw8852c_rfk_band_changed(struct rtw89_dev *rtwdev,
				      enum rtw89_phy_idx phy_idx)
{
	rtw8852c_tssi_scan(rtwdev, phy_idx);
}
/* Notify the RF calibration code of scan start/stop (PHY0). */
static void rtw8852c_rfk_scan(struct rtw89_dev *rtwdev, bool start)
{
	rtw8852c_wifi_scan_notify(rtwdev, start, RTW89_PHY_0);
}
/* Periodic calibration tracking: DPK, LCK and RX DCK trackers. */
static void rtw8852c_rfk_track(struct rtw89_dev *rtwdev)
{
	rtw8852c_dpk_track(rtwdev);
	rtw8852c_lck_track(rtwdev);
	rtw8852c_rx_dck_track(rtwdev);
}
  1688. static u32 rtw8852c_bb_cal_txpwr_ref(struct rtw89_dev *rtwdev,
  1689. enum rtw89_phy_idx phy_idx, s16 ref)
  1690. {
  1691. s8 ofst_int = 0;
  1692. u8 base_cw_0db = 0x27;
  1693. u16 tssi_16dbm_cw = 0x12c;
  1694. s16 pwr_s10_3 = 0;
  1695. s16 rf_pwr_cw = 0;
  1696. u16 bb_pwr_cw = 0;
  1697. u32 pwr_cw = 0;
  1698. u32 tssi_ofst_cw = 0;
  1699. pwr_s10_3 = (ref << 1) + (s16)(ofst_int) + (s16)(base_cw_0db << 3);
  1700. bb_pwr_cw = FIELD_GET(GENMASK(2, 0), pwr_s10_3);
  1701. rf_pwr_cw = FIELD_GET(GENMASK(8, 3), pwr_s10_3);
  1702. rf_pwr_cw = clamp_t(s16, rf_pwr_cw, 15, 63);
  1703. pwr_cw = (rf_pwr_cw << 3) | bb_pwr_cw;
  1704. tssi_ofst_cw = (u32)((s16)tssi_16dbm_cw + (ref << 1) - (16 << 3));
  1705. rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
  1706. "[TXPWR] tssi_ofst_cw=%d rf_cw=0x%x bb_cw=0x%x\n",
  1707. tssi_ofst_cw, rf_pwr_cw, bb_pwr_cw);
  1708. return (tssi_ofst_cw << 18) | (pwr_cw << 9) | (ref & GENMASK(8, 0));
  1709. }
  1710. static
  1711. void rtw8852c_set_txpwr_ul_tb_offset(struct rtw89_dev *rtwdev,
  1712. s8 pw_ofst, enum rtw89_mac_idx mac_idx)
  1713. {
  1714. s8 pw_ofst_2tx;
  1715. s8 val_1t;
  1716. s8 val_2t;
  1717. u32 reg;
  1718. u8 i;
  1719. if (pw_ofst < -32 || pw_ofst > 31) {
  1720. rtw89_warn(rtwdev, "[ULTB] Err pwr_offset=%d\n", pw_ofst);
  1721. return;
  1722. }
  1723. val_1t = pw_ofst << 2;
  1724. pw_ofst_2tx = max(pw_ofst - 3, -32);
  1725. val_2t = pw_ofst_2tx << 2;
  1726. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[ULTB] val_1tx=0x%x\n", val_1t);
  1727. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[ULTB] val_2tx=0x%x\n", val_2t);
  1728. for (i = 0; i < 4; i++) {
  1729. /* 1TX */
  1730. reg = rtw89_mac_reg_by_idx(R_AX_PWR_UL_TB_1T, mac_idx);
  1731. rtw89_write32_mask(rtwdev, reg,
  1732. B_AX_PWR_UL_TB_1T_V1_MASK << (8 * i),
  1733. val_1t);
  1734. /* 2TX */
  1735. reg = rtw89_mac_reg_by_idx(R_AX_PWR_UL_TB_2T, mac_idx);
  1736. rtw89_write32_mask(rtwdev, reg,
  1737. B_AX_PWR_UL_TB_2T_V1_MASK << (8 * i),
  1738. val_2t);
  1739. }
  1740. }
/* Write the OFDM and CCK TX power reference words (computed by
 * rtw8852c_bb_cal_txpwr_ref()) into both paths' BB registers, after
 * clearing the MAC rate-control power field.
 */
static void rtw8852c_set_txpwr_ref(struct rtw89_dev *rtwdev,
				   enum rtw89_phy_idx phy_idx)
{
	static const u32 addr[RF_PATH_NUM_8852C] = {0x5800, 0x7800};
	const u32 mask = 0x7FFFFFF;
	const u8 ofst_ofdm = 0x4;	/* register offset of the OFDM ref word */
	const u8 ofst_cck = 0x8;	/* register offset of the CCK ref word */
	s16 ref_ofdm = 0;
	s16 ref_cck = 0;
	u32 val;
	u8 i;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");

	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_AX_PWR_RATE_CTRL,
				     GENMASK(27, 10), 0x0);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb ofdm txpwr ref\n");
	val = rtw8852c_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_ofdm);
	for (i = 0; i < RF_PATH_NUM_8852C; i++)
		rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_ofdm, mask, val,
				      phy_idx);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set bb cck txpwr ref\n");
	val = rtw8852c_bb_cal_txpwr_ref(rtwdev, phy_idx, ref_cck);
	for (i = 0; i < RF_PATH_NUM_8852C; i++)
		rtw89_phy_write32_idx(rtwdev, addr[i] + ofst_cck, mask, val,
				      phy_idx);
}
  1766. static void rtw8852c_set_txpwr_byrate(struct rtw89_dev *rtwdev,
  1767. const struct rtw89_chan *chan,
  1768. enum rtw89_phy_idx phy_idx)
  1769. {
  1770. u8 band = chan->band_type;
  1771. u8 ch = chan->channel;
  1772. static const u8 rs[] = {
  1773. RTW89_RS_CCK,
  1774. RTW89_RS_OFDM,
  1775. RTW89_RS_MCS,
  1776. RTW89_RS_HEDCM,
  1777. };
  1778. s8 tmp;
  1779. u8 i, j;
  1780. u32 val, shf, addr = R_AX_PWR_BY_RATE;
  1781. struct rtw89_rate_desc cur;
  1782. rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
  1783. "[TXPWR] set txpwr byrate with ch=%d\n", ch);
  1784. for (cur.nss = 0; cur.nss <= RTW89_NSS_2; cur.nss++) {
  1785. for (i = 0; i < ARRAY_SIZE(rs); i++) {
  1786. if (cur.nss >= rtw89_rs_nss_max[rs[i]])
  1787. continue;
  1788. val = 0;
  1789. cur.rs = rs[i];
  1790. for (j = 0; j < rtw89_rs_idx_max[rs[i]]; j++) {
  1791. cur.idx = j;
  1792. shf = (j % 4) * 8;
  1793. tmp = rtw89_phy_read_txpwr_byrate(rtwdev, band,
  1794. &cur);
  1795. val |= (tmp << shf);
  1796. if ((j + 1) % 4)
  1797. continue;
  1798. rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
  1799. val = 0;
  1800. addr += 4;
  1801. }
  1802. }
  1803. }
  1804. }
  1805. static void rtw8852c_set_txpwr_offset(struct rtw89_dev *rtwdev,
  1806. const struct rtw89_chan *chan,
  1807. enum rtw89_phy_idx phy_idx)
  1808. {
  1809. u8 band = chan->band_type;
  1810. struct rtw89_rate_desc desc = {
  1811. .nss = RTW89_NSS_1,
  1812. .rs = RTW89_RS_OFFSET,
  1813. };
  1814. u32 val = 0;
  1815. s8 v;
  1816. rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr offset\n");
  1817. for (desc.idx = 0; desc.idx < RTW89_RATE_OFFSET_MAX; desc.idx++) {
  1818. v = rtw89_phy_read_txpwr_byrate(rtwdev, band, &desc);
  1819. val |= ((v & 0xf) << (4 * desc.idx));
  1820. }
  1821. rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_AX_PWR_RATE_OFST_CTRL,
  1822. GENMASK(19, 0), val);
  1823. }
/* Load the 8-tap, 24-bit DFIR TX-shaping coefficients for 2 GHz CCK:
 * "flat" (tx_shape_idx == 0) or "sharp" spectral shaping, with a
 * dedicated set for channel 14. Channels above 14 are rejected.
 */
static void rtw8852c_bb_set_tx_shape_dfir(struct rtw89_dev *rtwdev,
					  u8 tx_shape_idx,
					  enum rtw89_phy_idx phy_idx)
{
#define __DFIR_CFG_MASK 0xffffff
#define __DFIR_CFG_NR 8
#define __DECL_DFIR_VAR(_prefix, _name, _val...) \
	static const u32 _prefix ## _ ## _name[] = {_val}; \
	static_assert(ARRAY_SIZE(_prefix ## _ ## _name) == __DFIR_CFG_NR)
#define __DECL_DFIR_PARAM(_name, _val...) __DECL_DFIR_VAR(param, _name, _val)
#define __DECL_DFIR_ADDR(_name, _val...) __DECL_DFIR_VAR(addr, _name, _val)
	__DECL_DFIR_PARAM(flat,
			  0x003D23FF, 0x0029B354, 0x000FC1C8, 0x00FDB053,
			  0x00F86F9A, 0x00FAEF92, 0x00FE5FCC, 0x00FFDFF5);
	__DECL_DFIR_PARAM(sharp,
			  0x003D83FF, 0x002C636A, 0x0013F204, 0x00008090,
			  0x00F87FB0, 0x00F99F83, 0x00FDBFBA, 0x00003FF5);
	__DECL_DFIR_PARAM(sharp_14,
			  0x003B13FF, 0x001C42DE, 0x00FDB0AD, 0x00F60F6E,
			  0x00FD8F92, 0x0002D011, 0x0001C02C, 0x00FFF00A);
	__DECL_DFIR_ADDR(filter,
			 0x45BC, 0x45CC, 0x45D0, 0x45D4, 0x45D8, 0x45C0,
			 0x45C4, 0x45C8);
	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
	u8 ch = chan->channel;
	const u32 *param;
	int i;

	if (ch > 14) {
		rtw89_warn(rtwdev,
			   "set tx shape dfir by unknown ch: %d on 2G\n", ch);
		return;
	}

	/* channel 14 always uses its own coefficient set */
	if (ch == 14)
		param = param_sharp_14;
	else
		param = tx_shape_idx == 0 ? param_flat : param_sharp;

	for (i = 0; i < __DFIR_CFG_NR; i++) {
		rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
			    "set tx shape dfir: 0x%x: 0x%x\n", addr_filter[i],
			    param[i]);
		rtw89_phy_write32_idx(rtwdev, addr_filter[i], __DFIR_CFG_MASK,
				      param[i], phy_idx);
	}

#undef __DECL_DFIR_ADDR
#undef __DECL_DFIR_PARAM
#undef __DECL_DFIR_VAR
#undef __DFIR_CFG_NR
#undef __DFIR_CFG_MASK
}
/* Apply the regulatory TX spectral shape for the current band: CCK DFIR
 * coefficients on 2 GHz, and the OFDM band-edge TSSI configuration on
 * all bands.
 */
static void rtw8852c_set_tx_shape(struct rtw89_dev *rtwdev,
				  const struct rtw89_chan *chan,
				  enum rtw89_phy_idx phy_idx)
{
	u8 band = chan->band_type;
	u8 regd = rtw89_regd_get(rtwdev, band);
	u8 tx_shape_cck = rtw89_8852c_tx_shape[band][RTW89_RS_CCK][regd];
	u8 tx_shape_ofdm = rtw89_8852c_tx_shape[band][RTW89_RS_OFDM][regd];

	if (band == RTW89_BAND_2G)
		rtw8852c_bb_set_tx_shape_dfir(rtwdev, tx_shape_cck, phy_idx);

	rtw89_phy_tssi_ctrl_set_bandedge_cfg(rtwdev,
					     (enum rtw89_mac_idx)phy_idx,
					     tx_shape_ofdm);
}
/* Fill and flush the TX power limit table for each TX chain count: the
 * struct is copied into the MAC registers four s8 entries per 32-bit
 * word, one 40-byte page per chain.
 */
static void rtw8852c_set_txpwr_limit(struct rtw89_dev *rtwdev,
				     const struct rtw89_chan *chan,
				     enum rtw89_phy_idx phy_idx)
{
#define __MAC_TXPWR_LMT_PAGE_SIZE 40
	u8 ch = chan->channel;
	u8 bw = chan->band_width;
	struct rtw89_txpwr_limit lmt[NTX_NUM_8852C];
	u32 addr, val;
	const s8 *ptr;
	u8 i, j;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit with ch=%d bw=%d\n", ch, bw);

	for (i = 0; i < NTX_NUM_8852C; i++) {
		rtw89_phy_fill_txpwr_limit(rtwdev, chan, &lmt[i], i);

		/* pack four s8 limit entries into each register word */
		for (j = 0; j < __MAC_TXPWR_LMT_PAGE_SIZE; j += 4) {
			addr = R_AX_PWR_LMT + j + __MAC_TXPWR_LMT_PAGE_SIZE * i;
			ptr = (s8 *)&lmt[i] + j;

			val = FIELD_PREP(GENMASK(7, 0), ptr[0]) |
			      FIELD_PREP(GENMASK(15, 8), ptr[1]) |
			      FIELD_PREP(GENMASK(23, 16), ptr[2]) |
			      FIELD_PREP(GENMASK(31, 24), ptr[3]);

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
#undef __MAC_TXPWR_LMT_PAGE_SIZE
}
/* Same scheme as rtw8852c_set_txpwr_limit(), but for the RU (OFDMA)
 * limit table: 24-byte pages starting at R_AX_PWR_RU_LMT.
 */
static void rtw8852c_set_txpwr_limit_ru(struct rtw89_dev *rtwdev,
					const struct rtw89_chan *chan,
					enum rtw89_phy_idx phy_idx)
{
#define __MAC_TXPWR_LMT_RU_PAGE_SIZE 24
	u8 ch = chan->channel;
	u8 bw = chan->band_width;
	struct rtw89_txpwr_limit_ru lmt_ru[NTX_NUM_8852C];
	u32 addr, val;
	const s8 *ptr;
	u8 i, j;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit ru with ch=%d bw=%d\n", ch, bw);

	for (i = 0; i < NTX_NUM_8852C; i++) {
		rtw89_phy_fill_txpwr_limit_ru(rtwdev, chan, &lmt_ru[i], i);

		/* pack four s8 limit entries into each register word */
		for (j = 0; j < __MAC_TXPWR_LMT_RU_PAGE_SIZE; j += 4) {
			addr = R_AX_PWR_RU_LMT + j +
			       __MAC_TXPWR_LMT_RU_PAGE_SIZE * i;
			ptr = (s8 *)&lmt_ru[i] + j;

			val = FIELD_PREP(GENMASK(7, 0), ptr[0]) |
			      FIELD_PREP(GENMASK(15, 8), ptr[1]) |
			      FIELD_PREP(GENMASK(23, 16), ptr[2]) |
			      FIELD_PREP(GENMASK(31, 24), ptr[3]);

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
#undef __MAC_TXPWR_LMT_RU_PAGE_SIZE
}
/* Apply all TX power tables for the channel: by-rate, rate offsets,
 * spectral shape, and the regulatory limit / RU-limit tables.
 */
static void rtw8852c_set_txpwr(struct rtw89_dev *rtwdev,
			       const struct rtw89_chan *chan,
			       enum rtw89_phy_idx phy_idx)
{
	rtw8852c_set_txpwr_byrate(rtwdev, chan, phy_idx);
	rtw8852c_set_txpwr_offset(rtwdev, chan, phy_idx);
	rtw8852c_set_tx_shape(rtwdev, chan, phy_idx);
	rtw8852c_set_txpwr_limit(rtwdev, chan, phy_idx);
	rtw8852c_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
}
/* Chip-ops hook for TX power control init; only the reference needs setting. */
static void rtw8852c_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
				    enum rtw89_phy_idx phy_idx)
{
	rtw8852c_set_txpwr_ref(rtwdev, phy_idx);
}
/* Initialize the MAC-side TSSI control block: clear the whole register
 * window, load the fixed init values, then set a flat band-edge config.
 */
static void
rtw8852c_init_tssi_ctrl(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
	static const struct rtw89_reg2_def ctrl_ini[] = {
		{0xD938, 0x00010100},
		{0xD93C, 0x0500D500},
		{0xD940, 0x00000500},
		{0xD944, 0x00000005},
		{0xD94C, 0x00220000},
		{0xD950, 0x00030000},
	};
	u32 addr;
	int i;

	/* zero the entire TSSI control register window first */
	for (addr = R_AX_TSSI_CTRL_HEAD; addr <= R_AX_TSSI_CTRL_TAIL; addr += 4)
		rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, 0);

	for (i = 0; i < ARRAY_SIZE(ctrl_ini); i++)
		rtw89_mac_txpwr_write32(rtwdev, phy_idx, ctrl_ini[i].addr,
					ctrl_ini[i].data);

	rtw89_phy_tssi_ctrl_set_bandedge_cfg(rtwdev,
					     (enum rtw89_mac_idx)phy_idx,
					     RTW89_TSSI_BANDEDGE_FLAT);
}
/* One-time TX power unit init for a PHY: fixed UL/coex control values,
 * a zero UL TB offset, and the TSSI control block.
 * Returns 0 on success or the first failing write's error code.
 */
static int
rtw8852c_init_txpwr_unit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
	int ret;

	ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL2, 0x07763333);
	if (ret)
		return ret;

	ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_COEXT_CTRL, 0x01ebf000);
	if (ret)
		return ret;

	ret = rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_AX_PWR_UL_CTRL0, 0x0002f8ff);
	if (ret)
		return ret;

	/* PHY index maps 1:1 onto the MAC index here */
	rtw8852c_set_txpwr_ul_tb_offset(rtwdev, 0, phy_idx == RTW89_PHY_1 ?
						   RTW89_MAC_1 :
						   RTW89_MAC_0);
	rtw8852c_init_tssi_ctrl(rtwdev, phy_idx);
	return 0;
}
/* Configure BB RX path selection: antenna segment mapping, 1R CCA,
 * HT/VHT MCS limits and HE max-NSS, then pulse the per-path TX power
 * reset bits (write 1 then 3) so the new path configuration takes
 * effect.  In DBCC mode each PHY is pinned to one path; otherwise
 * @rx_path selects path A, path B or both.
 */
static void rtw8852c_bb_cfg_rx_path(struct rtw89_dev *rtwdev, u8 rx_path)
{
	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
	u8 band = chan->band_type;
	u32 rst_mask0 = B_P0_TXPW_RSTB_MANON | B_P0_TXPW_RSTB_TSSI;
	u32 rst_mask1 = B_P1_TXPW_RSTB_MANON | B_P1_TXPW_RSTB_TSSI;

	if (rtwdev->dbcc_en) {
		/* DBCC: PHY0 takes segment value 1, PHY1 takes value 2 */
		rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_ANT_RX_SEG0, 1);
		rtw89_phy_write32_idx(rtwdev, R_CHBW_MOD, B_ANT_RX_SEG0, 2,
				      RTW89_PHY_1);
		rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG0,
				       1);
		rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG1,
				       1);
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG0, 2,
				      RTW89_PHY_1);
		rtw89_phy_write32_idx(rtwdev, R_FC0_BW, B_ANT_RX_1RCCA_SEG1, 2,
				      RTW89_PHY_1);
		/* single path per PHY: 1SS limits on both PHYs */
		rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
				       B_RXHT_MCS_LIMIT, 0);
		rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
				       B_RXVHT_MCS_LIMIT, 0);
		rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 8);
		rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
		rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
		rtw89_phy_write32_idx(rtwdev, R_RXHT_MCS_LIMIT,
				      B_RXHT_MCS_LIMIT, 0, RTW89_PHY_1);
		rtw89_phy_write32_idx(rtwdev, R_RXVHT_MCS_LIMIT,
				      B_RXVHT_MCS_LIMIT, 0, RTW89_PHY_1);
		/* NOTE(review): PHY0 allows 8 HE users, PHY1 only 1 - confirm
		 * this asymmetry is intended for DBCC.
		 */
		rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHE_USER_MAX, 1,
				      RTW89_PHY_1);
		rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0,
				      RTW89_PHY_1);
		rtw89_phy_write32_idx(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0,
				      RTW89_PHY_1);
		/* pulse TX power reset on both paths */
		rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 1);
		rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB, rst_mask0, 3);
		rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, rst_mask1, 1);
		rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB, rst_mask1, 3);
	} else {
		if (rx_path == RF_PATH_A) {
			/* path A only: segment = 1, 1SS limits */
			rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
					       B_ANT_RX_SEG0, 1);
			rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
					       B_ANT_RX_1RCCA_SEG0, 1);
			rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
					       B_ANT_RX_1RCCA_SEG1, 1);
			rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
					       B_RXHT_MCS_LIMIT, 0);
			rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
					       B_RXVHT_MCS_LIMIT, 0);
			rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
					       0);
			rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
					       0);
			rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
					       rst_mask0, 1);
			rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
					       rst_mask0, 3);
		} else if (rx_path == RF_PATH_B) {
			/* path B only: segment = 2, 1SS limits */
			rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
					       B_ANT_RX_SEG0, 2);
			rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
					       B_ANT_RX_1RCCA_SEG0, 2);
			rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
					       B_ANT_RX_1RCCA_SEG1, 2);
			rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
					       B_RXHT_MCS_LIMIT, 0);
			rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
					       B_RXVHT_MCS_LIMIT, 0);
			rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
					       0);
			rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
					       0);
			rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
					       rst_mask1, 1);
			rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
					       rst_mask1, 3);
		} else {
			/* both paths: segment = 3, 2SS limits */
			rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD,
					       B_ANT_RX_SEG0, 3);
			rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
					       B_ANT_RX_1RCCA_SEG0, 3);
			rtw89_phy_write32_mask(rtwdev, R_FC0_BW,
					       B_ANT_RX_1RCCA_SEG1, 3);
			rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT,
					       B_RXHT_MCS_LIMIT, 1);
			rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT,
					       B_RXVHT_MCS_LIMIT, 1);
			rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS,
					       1);
			rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS,
					       1);
			/* re-apply BTG sharing for the 2 GHz band */
			rtw8852c_ctrl_btg(rtwdev, band == RTW89_BAND_2G);
			rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
					       rst_mask0, 1);
			rtw89_phy_write32_mask(rtwdev, R_P0_TXPW_RSTB,
					       rst_mask0, 3);
			rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
					       rst_mask1, 1);
			rtw89_phy_write32_mask(rtwdev, R_P1_TXPW_RSTB,
					       rst_mask1, 3);
		}
		rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_USER_MAX, 8);
	}
}
  2104. static void rtw8852c_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, u8 tx_path,
  2105. enum rtw89_mac_idx mac_idx)
  2106. {
  2107. struct rtw89_reg2_def path_com[] = {
  2108. {R_AX_PATH_COM0, AX_PATH_COM0_DFVAL},
  2109. {R_AX_PATH_COM1, AX_PATH_COM1_DFVAL},
  2110. {R_AX_PATH_COM2, AX_PATH_COM2_DFVAL},
  2111. {R_AX_PATH_COM3, AX_PATH_COM3_DFVAL},
  2112. {R_AX_PATH_COM4, AX_PATH_COM4_DFVAL},
  2113. {R_AX_PATH_COM5, AX_PATH_COM5_DFVAL},
  2114. {R_AX_PATH_COM6, AX_PATH_COM6_DFVAL},
  2115. {R_AX_PATH_COM7, AX_PATH_COM7_DFVAL},
  2116. {R_AX_PATH_COM8, AX_PATH_COM8_DFVAL},
  2117. {R_AX_PATH_COM9, AX_PATH_COM9_DFVAL},
  2118. {R_AX_PATH_COM10, AX_PATH_COM10_DFVAL},
  2119. {R_AX_PATH_COM11, AX_PATH_COM11_DFVAL},
  2120. };
  2121. u32 addr;
  2122. u32 reg;
  2123. u8 cr_size = ARRAY_SIZE(path_com);
  2124. u8 i = 0;
  2125. rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0, RTW89_PHY_0);
  2126. rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL_MOD, 0, RTW89_PHY_1);
  2127. for (addr = R_AX_MACID_ANT_TABLE;
  2128. addr <= R_AX_MACID_ANT_TABLE_LAST; addr += 4) {
  2129. reg = rtw89_mac_reg_by_idx(addr, mac_idx);
  2130. rtw89_write32(rtwdev, reg, 0);
  2131. }
  2132. if (tx_path == RF_A) {
  2133. path_com[0].data = AX_PATH_COM0_PATHA;
  2134. path_com[1].data = AX_PATH_COM1_PATHA;
  2135. path_com[2].data = AX_PATH_COM2_PATHA;
  2136. path_com[7].data = AX_PATH_COM7_PATHA;
  2137. path_com[8].data = AX_PATH_COM8_PATHA;
  2138. } else if (tx_path == RF_B) {
  2139. path_com[0].data = AX_PATH_COM0_PATHB;
  2140. path_com[1].data = AX_PATH_COM1_PATHB;
  2141. path_com[2].data = AX_PATH_COM2_PATHB;
  2142. path_com[7].data = AX_PATH_COM7_PATHB;
  2143. path_com[8].data = AX_PATH_COM8_PATHB;
  2144. } else if (tx_path == RF_AB) {
  2145. path_com[0].data = AX_PATH_COM0_PATHAB;
  2146. path_com[1].data = AX_PATH_COM1_PATHAB;
  2147. path_com[2].data = AX_PATH_COM2_PATHAB;
  2148. path_com[7].data = AX_PATH_COM7_PATHAB;
  2149. path_com[8].data = AX_PATH_COM8_PATHAB;
  2150. } else {
  2151. rtw89_warn(rtwdev, "[Invalid Tx Path]Tx Path: %d\n", tx_path);
  2152. return;
  2153. }
  2154. for (i = 0; i < cr_size; i++) {
  2155. rtw89_debug(rtwdev, RTW89_DBG_TSSI, "0x%x = 0x%x\n",
  2156. path_com[i].addr, path_com[i].data);
  2157. reg = rtw89_mac_reg_by_idx(path_com[i].addr, mac_idx);
  2158. rtw89_write32(rtwdev, reg, path_com[i].data);
  2159. }
  2160. }
  2161. static void rtw8852c_bb_ctrl_btc_preagc(struct rtw89_dev *rtwdev, bool bt_en)
  2162. {
  2163. if (bt_en) {
  2164. rtw89_phy_write32_mask(rtwdev, R_PATH0_FRC_FIR_TYPE_V1,
  2165. B_PATH0_FRC_FIR_TYPE_MSK_V1, 0x3);
  2166. rtw89_phy_write32_mask(rtwdev, R_PATH1_FRC_FIR_TYPE_V1,
  2167. B_PATH1_FRC_FIR_TYPE_MSK_V1, 0x3);
  2168. rtw89_phy_write32_mask(rtwdev, R_PATH0_RXBB_V1,
  2169. B_PATH0_RXBB_MSK_V1, 0xf);
  2170. rtw89_phy_write32_mask(rtwdev, R_PATH1_RXBB_V1,
  2171. B_PATH1_RXBB_MSK_V1, 0xf);
  2172. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  2173. B_PATH0_G_LNA6_OP1DB_V1, 0x80);
  2174. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
  2175. B_PATH1_G_LNA6_OP1DB_V1, 0x80);
  2176. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  2177. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x80);
  2178. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA1_LNA6_OP1DB_V1,
  2179. B_PATH0_G_TIA1_LNA6_OP1DB_V1, 0x80);
  2180. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
  2181. B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x80);
  2182. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA1_LNA6_OP1DB_V1,
  2183. B_PATH1_G_TIA1_LNA6_OP1DB_V1, 0x80);
  2184. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_BACKOFF_V1,
  2185. B_PATH0_BT_BACKOFF_V1, 0x780D1E);
  2186. rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_BACKOFF_V1,
  2187. B_PATH1_BT_BACKOFF_V1, 0x780D1E);
  2188. rtw89_phy_write32_mask(rtwdev, R_P0_BACKOFF_IBADC_V1,
  2189. B_P0_BACKOFF_IBADC_V1, 0x34);
  2190. rtw89_phy_write32_mask(rtwdev, R_P1_BACKOFF_IBADC_V1,
  2191. B_P1_BACKOFF_IBADC_V1, 0x34);
  2192. } else {
  2193. rtw89_phy_write32_mask(rtwdev, R_PATH0_FRC_FIR_TYPE_V1,
  2194. B_PATH0_FRC_FIR_TYPE_MSK_V1, 0x0);
  2195. rtw89_phy_write32_mask(rtwdev, R_PATH1_FRC_FIR_TYPE_V1,
  2196. B_PATH1_FRC_FIR_TYPE_MSK_V1, 0x0);
  2197. rtw89_phy_write32_mask(rtwdev, R_PATH0_RXBB_V1,
  2198. B_PATH0_RXBB_MSK_V1, 0x60);
  2199. rtw89_phy_write32_mask(rtwdev, R_PATH1_RXBB_V1,
  2200. B_PATH1_RXBB_MSK_V1, 0x60);
  2201. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_LNA6_OP1DB_V1,
  2202. B_PATH0_G_LNA6_OP1DB_V1, 0x1a);
  2203. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
  2204. B_PATH1_G_LNA6_OP1DB_V1, 0x1a);
  2205. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA0_LNA6_OP1DB_V1,
  2206. B_PATH0_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  2207. rtw89_phy_write32_mask(rtwdev, R_PATH0_G_TIA1_LNA6_OP1DB_V1,
  2208. B_PATH0_G_TIA1_LNA6_OP1DB_V1, 0x2a);
  2209. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
  2210. B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x2a);
  2211. rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA1_LNA6_OP1DB_V1,
  2212. B_PATH1_G_TIA1_LNA6_OP1DB_V1, 0x2a);
  2213. rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_BACKOFF_V1,
  2214. B_PATH0_BT_BACKOFF_V1, 0x79E99E);
  2215. rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_BACKOFF_V1,
  2216. B_PATH1_BT_BACKOFF_V1, 0x79E99E);
  2217. rtw89_phy_write32_mask(rtwdev, R_P0_BACKOFF_IBADC_V1,
  2218. B_P0_BACKOFF_IBADC_V1, 0x26);
  2219. rtw89_phy_write32_mask(rtwdev, R_P1_BACKOFF_IBADC_V1,
  2220. B_P1_BACKOFF_IBADC_V1, 0x26);
  2221. }
  2222. }
  2223. static void rtw8852c_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
  2224. {
  2225. struct rtw89_hal *hal = &rtwdev->hal;
  2226. rtw8852c_bb_cfg_rx_path(rtwdev, RF_PATH_AB);
  2227. if (hal->rx_nss == 1) {
  2228. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 0);
  2229. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 0);
  2230. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 0);
  2231. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 0);
  2232. } else {
  2233. rtw89_phy_write32_mask(rtwdev, R_RXHT_MCS_LIMIT, B_RXHT_MCS_LIMIT, 1);
  2234. rtw89_phy_write32_mask(rtwdev, R_RXVHT_MCS_LIMIT, B_RXVHT_MCS_LIMIT, 1);
  2235. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHE_MAX_NSS, 1);
  2236. rtw89_phy_write32_mask(rtwdev, R_RXHE, B_RXHETB_MAX_NSS, 1);
  2237. }
  2238. }
/* Read the RF thermal sensor on @rf_path: pulse the measurement trigger
 * bit (1 -> 0 -> 1), wait for the reading to settle, then return the raw
 * thermal value.
 */
static u8 rtw8852c_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
{
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);

	fsleep(200);

	return rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL);
}
  2247. static void rtw8852c_btc_set_rfe(struct rtw89_dev *rtwdev)
  2248. {
  2249. struct rtw89_btc *btc = &rtwdev->btc;
  2250. struct rtw89_btc_module *module = &btc->mdinfo;
  2251. module->rfe_type = rtwdev->efuse.rfe_type;
  2252. module->cv = rtwdev->hal.cv;
  2253. module->bt_solo = 0;
  2254. module->switch_type = BTC_SWITCH_INTERNAL;
  2255. if (module->rfe_type > 0)
  2256. module->ant.num = (module->rfe_type % 2 ? 2 : 3);
  2257. else
  2258. module->ant.num = 2;
  2259. module->ant.diversity = 0;
  2260. module->ant.isolation = 10;
  2261. if (module->ant.num == 3) {
  2262. module->ant.type = BTC_ANT_DEDICATED;
  2263. module->bt_pos = BTC_BT_ALONE;
  2264. } else {
  2265. module->ant.type = BTC_ANT_SHARED;
  2266. module->bt_pos = BTC_BT_BTG;
  2267. }
  2268. }
/* Enable/disable BTG mode (BT sharing the path-B RX front end with
 * WLAN).  The two branches intentionally program slightly different
 * register sets - e.g. B_PMAC_GNT_P2 is only written on disable - so
 * they are kept as explicit sequences.
 */
static void rtw8852c_ctrl_btg(struct rtw89_dev *rtwdev, bool btg)
{
	if (btg) {
		rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
				       B_PATH0_BT_SHARE_V1, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
				       B_PATH0_BTG_PATH_V1, 0x0);
		/* raise path-B LNA6/TIA OP1dB points for co-rx */
		rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
				       B_PATH1_G_LNA6_OP1DB_V1, 0x20);
		rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
				       B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x30);
		rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_SHARE_V1,
				       B_PATH1_BT_SHARE_V1, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_PATH1_BTG_PATH_V1,
				       B_PATH1_BTG_PATH_V1, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_BT_SHARE, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_BT_SEG0, 0x2);
		rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN,
				       B_BT_DYN_DC_EST_EN_MSK, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
				       0x1);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_PATH0_BT_SHARE_V1,
				       B_PATH0_BT_SHARE_V1, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_PATH0_BTG_PATH_V1,
				       B_PATH0_BTG_PATH_V1, 0x0);
		/* restore standalone-WLAN OP1dB points */
		rtw89_phy_write32_mask(rtwdev, R_PATH1_G_LNA6_OP1DB_V1,
				       B_PATH1_G_LNA6_OP1DB_V1, 0x1a);
		rtw89_phy_write32_mask(rtwdev, R_PATH1_G_TIA0_LNA6_OP1DB_V1,
				       B_PATH1_G_TIA0_LNA6_OP1DB_V1, 0x2a);
		rtw89_phy_write32_mask(rtwdev, R_PATH1_BT_SHARE_V1,
				       B_PATH1_BT_SHARE_V1, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_PATH1_BTG_PATH_V1,
				       B_PATH1_BTG_PATH_V1, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xf);
		rtw89_phy_write32_mask(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P2, 0x4);
		rtw89_phy_write32_mask(rtwdev, R_CHBW_MOD, B_BT_SHARE, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_FC0_BW, B_ANT_RX_BT_SEG0, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_BT_DYN_DC_EST_EN,
				       B_BT_DYN_DC_EST_EN_MSK, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
				       0x0);
	}
}
/* Write one TRX mask LUT entry for coex: open the LUT write-enable
 * window (0x20000), select @group, program the mask @val, then close
 * the window again.  The open/write/close order is required.
 */
static
void rtw8852c_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
{
	rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x20000);
	rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
	rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
	rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x0);
}
/* One-time BT-coexistence hardware setup: initialize the PTA, give WL
 * TX responses and beacons high priority, disable RF GNT debug, program
 * the TRX mask tables according to the antenna topology, set the PTA
 * break table and enable the BT counters.
 */
static void rtw8852c_btc_init_cfg(struct rtw89_dev *rtwdev)
{
	struct rtw89_btc *btc = &rtwdev->btc;
	struct rtw89_btc_module *module = &btc->mdinfo;
	const struct rtw89_chip_info *chip = rtwdev->chip;
	const struct rtw89_mac_ax_coex coex_params = {
		.pta_mode = RTW89_MAC_AX_COEX_RTK_MODE,
		.direction = RTW89_MAC_AX_COEX_INNER,
	};

	/* PTA init */
	rtw89_mac_coex_init_v1(rtwdev, &coex_params);

	/* set WL Tx response = Hi-Pri */
	chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_TX_RESP, true);
	chip->ops->btc_set_wl_pri(rtwdev, BTC_PRI_MASK_BEACON, true);

	/* set rf gnt debug off */
	rtw89_write_rf(rtwdev, RF_PATH_A, RR_WLSEL, RFREG_MASK, 0x0);
	rtw89_write_rf(rtwdev, RF_PATH_B, RR_WLSEL, RFREG_MASK, 0x0);

	/* set WL Tx thru in TRX mask table if GNT_WL = 0 && BT_S1 = ss group */
	if (module->ant.type == BTC_ANT_SHARED) {
		rtw8852c_set_trx_mask(rtwdev,
				      RF_PATH_A, BTC_BT_SS_GROUP, 0x5ff);
		rtw8852c_set_trx_mask(rtwdev,
				      RF_PATH_B, BTC_BT_SS_GROUP, 0x5ff);
		/* set path-A(S0) Tx/Rx no-mask if GNT_WL=0 && BT_S1=tx group */
		rtw8852c_set_trx_mask(rtwdev,
				      RF_PATH_A, BTC_BT_TX_GROUP, 0x5ff);
	} else { /* set WL Tx stb if GNT_WL = 0 && BT_S1 = ss group for 3-ant */
		rtw8852c_set_trx_mask(rtwdev,
				      RF_PATH_A, BTC_BT_SS_GROUP, 0x5df);
		rtw8852c_set_trx_mask(rtwdev,
				      RF_PATH_B, BTC_BT_SS_GROUP, 0x5df);
	}

	/* set PTA break table */
	rtw89_write32(rtwdev, R_AX_BT_BREAK_TABLE, BTC_BREAK_PARAM);

	/* enable BT counter 0xda10[1:0] = 2b'11 */
	rtw89_write32_set(rtwdev,
			  R_AX_BT_CNT_CFG, B_AX_BT_CNT_EN |
			  B_AX_BT_CNT_RST_V1);
	btc->cx.wl.status.map.init_ok = true;
}
  2362. static
  2363. void rtw8852c_btc_set_wl_pri(struct rtw89_dev *rtwdev, u8 map, bool state)
  2364. {
  2365. u32 bitmap = 0;
  2366. u32 reg = 0;
  2367. switch (map) {
  2368. case BTC_PRI_MASK_TX_RESP:
  2369. reg = R_BTC_COEX_WL_REQ;
  2370. bitmap = B_BTC_RSP_ACK_HI;
  2371. break;
  2372. case BTC_PRI_MASK_BEACON:
  2373. reg = R_BTC_COEX_WL_REQ;
  2374. bitmap = B_BTC_TX_BCN_HI;
  2375. break;
  2376. default:
  2377. return;
  2378. }
  2379. if (state)
  2380. rtw89_write32_set(rtwdev, reg, bitmap);
  2381. else
  2382. rtw89_write32_clr(rtwdev, reg, bitmap);
  2383. }
/* Layout of the 32-bit WL TX power control word used by the coex
 * mechanism: the low half carries the all-time TX power override, the
 * high half the TX power applied while GNT_BT is asserted.  A half
 * equal to 0xffff means "no override" (see
 * rtw8852c_btc_set_wl_txpwr_ctrl()).
 */
union rtw8852c_btc_wl_txpwr_ctrl {
	u32 txpwr_val;
	struct {
		union {
			u16 ctrl_all_time;
			struct {
				s16 data:9;	/* signed power value */
				u16 rsvd:6;
				u16 flag:1;
			} all_time;
		};
		union {
			u16 ctrl_gnt_bt;
			struct {
				s16 data:9;	/* signed power value */
				u16 rsvd:7;
			} gnt_bt;
		};
	};
} __packed;
/* Apply the coex-requested WL TX power overrides encoded in @txpwr_val
 * (see union rtw8852c_btc_wl_txpwr_ctrl): one forced-power value for
 * all-time operation and one used while GNT_BT is asserted.
 */
static void
rtw8852c_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
{
	union rtw8852c_btc_wl_txpwr_ctrl arg = { .txpwr_val = txpwr_val };
	s32 val;

/* Write the power value (_val in field _msk) together with its enable
 * bit (_en) in a single masked register write; _cond decides whether
 * the override is switched on or off.
 */
#define __write_ctrl(_reg, _msk, _val, _en, _cond)		\
do {								\
	u32 _wrt = FIELD_PREP(_msk, _val);			\
	BUILD_BUG_ON((_msk & _en) != 0);			\
	if (_cond)						\
		_wrt |= _en;					\
	else							\
		_wrt &= ~_en;					\
	rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, _reg,	\
				     _msk | _en, _wrt);		\
} while (0)

	/* 0xffff is the "no override" sentinel for each 16-bit half */
	switch (arg.ctrl_all_time) {
	case 0xffff:
		val = 0;
		break;
	default:
		val = arg.all_time.data;
		break;
	}
	__write_ctrl(R_AX_PWR_RATE_CTRL, B_AX_FORCE_PWR_BY_RATE_VALUE_MASK,
		     val, B_AX_FORCE_PWR_BY_RATE_EN,
		     arg.ctrl_all_time != 0xffff);

	switch (arg.ctrl_gnt_bt) {
	case 0xffff:
		val = 0;
		break;
	default:
		val = arg.gnt_bt.data;
		break;
	}
	__write_ctrl(R_AX_PWR_COEXT_CTRL, B_AX_TXAGC_BT_MASK, val,
		     B_AX_TXAGC_BT_EN, arg.ctrl_gnt_bt != 0xffff);

#undef __write_ctrl
}
  2443. static
  2444. s8 rtw8852c_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
  2445. {
  2446. return clamp_t(s8, val, -100, 0) + 100;
  2447. }
/* RF TRX parameter table for WL uplink.
 * NOTE(review): entries appear to be selected by dm->trx_para_level
 * (see rtw8852c_btc_bt_aci_imp()) - confirm against the coex core.
 */
static const struct rtw89_btc_rf_trx_para rtw89_btc_8852c_rf_ul[] = {
	{255, 0, 0, 7}, /* 0 -> original */
	{255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
	{255, 0, 0, 7}, /* 2 ->reserved for shared-antenna */
	{255, 0, 0, 7}, /* 3- >reserved for shared-antenna */
	{255, 0, 0, 7}, /* 4 ->reserved for shared-antenna */
	{255, 0, 0, 7}, /* the below id is for non-shared-antenna free-run */
	{6, 1, 0, 7},
	{13, 1, 0, 7},
	{13, 1, 0, 7}
};

/* RF TRX parameter table for WL downlink; same indexing as the UL table */
static const struct rtw89_btc_rf_trx_para rtw89_btc_8852c_rf_dl[] = {
	{255, 0, 0, 7}, /* 0 -> original */
	{255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
	{255, 0, 0, 7}, /* 2 ->reserved for shared-antenna */
	{255, 0, 0, 7}, /* 3- >reserved for shared-antenna */
	{255, 0, 0, 7}, /* 4 ->reserved for shared-antenna */
	{255, 0, 0, 7}, /* the below id is for non-shared-antenna free-run */
	{255, 1, 0, 7},
	{255, 1, 0, 7},
	{255, 1, 0, 7}
};

/* RSSI thresholds (descending) used by the coex state machine */
static const u8 rtw89_btc_8852c_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30};
static const u8 rtw89_btc_8852c_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {40, 36, 31, 28};

/* MAC/BB registers the coex firmware monitors and reports back */
static const struct rtw89_btc_fbtc_mreg rtw89_btc_8852c_mon_reg[] = {
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda00),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda04),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda24),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda30),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda34),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda38),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda44),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda48),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xda4c),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd200),
	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xd220),
	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
};
  2486. static
  2487. void rtw8852c_btc_bt_aci_imp(struct rtw89_dev *rtwdev)
  2488. {
  2489. struct rtw89_btc *btc = &rtwdev->btc;
  2490. struct rtw89_btc_dm *dm = &btc->dm;
  2491. struct rtw89_btc_bt_info *bt = &btc->cx.bt;
  2492. struct rtw89_btc_bt_link_info *b = &bt->link_info;
  2493. /* fix LNA2 = level-5 for BT ACI issue at BTG */
  2494. if (btc->dm.wl_btg_rx && b->profile_cnt.now != 0)
  2495. dm->trx_para_level = 1;
  2496. }
static
void rtw8852c_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
{
	/* Intentionally empty: BT counter accounting moved to firmware */
}
/* Program the path-B (BT side, S1) RF standby LUT entry.  @state true
 * selects the Rx-standby word to work around a settle issue when
 * GNT_BT_Tx transitions 1 -> 0; false restores the normal standby word.
 */
static
void rtw8852c_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
{
	/* open LUT write window and select entry 1 */
	rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
	rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
	rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x620);

	/* set WL standby = Rx for GNT_BT_Tx = 1->0 settle issue */
	if (state)
		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0,
			       RFREG_MASK, 0x179c);
	else
		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0,
			       RFREG_MASK, 0x208);

	/* close LUT write window */
	rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
}
  2517. static void rtw8852c_set_wl_lna2(struct rtw89_dev *rtwdev, u8 level)
  2518. {
  2519. /* level=0 Default: TIA 1/0= (LNA2,TIAN6) = (7,1)/(5,1) = 21dB/12dB
  2520. * level=1 Fix LNA2=5: TIA 1/0= (LNA2,TIAN6) = (5,0)/(5,1) = 18dB/12dB
  2521. * To improve BT ACI in co-rx
  2522. */
  2523. switch (level) {
  2524. case 0: /* default */
  2525. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x1000);
  2526. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x0);
  2527. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2528. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
  2529. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x17);
  2530. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x2);
  2531. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2532. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x3);
  2533. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x17);
  2534. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
  2535. break;
  2536. case 1: /* Fix LNA2=5 */
  2537. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x1000);
  2538. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x0);
  2539. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2540. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
  2541. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x5);
  2542. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x2);
  2543. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x15);
  2544. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x3);
  2545. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x5);
  2546. rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x0);
  2547. break;
  2548. }
  2549. }
  2550. static void rtw8852c_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
  2551. {
  2552. switch (level) {
  2553. case 0: /* original */
  2554. rtw8852c_bb_ctrl_btc_preagc(rtwdev, false);
  2555. rtw8852c_set_wl_lna2(rtwdev, 0);
  2556. break;
  2557. case 1: /* for FDD free-run */
  2558. rtw8852c_bb_ctrl_btc_preagc(rtwdev, true);
  2559. rtw8852c_set_wl_lna2(rtwdev, 0);
  2560. break;
  2561. case 2: /* for BTG Co-Rx*/
  2562. rtw8852c_bb_ctrl_btc_preagc(rtwdev, false);
  2563. rtw8852c_set_wl_lna2(rtwdev, 1);
  2564. break;
  2565. }
  2566. }
  2567. static void rtw8852c_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
  2568. struct rtw89_rx_phy_ppdu *phy_ppdu,
  2569. struct ieee80211_rx_status *status)
  2570. {
  2571. u8 chan_idx = phy_ppdu->chan_idx;
  2572. enum nl80211_band band;
  2573. u8 ch;
  2574. if (chan_idx == 0)
  2575. return;
  2576. rtw8852c_decode_chan_idx(rtwdev, chan_idx, &ch, &band);
  2577. status->freq = ieee80211_channel_to_frequency(ch, band);
  2578. status->band = band;
  2579. }
  2580. static void rtw8852c_query_ppdu(struct rtw89_dev *rtwdev,
  2581. struct rtw89_rx_phy_ppdu *phy_ppdu,
  2582. struct ieee80211_rx_status *status)
  2583. {
  2584. u8 path;
  2585. u8 *rx_power = phy_ppdu->rssi;
  2586. status->signal = RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B]));
  2587. for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
  2588. status->chains |= BIT(path);
  2589. status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
  2590. }
  2591. if (phy_ppdu->valid)
  2592. rtw8852c_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
  2593. }
/* Power on BB/RF: enable the BB function blocks, pulse the AFE digital
 * clock (set -> clear -> set), raise the S0/S1 LDO voltage selects,
 * then program the crystal SI registers in order, bailing out on the
 * first failed write.
 *
 * Returns 0 on success or the error from the failing xtal SI write.
 */
static int rtw8852c_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
{
	int ret;

	rtw89_write8_set(rtwdev, R_AX_SYS_FUNC_EN,
			 B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);

	rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
	rtw89_write32_clr(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);
	rtw89_write32_set(rtwdev, R_AX_WLRF_CTRL, B_AX_AFC_AFEDIG);

	rtw89_write32_mask(rtwdev, R_AX_AFE_OFF_CTRL1, B_AX_S0_LDO_VSEL_F_MASK, 0x1);
	rtw89_write32_mask(rtwdev, R_AX_AFE_OFF_CTRL1, B_AX_S1_LDO_VSEL_F_MASK, 0x1);

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL0, 0x7, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x6c, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xc7, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xc7, FULL_BIT_MASK);
	if (ret)
		return ret;

	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL3, 0xd, FULL_BIT_MASK);
	if (ret)
		return ret;

	return 0;
}
/* Power-off counterpart of rtw8852c_mac_enable_bb_rf(): gate the BB
 * function blocks.  Always succeeds.
 */
static int rtw8852c_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
{
	rtw89_write8_clr(rtwdev, R_AX_SYS_FUNC_EN,
			 B_AX_FEN_BBRSTB | B_AX_FEN_BB_GLB_RSTN);

	return 0;
}
/* Chip operation hooks wiring the RTL8852C implementations (and the v1
 * variants of the shared core/MAC/FW helpers) into the rtw89 core.
 */
static const struct rtw89_chip_ops rtw8852c_chip_ops = {
	.enable_bb_rf		= rtw8852c_mac_enable_bb_rf,
	.disable_bb_rf		= rtw8852c_mac_disable_bb_rf,
	.bb_reset		= rtw8852c_bb_reset,
	.bb_sethw		= rtw8852c_bb_sethw,
	.read_rf		= rtw89_phy_read_rf_v1,
	.write_rf		= rtw89_phy_write_rf_v1,
	.set_channel		= rtw8852c_set_channel,
	.set_channel_help	= rtw8852c_set_channel_help,
	.read_efuse		= rtw8852c_read_efuse,
	.read_phycap		= rtw8852c_read_phycap,
	.fem_setup		= NULL,
	.rfk_init		= rtw8852c_rfk_init,
	.rfk_channel		= rtw8852c_rfk_channel,
	.rfk_band_changed	= rtw8852c_rfk_band_changed,
	.rfk_scan		= rtw8852c_rfk_scan,
	.rfk_track		= rtw8852c_rfk_track,
	.power_trim		= rtw8852c_power_trim,
	.set_txpwr		= rtw8852c_set_txpwr,
	.set_txpwr_ctrl		= rtw8852c_set_txpwr_ctrl,
	.init_txpwr_unit	= rtw8852c_init_txpwr_unit,
	.get_thermal		= rtw8852c_get_thermal,
	.ctrl_btg		= rtw8852c_ctrl_btg,
	.query_ppdu		= rtw8852c_query_ppdu,
	.bb_ctrl_btc_preagc	= rtw8852c_bb_ctrl_btc_preagc,
	.cfg_txrx_path		= rtw8852c_bb_cfg_txrx_path,
	.set_txpwr_ul_tb_offset	= rtw8852c_set_txpwr_ul_tb_offset,
	.pwr_on_func		= rtw8852c_pwr_on_func,
	.pwr_off_func		= rtw8852c_pwr_off_func,
	.fill_txdesc		= rtw89_core_fill_txdesc_v1,
	.fill_txdesc_fwcmd	= rtw89_core_fill_txdesc_fwcmd_v1,
	.cfg_ctrl_path		= rtw89_mac_cfg_ctrl_path_v1,
	.mac_cfg_gnt		= rtw89_mac_cfg_gnt_v1,
	.stop_sch_tx		= rtw89_mac_stop_sch_tx_v1,
	.resume_sch_tx		= rtw89_mac_resume_sch_tx_v1,
	.h2c_dctl_sec_cam	= rtw89_fw_h2c_dctl_sec_cam_v1,

	/* BT coexistence hooks */
	.btc_set_rfe		= rtw8852c_btc_set_rfe,
	.btc_init_cfg		= rtw8852c_btc_init_cfg,
	.btc_set_wl_pri		= rtw8852c_btc_set_wl_pri,
	.btc_set_wl_txpwr_ctrl	= rtw8852c_btc_set_wl_txpwr_ctrl,
	.btc_get_bt_rssi	= rtw8852c_btc_get_bt_rssi,
	.btc_bt_aci_imp		= rtw8852c_btc_bt_aci_imp,
	.btc_update_bt_cnt	= rtw8852c_btc_update_bt_cnt,
	.btc_wl_s1_standby	= rtw8852c_btc_wl_s1_standby,
	.btc_set_wl_rx_gain	= rtw8852c_btc_set_wl_rx_gain,
	.btc_set_policy		= rtw89_btc_set_policy_v1,
};
/* Static capability and parameter description of the RTL8852C chip,
 * consumed by the rtw89 core (exported for the bus glue modules).
 */
const struct rtw89_chip_info rtw8852c_chip_info = {
	.chip_id		= RTL8852C,
	.ops			= &rtw8852c_chip_ops,
	.fw_name		= "rtw89/rtw8852c_fw.bin",
	.fifo_size		= 458752,
	.dle_scc_rsvd_size	= 0,
	.max_amsdu_limit	= 8000,
	.dis_2g_40m_ul_ofdma	= false,
	.rsvd_ple_ofst		= 0x6f800,
	.hfc_param_ini		= rtw8852c_hfc_param_ini_pcie,
	.dle_mem		= rtw8852c_dle_mem_pcie,
	.rf_base_addr		= {0xe000, 0xf000},
	.pwr_on_seq		= NULL,
	.pwr_off_seq		= NULL,
	/* BB/RF parameter tables */
	.bb_table		= &rtw89_8852c_phy_bb_table,
	.bb_gain_table		= &rtw89_8852c_phy_bb_gain_table,
	.rf_table		= {&rtw89_8852c_phy_radiob_table,
				   &rtw89_8852c_phy_radioa_table,},
	.nctl_table		= &rtw89_8852c_phy_nctl_table,
	.byr_table		= &rtw89_8852c_byr_table,
	/* TX power limit tables per band */
	.txpwr_lmt_2g		= &rtw89_8852c_txpwr_lmt_2g,
	.txpwr_lmt_5g		= &rtw89_8852c_txpwr_lmt_5g,
	.txpwr_lmt_6g		= &rtw89_8852c_txpwr_lmt_6g,
	.txpwr_lmt_ru_2g	= &rtw89_8852c_txpwr_lmt_ru_2g,
	.txpwr_lmt_ru_5g	= &rtw89_8852c_txpwr_lmt_ru_5g,
	.txpwr_lmt_ru_6g	= &rtw89_8852c_txpwr_lmt_ru_6g,
	.txpwr_factor_rf	= 2,
	.txpwr_factor_mac	= 1,
	.dig_table		= NULL,
	.dig_regs		= &rtw8852c_dig_regs,
	.tssi_dbw_table		= &rtw89_8852c_tssi_dbw_table,
	.support_chanctx_num	= 1,
	.support_bands		= BIT(NL80211_BAND_2GHZ) |
				  BIT(NL80211_BAND_5GHZ) |
				  BIT(NL80211_BAND_6GHZ),
	.support_bw160		= true,
	.hw_sec_hdr		= true,
	.rf_path_num		= 2,
	.tx_nss			= 2,
	.rx_nss			= 2,
	/* CAM sizes */
	.acam_num		= 128,
	.bcam_num		= 20,
	.scam_num		= 128,
	.bacam_num		= 8,
	.bacam_dynamic_num	= 8,
	.bacam_v1		= true,
	/* efuse layout */
	.sec_ctrl_efuse_size	= 4,
	.physical_efuse_size	= 1216,
	.logical_efuse_size	= 2048,
	.limit_efuse_size	= 1280,
	.dav_phy_efuse_size	= 96,
	.dav_log_efuse_size	= 16,
	.phycap_addr		= 0x590,
	.phycap_size		= 0x60,
	.para_ver		= 0x1,
	/* BT coexistence parameters and firmware report versions */
	.wlcx_desired		= 0x06000000,
	.btcx_desired		= 0x7,
	.scbd			= 0x1,
	.mailbox		= 0x1,
	.btc_fwinfo_buf		= 1280,
	.fcxbtcrpt_ver		= 4,
	.fcxtdma_ver		= 3,
	.fcxslots_ver		= 1,
	.fcxcysta_ver		= 3,
	.fcxstep_ver		= 3,
	.fcxnullsta_ver		= 2,
	.fcxmreg_ver		= 1,
	.fcxgpiodbg_ver		= 1,
	.fcxbtver_ver		= 1,
	.fcxbtscan_ver		= 1,
	.fcxbtafh_ver		= 1,
	.fcxbtdevinfo_ver	= 1,
	.afh_guard_ch		= 6,
	.wl_rssi_thres		= rtw89_btc_8852c_wl_rssi_thres,
	.bt_rssi_thres		= rtw89_btc_8852c_bt_rssi_thres,
	.rssi_tol		= 2,
	.mon_reg_num		= ARRAY_SIZE(rtw89_btc_8852c_mon_reg),
	.mon_reg		= rtw89_btc_8852c_mon_reg,
	.rf_para_ulink_num	= ARRAY_SIZE(rtw89_btc_8852c_rf_ul),
	.rf_para_ulink		= rtw89_btc_8852c_rf_ul,
	.rf_para_dlink_num	= ARRAY_SIZE(rtw89_btc_8852c_rf_dl),
	.rf_para_dlink		= rtw89_btc_8852c_rf_dl,
	/* power-save modes */
	.ps_mode_supported	= BIT(RTW89_PS_MODE_RFOFF) |
				  BIT(RTW89_PS_MODE_CLK_GATED) |
				  BIT(RTW89_PS_MODE_PWR_GATED),
	.low_power_hci_modes	= BIT(RTW89_PS_MODE_CLK_GATED) |
				  BIT(RTW89_PS_MODE_PWR_GATED),
	/* v1 register/descriptor layouts */
	.h2c_cctl_func_id	= H2C_FUNC_MAC_CCTLINFO_UD_V1,
	.hci_func_en_addr	= R_AX_HCI_FUNC_EN_V1,
	.h2c_desc_size		= sizeof(struct rtw89_rxdesc_short),
	.txwd_body_size		= sizeof(struct rtw89_txwd_body_v1),
	.h2c_ctrl_reg		= R_AX_H2CREG_CTRL_V1,
	.h2c_regs		= rtw8852c_h2c_regs,
	.c2h_ctrl_reg		= R_AX_C2HREG_CTRL_V1,
	.c2h_regs		= rtw8852c_c2h_regs,
	.page_regs		= &rtw8852c_page_regs,
	.dcfo_comp		= &rtw8852c_dcfo_comp,
	.dcfo_comp_sft		= 5,
	.imr_info		= &rtw8852c_imr_info,
	.rrsr_cfgs		= &rtw8852c_rrsr_cfgs,
	.dma_ch_mask		= 0,
};
EXPORT_SYMBOL(rtw8852c_chip_info);
EXPORT_SYMBOL(rtw8852c_chip_info);

/* firmware image requested by the core at probe time */
MODULE_FIRMWARE("rtw89/rtw8852c_fw.bin");
MODULE_AUTHOR("Realtek Corporation");
MODULE_DESCRIPTION("Realtek 802.11ax wireless 8852C driver");
MODULE_LICENSE("Dual BSD/GPL");