/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0) */
/* Copyright (C) 2014-2017 aQuantia Corporation. */

/* File hw_atl_llh.h: Declarations of bitfield and register access functions
 * for Atlantic registers.
 */

#ifndef HW_ATL_LLH_H
#define HW_ATL_LLH_H

#include "../atl_types.h"

struct aq_hw_s;
17 /* set global microprocessor semaphore */
18 void hw_atl_reg_glb_cpu_sem_set(struct aq_hw_s
*aq_hw
, u32 glb_cpu_sem
,
21 /* get global microprocessor semaphore */
22 u32
hw_atl_reg_glb_cpu_sem_get(struct aq_hw_s
*aq_hw
, u32 semaphore
);
24 /* set global register reset disable */
25 void hw_atl_glb_glb_reg_res_dis_set(struct aq_hw_s
*aq_hw
, u32 glb_reg_res_dis
);
28 void hw_atl_glb_soft_res_set(struct aq_hw_s
*aq_hw
, u32 soft_res
);
31 u32
hw_atl_glb_soft_res_get(struct aq_hw_s
*aq_hw
);
35 u32
hw_atl_rpb_rx_dma_drop_pkt_cnt_get(struct aq_hw_s
*aq_hw
);
37 /* get rx dma good octet counter lsw */
38 u32
hw_atl_stats_rx_dma_good_octet_counterlsw_get(struct aq_hw_s
*aq_hw
);
40 /* get rx dma good packet counter lsw */
41 u32
hw_atl_stats_rx_dma_good_pkt_counterlsw_get(struct aq_hw_s
*aq_hw
);
43 /* get tx dma good octet counter lsw */
44 u32
hw_atl_stats_tx_dma_good_octet_counterlsw_get(struct aq_hw_s
*aq_hw
);
46 /* get tx dma good packet counter lsw */
47 u32
hw_atl_stats_tx_dma_good_pkt_counterlsw_get(struct aq_hw_s
*aq_hw
);
49 /* get rx dma good octet counter msw */
50 u32
hw_atl_stats_rx_dma_good_octet_countermsw_get(struct aq_hw_s
*aq_hw
);
52 /* get rx dma good packet counter msw */
53 u32
hw_atl_stats_rx_dma_good_pkt_countermsw_get(struct aq_hw_s
*aq_hw
);
55 /* get tx dma good octet counter msw */
56 u32
hw_atl_stats_tx_dma_good_octet_countermsw_get(struct aq_hw_s
*aq_hw
);
58 /* get tx dma good packet counter msw */
59 u32
hw_atl_stats_tx_dma_good_pkt_countermsw_get(struct aq_hw_s
*aq_hw
);
61 /* get msm rx errors counter register */
62 u32
hw_atl_reg_mac_msm_rx_errs_cnt_get(struct aq_hw_s
*aq_hw
);
64 /* get msm rx unicast frames counter register */
65 u32
hw_atl_reg_mac_msm_rx_ucst_frm_cnt_get(struct aq_hw_s
*aq_hw
);
67 /* get msm rx multicast frames counter register */
68 u32
hw_atl_reg_mac_msm_rx_mcst_frm_cnt_get(struct aq_hw_s
*aq_hw
);
70 /* get msm rx broadcast frames counter register */
71 u32
hw_atl_reg_mac_msm_rx_bcst_frm_cnt_get(struct aq_hw_s
*aq_hw
);
73 /* get msm rx broadcast octets counter register 1 */
74 u32
hw_atl_reg_mac_msm_rx_bcst_octets_counter1get(struct aq_hw_s
*aq_hw
);
76 /* get msm rx unicast octets counter register 0 */
77 u32
hw_atl_reg_mac_msm_rx_ucst_octets_counter0get(struct aq_hw_s
*aq_hw
);
79 /* get rx dma statistics counter 7 */
80 u32
hw_atl_reg_rx_dma_stat_counter7get(struct aq_hw_s
*aq_hw
);
82 /* get msm tx errors counter register */
83 u32
hw_atl_reg_mac_msm_tx_errs_cnt_get(struct aq_hw_s
*aq_hw
);
85 /* get msm tx unicast frames counter register */
86 u32
hw_atl_reg_mac_msm_tx_ucst_frm_cnt_get(struct aq_hw_s
*aq_hw
);
88 /* get msm tx multicast frames counter register */
89 u32
hw_atl_reg_mac_msm_tx_mcst_frm_cnt_get(struct aq_hw_s
*aq_hw
);
91 /* get msm tx broadcast frames counter register */
92 u32
hw_atl_reg_mac_msm_tx_bcst_frm_cnt_get(struct aq_hw_s
*aq_hw
);
94 /* get msm tx multicast octets counter register 1 */
95 u32
hw_atl_reg_mac_msm_tx_mcst_octets_counter1get(struct aq_hw_s
*aq_hw
);
97 /* get msm tx broadcast octets counter register 1 */
98 u32
hw_atl_reg_mac_msm_tx_bcst_octets_counter1get(struct aq_hw_s
*aq_hw
);
100 /* get msm tx unicast octets counter register 0 */
101 u32
hw_atl_reg_mac_msm_tx_ucst_octets_counter0get(struct aq_hw_s
*aq_hw
);
103 /* get global mif identification */
104 u32
hw_atl_reg_glb_mif_id_get(struct aq_hw_s
*aq_hw
);
108 /* set interrupt auto mask lsw */
109 void hw_atl_itr_irq_auto_masklsw_set(struct aq_hw_s
*aq_hw
,
110 u32 irq_auto_masklsw
);
112 /* set interrupt mapping enable rx */
113 void hw_atl_itr_irq_map_en_rx_set(struct aq_hw_s
*aq_hw
, u32 irq_map_en_rx
,
116 /* set interrupt mapping enable tx */
117 void hw_atl_itr_irq_map_en_tx_set(struct aq_hw_s
*aq_hw
, u32 irq_map_en_tx
,
120 /* set interrupt mapping rx */
121 void hw_atl_itr_irq_map_rx_set(struct aq_hw_s
*aq_hw
, u32 irq_map_rx
, u32 rx
);
123 /* set interrupt mapping tx */
124 void hw_atl_itr_irq_map_tx_set(struct aq_hw_s
*aq_hw
, u32 irq_map_tx
, u32 tx
);
126 /* set interrupt mask clear lsw */
127 void hw_atl_itr_irq_msk_clearlsw_set(struct aq_hw_s
*aq_hw
,
128 u32 irq_msk_clearlsw
);
130 /* set interrupt mask set lsw */
131 void hw_atl_itr_irq_msk_setlsw_set(struct aq_hw_s
*aq_hw
, u32 irq_msk_setlsw
);
133 /* set interrupt register reset disable */
134 void hw_atl_itr_irq_reg_res_dis_set(struct aq_hw_s
*aq_hw
, u32 irq_reg_res_dis
);
136 /* set interrupt status clear lsw */
137 void hw_atl_itr_irq_status_clearlsw_set(struct aq_hw_s
*aq_hw
,
138 u32 irq_status_clearlsw
);
140 /* get interrupt status lsw */
141 u32
hw_atl_itr_irq_statuslsw_get(struct aq_hw_s
*aq_hw
);
143 /* get reset interrupt */
144 u32
hw_atl_itr_res_irq_get(struct aq_hw_s
*aq_hw
);
146 /* set reset interrupt */
147 void hw_atl_itr_res_irq_set(struct aq_hw_s
*aq_hw
, u32 res_irq
);
152 void hw_atl_rdm_cpu_id_set(struct aq_hw_s
*aq_hw
, u32 cpuid
, u32 dca
);
154 /* set rx dca enable */
155 void hw_atl_rdm_rx_dca_en_set(struct aq_hw_s
*aq_hw
, u32 rx_dca_en
);
157 /* set rx dca mode */
158 void hw_atl_rdm_rx_dca_mode_set(struct aq_hw_s
*aq_hw
, u32 rx_dca_mode
);
160 /* set rx descriptor data buffer size */
161 void hw_atl_rdm_rx_desc_data_buff_size_set(struct aq_hw_s
*aq_hw
,
162 u32 rx_desc_data_buff_size
,
165 /* set rx descriptor dca enable */
166 void hw_atl_rdm_rx_desc_dca_en_set(struct aq_hw_s
*aq_hw
, u32 rx_desc_dca_en
,
169 /* set rx descriptor enable */
170 void hw_atl_rdm_rx_desc_en_set(struct aq_hw_s
*aq_hw
, u32 rx_desc_en
,
173 /* set rx descriptor header splitting */
174 void hw_atl_rdm_rx_desc_head_splitting_set(struct aq_hw_s
*aq_hw
,
175 u32 rx_desc_head_splitting
,
178 /* get rx descriptor head pointer */
179 u32
hw_atl_rdm_rx_desc_head_ptr_get(struct aq_hw_s
*aq_hw
, u32 descriptor
);
181 /* set rx descriptor length */
182 void hw_atl_rdm_rx_desc_len_set(struct aq_hw_s
*aq_hw
, u32 rx_desc_len
,
185 /* set rx descriptor write-back interrupt enable */
186 void hw_atl_rdm_rx_desc_wr_wb_irq_en_set(struct aq_hw_s
*aq_hw
,
187 u32 rx_desc_wr_wb_irq_en
);
189 /* set rx header dca enable */
190 void hw_atl_rdm_rx_head_dca_en_set(struct aq_hw_s
*aq_hw
, u32 rx_head_dca_en
,
193 /* set rx payload dca enable */
194 void hw_atl_rdm_rx_pld_dca_en_set(struct aq_hw_s
*aq_hw
, u32 rx_pld_dca_en
,
197 /* set rx descriptor header buffer size */
198 void hw_atl_rdm_rx_desc_head_buff_size_set(struct aq_hw_s
*aq_hw
,
199 u32 rx_desc_head_buff_size
,
202 /* set rx descriptor reset */
203 void hw_atl_rdm_rx_desc_res_set(struct aq_hw_s
*aq_hw
, u32 rx_desc_res
,
206 /* Set RDM Interrupt Moderation Enable */
207 void hw_atl_rdm_rdm_intr_moder_en_set(struct aq_hw_s
*aq_hw
,
208 u32 rdm_intr_moder_en
);
212 /* set general interrupt mapping register */
213 void hw_atl_reg_gen_irq_map_set(struct aq_hw_s
*aq_hw
, u32 gen_intr_map
,
216 /* get general interrupt status register */
217 u32
hw_atl_reg_gen_irq_status_get(struct aq_hw_s
*aq_hw
);
219 /* set interrupt global control register */
220 void hw_atl_reg_irq_glb_ctl_set(struct aq_hw_s
*aq_hw
, u32 intr_glb_ctl
);
222 /* set interrupt throttle register */
223 void hw_atl_reg_irq_thr_set(struct aq_hw_s
*aq_hw
, u32 intr_thr
, u32 throttle
);
225 /* set rx dma descriptor base address lsw */
226 void hw_atl_reg_rx_dma_desc_base_addresslswset(struct aq_hw_s
*aq_hw
,
227 u32 rx_dma_desc_base_addrlsw
,
230 /* set rx dma descriptor base address msw */
231 void hw_atl_reg_rx_dma_desc_base_addressmswset(struct aq_hw_s
*aq_hw
,
232 u32 rx_dma_desc_base_addrmsw
,
235 /* get rx dma descriptor status register */
236 u32
hw_atl_reg_rx_dma_desc_status_get(struct aq_hw_s
*aq_hw
, u32 descriptor
);
238 /* set rx dma descriptor tail pointer register */
239 void hw_atl_reg_rx_dma_desc_tail_ptr_set(struct aq_hw_s
*aq_hw
,
240 u32 rx_dma_desc_tail_ptr
,
243 /* set rx filter multicast filter mask register */
244 void hw_atl_reg_rx_flr_mcst_flr_msk_set(struct aq_hw_s
*aq_hw
,
245 u32 rx_flr_mcst_flr_msk
);
247 /* set rx filter multicast filter register */
248 void hw_atl_reg_rx_flr_mcst_flr_set(struct aq_hw_s
*aq_hw
, u32 rx_flr_mcst_flr
,
251 /* set rx filter rss control register 1 */
252 void hw_atl_reg_rx_flr_rss_control1set(struct aq_hw_s
*aq_hw
,
253 u32 rx_flr_rss_control1
);
255 /* Set RX Filter Control Register 2 */
256 void hw_atl_reg_rx_flr_control2_set(struct aq_hw_s
*aq_hw
, u32 rx_flr_control2
);
258 /* Set RX Interrupt Moderation Control Register */
259 void hw_atl_reg_rx_intr_moder_ctrl_set(struct aq_hw_s
*aq_hw
,
260 u32 rx_intr_moderation_ctl
,
263 /* set tx dma debug control */
264 void hw_atl_reg_tx_dma_debug_ctl_set(struct aq_hw_s
*aq_hw
,
265 u32 tx_dma_debug_ctl
);
267 /* set tx dma descriptor base address lsw */
268 void hw_atl_reg_tx_dma_desc_base_addresslswset(struct aq_hw_s
*aq_hw
,
269 u32 tx_dma_desc_base_addrlsw
,
272 /* set tx dma descriptor base address msw */
273 void hw_atl_reg_tx_dma_desc_base_addressmswset(struct aq_hw_s
*aq_hw
,
274 u32 tx_dma_desc_base_addrmsw
,
277 /* set tx dma descriptor tail pointer register */
278 void hw_atl_reg_tx_dma_desc_tail_ptr_set(struct aq_hw_s
*aq_hw
,
279 u32 tx_dma_desc_tail_ptr
,
282 /* Set TX Interrupt Moderation Control Register */
283 void hw_atl_reg_tx_intr_moder_ctrl_set(struct aq_hw_s
*aq_hw
,
284 u32 tx_intr_moderation_ctl
,
287 /* set global microprocessor scratch pad */
288 void hw_atl_reg_glb_cpu_scratch_scp_set(struct aq_hw_s
*aq_hw
,
289 u32 glb_cpu_scratch_scp
,
294 /* set dma system loopback */
295 void hw_atl_rpb_dma_sys_lbk_set(struct aq_hw_s
*aq_hw
, u32 dma_sys_lbk
);
297 /* set rx traffic class mode */
298 void hw_atl_rpb_rpf_rx_traf_class_mode_set(struct aq_hw_s
*aq_hw
,
299 u32 rx_traf_class_mode
);
301 /* get rx traffic class mode */
302 u32
hw_atl_rpb_rpf_rx_traf_class_mode_get(struct aq_hw_s
*aq_hw
);
304 /* set rx buffer enable */
305 void hw_atl_rpb_rx_buff_en_set(struct aq_hw_s
*aq_hw
, u32 rx_buff_en
);
307 /* set rx buffer high threshold (per tc) */
308 void hw_atl_rpb_rx_buff_hi_threshold_per_tc_set(struct aq_hw_s
*aq_hw
,
309 u32 rx_buff_hi_threshold_per_tc
,
312 /* set rx buffer low threshold (per tc) */
313 void hw_atl_rpb_rx_buff_lo_threshold_per_tc_set(struct aq_hw_s
*aq_hw
,
314 u32 rx_buff_lo_threshold_per_tc
,
317 /* set rx flow control mode */
318 void hw_atl_rpb_rx_flow_ctl_mode_set(struct aq_hw_s
*aq_hw
,
319 u32 rx_flow_ctl_mode
);
321 /* set rx packet buffer size (per tc) */
322 void hw_atl_rpb_rx_pkt_buff_size_per_tc_set(struct aq_hw_s
*aq_hw
,
323 u32 rx_pkt_buff_size_per_tc
,
326 /* set rx xoff enable (per tc) */
327 void hw_atl_rpb_rx_xoff_en_per_tc_set(struct aq_hw_s
*aq_hw
,
328 u32 rx_xoff_en_per_tc
,
333 /* set l2 broadcast count threshold */
334 void hw_atl_rpfl2broadcast_count_threshold_set(struct aq_hw_s
*aq_hw
,
335 u32 l2broadcast_count_threshold
);
337 /* set l2 broadcast enable */
338 void hw_atl_rpfl2broadcast_en_set(struct aq_hw_s
*aq_hw
, u32 l2broadcast_en
);
340 /* set l2 broadcast filter action */
341 void hw_atl_rpfl2broadcast_flr_act_set(struct aq_hw_s
*aq_hw
,
342 u32 l2broadcast_flr_act
);
344 /* set l2 multicast filter enable */
345 void hw_atl_rpfl2multicast_flr_en_set(struct aq_hw_s
*aq_hw
,
346 u32 l2multicast_flr_en
,
349 /* set l2 promiscuous mode enable */
350 void hw_atl_rpfl2promiscuous_mode_en_set(struct aq_hw_s
*aq_hw
,
351 u32 l2promiscuous_mode_en
);
353 /* set l2 unicast filter action */
354 void hw_atl_rpfl2unicast_flr_act_set(struct aq_hw_s
*aq_hw
,
355 u32 l2unicast_flr_act
,
358 /* set l2 unicast filter enable */
359 void hw_atl_rpfl2_uc_flr_en_set(struct aq_hw_s
*aq_hw
, u32 l2unicast_flr_en
,
362 /* set l2 unicast destination address lsw */
363 void hw_atl_rpfl2unicast_dest_addresslsw_set(struct aq_hw_s
*aq_hw
,
364 u32 l2unicast_dest_addresslsw
,
367 /* set l2 unicast destination address msw */
368 void hw_atl_rpfl2unicast_dest_addressmsw_set(struct aq_hw_s
*aq_hw
,
369 u32 l2unicast_dest_addressmsw
,
372 /* Set L2 Accept all Multicast packets */
373 void hw_atl_rpfl2_accept_all_mc_packets_set(struct aq_hw_s
*aq_hw
,
374 u32 l2_accept_all_mc_packets
);
376 /* set user-priority tc mapping */
377 void hw_atl_rpf_rpb_user_priority_tc_map_set(struct aq_hw_s
*aq_hw
,
378 u32 user_priority_tc_map
, u32 tc
);
380 /* set rss key address */
381 void hw_atl_rpf_rss_key_addr_set(struct aq_hw_s
*aq_hw
, u32 rss_key_addr
);
383 /* set rss key write data */
384 void hw_atl_rpf_rss_key_wr_data_set(struct aq_hw_s
*aq_hw
, u32 rss_key_wr_data
);
386 /* get rss key write enable */
387 u32
hw_atl_rpf_rss_key_wr_en_get(struct aq_hw_s
*aq_hw
);
389 /* set rss key write enable */
390 void hw_atl_rpf_rss_key_wr_en_set(struct aq_hw_s
*aq_hw
, u32 rss_key_wr_en
);
392 /* set rss redirection table address */
393 void hw_atl_rpf_rss_redir_tbl_addr_set(struct aq_hw_s
*aq_hw
,
394 u32 rss_redir_tbl_addr
);
396 /* set rss redirection table write data */
397 void hw_atl_rpf_rss_redir_tbl_wr_data_set(struct aq_hw_s
*aq_hw
,
398 u32 rss_redir_tbl_wr_data
);
400 /* get rss redirection write enable */
401 u32
hw_atl_rpf_rss_redir_wr_en_get(struct aq_hw_s
*aq_hw
);
403 /* set rss redirection write enable */
404 void hw_atl_rpf_rss_redir_wr_en_set(struct aq_hw_s
*aq_hw
, u32 rss_redir_wr_en
);
406 /* set tpo to rpf system loopback */
407 void hw_atl_rpf_tpo_to_rpf_sys_lbk_set(struct aq_hw_s
*aq_hw
,
408 u32 tpo_to_rpf_sys_lbk
);
410 /* set vlan inner ethertype */
411 void hw_atl_rpf_vlan_inner_etht_set(struct aq_hw_s
*aq_hw
, u32 vlan_inner_etht
);
413 /* set vlan outer ethertype */
414 void hw_atl_rpf_vlan_outer_etht_set(struct aq_hw_s
*aq_hw
, u32 vlan_outer_etht
);
416 /* set vlan promiscuous mode enable */
417 void hw_atl_rpf_vlan_prom_mode_en_set(struct aq_hw_s
*aq_hw
,
418 u32 vlan_prom_mode_en
);
420 /* Set VLAN untagged action */
421 void hw_atl_rpf_vlan_untagged_act_set(struct aq_hw_s
*aq_hw
,
422 u32 vlan_untagged_act
);
424 /* Set VLAN accept untagged packets */
425 void hw_atl_rpf_vlan_accept_untagged_packets_set(struct aq_hw_s
*aq_hw
,
426 u32 vlan_acc_untagged_packets
);
428 /* Set VLAN filter enable */
429 void hw_atl_rpf_vlan_flr_en_set(struct aq_hw_s
*aq_hw
, u32 vlan_flr_en
,
432 /* Set VLAN Filter Action */
433 void hw_atl_rpf_vlan_flr_act_set(struct aq_hw_s
*aq_hw
, u32 vlan_filter_act
,
436 /* Set VLAN ID Filter */
437 void hw_atl_rpf_vlan_id_flr_set(struct aq_hw_s
*aq_hw
, u32 vlan_id_flr
,
440 /* set ethertype filter enable */
441 void hw_atl_rpf_etht_flr_en_set(struct aq_hw_s
*aq_hw
, u32 etht_flr_en
,
444 /* set ethertype user-priority enable */
445 void hw_atl_rpf_etht_user_priority_en_set(struct aq_hw_s
*aq_hw
,
446 u32 etht_user_priority_en
,
449 /* set ethertype rx queue enable */
450 void hw_atl_rpf_etht_rx_queue_en_set(struct aq_hw_s
*aq_hw
,
451 u32 etht_rx_queue_en
,
454 /* set ethertype rx queue */
455 void hw_atl_rpf_etht_rx_queue_set(struct aq_hw_s
*aq_hw
, u32 etht_rx_queue
,
458 /* set ethertype user-priority */
459 void hw_atl_rpf_etht_user_priority_set(struct aq_hw_s
*aq_hw
,
460 u32 etht_user_priority
,
463 /* set ethertype management queue */
464 void hw_atl_rpf_etht_mgt_queue_set(struct aq_hw_s
*aq_hw
, u32 etht_mgt_queue
,
467 /* set ethertype filter action */
468 void hw_atl_rpf_etht_flr_act_set(struct aq_hw_s
*aq_hw
, u32 etht_flr_act
,
471 /* set ethertype filter */
472 void hw_atl_rpf_etht_flr_set(struct aq_hw_s
*aq_hw
, u32 etht_flr
, u32 filter
);
476 /* set ipv4 header checksum offload enable */
477 void hw_atl_rpo_ipv4header_crc_offload_en_set(struct aq_hw_s
*aq_hw
,
478 u32 ipv4header_crc_offload_en
);
480 /* set rx descriptor vlan stripping */
481 void hw_atl_rpo_rx_desc_vlan_stripping_set(struct aq_hw_s
*aq_hw
,
482 u32 rx_desc_vlan_stripping
,
485 /* set tcp/udp checksum offload enable */
486 void hw_atl_rpo_tcp_udp_crc_offload_en_set(struct aq_hw_s
*aq_hw
,
487 u32 tcp_udp_crc_offload_en
);
489 /* Set LRO Patch Optimization Enable. */
490 void hw_atl_rpo_lro_patch_optimization_en_set(struct aq_hw_s
*aq_hw
,
491 u32 lro_patch_optimization_en
);
493 /* Set Large Receive Offload Enable */
494 void hw_atl_rpo_lro_en_set(struct aq_hw_s
*aq_hw
, u32 lro_en
);
496 /* Set LRO Q Sessions Limit */
497 void hw_atl_rpo_lro_qsessions_lim_set(struct aq_hw_s
*aq_hw
,
498 u32 lro_qsessions_lim
);
500 /* Set LRO Total Descriptor Limit */
501 void hw_atl_rpo_lro_total_desc_lim_set(struct aq_hw_s
*aq_hw
,
502 u32 lro_total_desc_lim
);
504 /* Set LRO Min Payload of First Packet */
505 void hw_atl_rpo_lro_min_pay_of_first_pkt_set(struct aq_hw_s
*aq_hw
,
506 u32 lro_min_pld_of_first_pkt
);
508 /* Set LRO Packet Limit */
509 void hw_atl_rpo_lro_pkt_lim_set(struct aq_hw_s
*aq_hw
, u32 lro_packet_lim
);
511 /* Set LRO Max Number of Descriptors */
512 void hw_atl_rpo_lro_max_num_of_descriptors_set(struct aq_hw_s
*aq_hw
,
513 u32 lro_max_desc_num
, u32 lro
);
515 /* Set LRO Time Base Divider */
516 void hw_atl_rpo_lro_time_base_divider_set(struct aq_hw_s
*aq_hw
,
517 u32 lro_time_base_divider
);
519 /*Set LRO Inactive Interval */
520 void hw_atl_rpo_lro_inactive_interval_set(struct aq_hw_s
*aq_hw
,
521 u32 lro_inactive_interval
);
523 /*Set LRO Max Coalescing Interval */
524 void hw_atl_rpo_lro_max_coalescing_interval_set(struct aq_hw_s
*aq_hw
,
525 u32 lro_max_coal_interval
);
529 /* set rx register reset disable */
530 void hw_atl_rx_rx_reg_res_dis_set(struct aq_hw_s
*aq_hw
, u32 rx_reg_res_dis
);
535 void hw_atl_tdm_cpu_id_set(struct aq_hw_s
*aq_hw
, u32 cpuid
, u32 dca
);
537 /* set large send offload enable */
538 void hw_atl_tdm_large_send_offload_en_set(struct aq_hw_s
*aq_hw
,
539 u32 large_send_offload_en
);
541 /* set tx descriptor enable */
542 void hw_atl_tdm_tx_desc_en_set(struct aq_hw_s
*aq_hw
, u32 tx_desc_en
,
545 /* set tx dca enable */
546 void hw_atl_tdm_tx_dca_en_set(struct aq_hw_s
*aq_hw
, u32 tx_dca_en
);
548 /* set tx dca mode */
549 void hw_atl_tdm_tx_dca_mode_set(struct aq_hw_s
*aq_hw
, u32 tx_dca_mode
);
551 /* set tx descriptor dca enable */
552 void hw_atl_tdm_tx_desc_dca_en_set(struct aq_hw_s
*aq_hw
, u32 tx_desc_dca_en
,
555 /* get tx descriptor head pointer */
556 u32
hw_atl_tdm_tx_desc_head_ptr_get(struct aq_hw_s
*aq_hw
, u32 descriptor
);
558 /* set tx descriptor length */
559 void hw_atl_tdm_tx_desc_len_set(struct aq_hw_s
*aq_hw
, u32 tx_desc_len
,
562 /* set tx descriptor write-back interrupt enable */
563 void hw_atl_tdm_tx_desc_wr_wb_irq_en_set(struct aq_hw_s
*aq_hw
,
564 u32 tx_desc_wr_wb_irq_en
);
566 /* set tx descriptor write-back threshold */
567 void hw_atl_tdm_tx_desc_wr_wb_threshold_set(struct aq_hw_s
*aq_hw
,
568 u32 tx_desc_wr_wb_threshold
,
571 /* Set TDM Interrupt Moderation Enable */
572 void hw_atl_tdm_tdm_intr_moder_en_set(struct aq_hw_s
*aq_hw
,
573 u32 tdm_irq_moderation_en
);
576 /* set lso tcp flag of first packet */
577 void hw_atl_thm_lso_tcp_flag_of_first_pkt_set(struct aq_hw_s
*aq_hw
,
578 u32 lso_tcp_flag_of_first_pkt
);
580 /* set lso tcp flag of last packet */
581 void hw_atl_thm_lso_tcp_flag_of_last_pkt_set(struct aq_hw_s
*aq_hw
,
582 u32 lso_tcp_flag_of_last_pkt
);
584 /* set lso tcp flag of middle packet */
585 void hw_atl_thm_lso_tcp_flag_of_middle_pkt_set(struct aq_hw_s
*aq_hw
,
586 u32 lso_tcp_flag_of_middle_pkt
);
590 /* set TX Traffic Class Mode */
591 void hw_atl_rpb_tps_tx_tc_mode_set(struct aq_hw_s
*aq_hw
,
592 u32 tx_traf_class_mode
);
594 /* get TX Traffic Class Mode */
595 u32
hw_atl_rpb_tps_tx_tc_mode_get(struct aq_hw_s
*aq_hw
);
597 /* set tx buffer enable */
598 void hw_atl_tpb_tx_buff_en_set(struct aq_hw_s
*aq_hw
, u32 tx_buff_en
);
600 /* set tx buffer high threshold (per tc) */
601 void hw_atl_tpb_tx_buff_hi_threshold_per_tc_set(struct aq_hw_s
*aq_hw
,
602 u32 tx_buff_hi_threshold_per_tc
,
605 /* set tx buffer low threshold (per tc) */
606 void hw_atl_tpb_tx_buff_lo_threshold_per_tc_set(struct aq_hw_s
*aq_hw
,
607 u32 tx_buff_lo_threshold_per_tc
,
610 /* set tx dma system loopback enable */
611 void hw_atl_tpb_tx_dma_sys_lbk_en_set(struct aq_hw_s
*aq_hw
,
612 u32 tx_dma_sys_lbk_en
);
614 /* set tx packet buffer size (per tc) */
615 void hw_atl_tpb_tx_pkt_buff_size_per_tc_set(struct aq_hw_s
*aq_hw
,
616 u32 tx_pkt_buff_size_per_tc
,
619 /* set tx path pad insert enable */
620 void hw_atl_tpb_tx_path_scp_ins_en_set(struct aq_hw_s
*aq_hw
,
621 u32 tx_path_scp_ins_en
);
625 /* set ipv4 header checksum offload enable */
626 void hw_atl_tpo_ipv4header_crc_offload_en_set(struct aq_hw_s
*aq_hw
,
627 u32 ipv4header_crc_offload_en
);
629 /* set tcp/udp checksum offload enable */
630 void hw_atl_tpo_tcp_udp_crc_offload_en_set(struct aq_hw_s
*aq_hw
,
631 u32 tcp_udp_crc_offload_en
);
633 /* set tx pkt system loopback enable */
634 void hw_atl_tpo_tx_pkt_sys_lbk_en_set(struct aq_hw_s
*aq_hw
,
635 u32 tx_pkt_sys_lbk_en
);
639 /* set tx packet scheduler data arbitration mode */
640 void hw_atl_tps_tx_pkt_shed_data_arb_mode_set(struct aq_hw_s
*aq_hw
,
641 u32 tx_pkt_shed_data_arb_mode
);
643 /* set tx packet scheduler descriptor rate current time reset */
644 void hw_atl_tps_tx_pkt_shed_desc_rate_curr_time_res_set(struct aq_hw_s
*aq_hw
,
647 /* set tx packet scheduler descriptor rate limit */
648 void hw_atl_tps_tx_pkt_shed_desc_rate_lim_set(struct aq_hw_s
*aq_hw
,
649 u32 tx_pkt_shed_desc_rate_lim
);
651 /* set tx packet scheduler descriptor tc arbitration mode */
652 void hw_atl_tps_tx_pkt_shed_desc_tc_arb_mode_set(struct aq_hw_s
*aq_hw
,
655 /* set tx packet scheduler descriptor tc max credit */
656 void hw_atl_tps_tx_pkt_shed_desc_tc_max_credit_set(struct aq_hw_s
*aq_hw
,
660 /* set tx packet scheduler descriptor tc weight */
661 void hw_atl_tps_tx_pkt_shed_desc_tc_weight_set(struct aq_hw_s
*aq_hw
,
662 u32 tx_pkt_shed_desc_tc_weight
,
665 /* set tx packet scheduler descriptor vm arbitration mode */
666 void hw_atl_tps_tx_pkt_shed_desc_vm_arb_mode_set(struct aq_hw_s
*aq_hw
,
669 /* set tx packet scheduler tc data max credit */
670 void hw_atl_tps_tx_pkt_shed_tc_data_max_credit_set(struct aq_hw_s
*aq_hw
,
674 /* set tx packet scheduler tc data weight */
675 void hw_atl_tps_tx_pkt_shed_tc_data_weight_set(struct aq_hw_s
*aq_hw
,
676 u32 tx_pkt_shed_tc_data_weight
,
681 /* set tx register reset disable */
682 void hw_atl_tx_tx_reg_res_dis_set(struct aq_hw_s
*aq_hw
, u32 tx_reg_res_dis
);
686 /* get register access status */
687 u32
hw_atl_msm_reg_access_status_get(struct aq_hw_s
*aq_hw
);
689 /* set register address for indirect address */
690 void hw_atl_msm_reg_addr_for_indirect_addr_set(struct aq_hw_s
*aq_hw
,
691 u32 reg_addr_for_indirect_addr
);
693 /* set register read strobe */
694 void hw_atl_msm_reg_rd_strobe_set(struct aq_hw_s
*aq_hw
, u32 reg_rd_strobe
);
696 /* get register read data */
697 u32
hw_atl_msm_reg_rd_data_get(struct aq_hw_s
*aq_hw
);
699 /* set register write data */
700 void hw_atl_msm_reg_wr_data_set(struct aq_hw_s
*aq_hw
, u32 reg_wr_data
);
702 /* set register write strobe */
703 void hw_atl_msm_reg_wr_strobe_set(struct aq_hw_s
*aq_hw
, u32 reg_wr_strobe
);
707 /* set pci register reset disable */
708 void hw_atl_pci_pci_reg_res_dis_set(struct aq_hw_s
*aq_hw
, u32 pci_reg_res_dis
);
710 /* set uP Force Interrupt */
711 void hw_atl_mcp_up_force_intr_set(struct aq_hw_s
*aq_hw
, u32 up_force_intr
);
714 #endif /* HW_ATL_LLH_H */