Bug Summary

File: dev/ic/ar5008.c
Warning: line 1817, column 2
Value stored to 'ds' is never read
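
The warning comes from clang's deadcode.DeadStores checker, which flags an assignment whose value is never read again before the variable is overwritten or goes out of scope. The flagged statement at ar5008.c line 1817 is not part of the excerpt reproduced below; the following stand-alone C file is only a hedged, minimal sketch of the same class of defect (all names in it are hypothetical and unrelated to the driver):

#include <stdio.h>

int
main(void)
{
	int sum = 0;
	int i;

	for (i = 0; i < 4; i++)
		sum += i;

	/* Dead store: 'i' is assigned here but never read again. */
	i = 0;

	printf("%d\n", sum);
	return 0;
}

Running scan-build (or clang --analyze) over such a file reports "Value stored to 'i' is never read"; the usual fix is simply to delete the redundant assignment.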

Annotated Source Code


clang -cc1 -cc1 -triple amd64-unknown-openbsd7.0 -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ar5008.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model static -mframe-pointer=all -relaxed-aliasing -fno-rounding-math -mconstructor-aliases -ffreestanding -mcmodel=kernel -target-cpu x86-64 -target-feature +retpoline-indirect-calls -target-feature +retpoline-indirect-branches -target-feature -sse2 -target-feature -sse -target-feature -3dnow -target-feature -mmx -target-feature +save-args -disable-red-zone -no-implicit-float -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/usr/src/sys/arch/amd64/compile/GENERIC.MP/obj -nostdsysteminc -nobuiltininc -resource-dir /usr/local/lib/clang/13.0.0 -I /usr/src/sys -I /usr/src/sys/arch/amd64/compile/GENERIC.MP/obj -I /usr/src/sys/arch -I /usr/src/sys/dev/pci/drm/include -I /usr/src/sys/dev/pci/drm/include/uapi -I /usr/src/sys/dev/pci/drm/amd/include/asic_reg -I /usr/src/sys/dev/pci/drm/amd/include -I /usr/src/sys/dev/pci/drm/amd/amdgpu -I /usr/src/sys/dev/pci/drm/amd/display -I /usr/src/sys/dev/pci/drm/amd/display/include -I /usr/src/sys/dev/pci/drm/amd/display/dc -I /usr/src/sys/dev/pci/drm/amd/display/amdgpu_dm -I /usr/src/sys/dev/pci/drm/amd/pm/inc -I /usr/src/sys/dev/pci/drm/amd/pm/swsmu -I /usr/src/sys/dev/pci/drm/amd/pm/swsmu/smu11 -I /usr/src/sys/dev/pci/drm/amd/pm/swsmu/smu12 -I /usr/src/sys/dev/pci/drm/amd/pm/powerplay -I /usr/src/sys/dev/pci/drm/amd/pm/powerplay/hwmgr -I /usr/src/sys/dev/pci/drm/amd/pm/powerplay/smumgr -I /usr/src/sys/dev/pci/drm/amd/display/dc/inc -I /usr/src/sys/dev/pci/drm/amd/display/dc/inc/hw -I /usr/src/sys/dev/pci/drm/amd/display/dc/clk_mgr -I /usr/src/sys/dev/pci/drm/amd/display/modules/inc -I /usr/src/sys/dev/pci/drm/amd/display/modules/hdcp -I /usr/src/sys/dev/pci/drm/amd/display/dmub/inc -I /usr/src/sys/dev/pci/drm/i915 -D DDB -D DIAGNOSTIC -D KTRACE -D ACCOUNTING -D KMEMSTATS -D PTRACE -D POOL_DEBUG -D CRYPTO -D SYSVMSG -D SYSVSEM -D SYSVSHM -D UVM_SWAP_ENCRYPT -D FFS -D FFS2 -D FFS_SOFTUPDATES -D UFS_DIRHASH -D QUOTA -D EXT2FS -D MFS -D NFSCLIENT -D NFSSERVER -D CD9660 -D UDF -D MSDOSFS -D FIFO -D FUSE -D SOCKET_SPLICE -D TCP_ECN -D TCP_SIGNATURE -D INET6 -D IPSEC -D PPP_BSDCOMP -D PPP_DEFLATE -D PIPEX -D MROUTING -D MPLS -D BOOT_CONFIG -D USER_PCICONF -D APERTURE -D MTRR -D NTFS -D HIBERNATE -D PCIVERBOSE -D USBVERBOSE -D WSDISPLAY_COMPAT_USL -D WSDISPLAY_COMPAT_RAWKBD -D WSDISPLAY_DEFAULTSCREENS=6 -D X86EMU -D ONEWIREVERBOSE -D MULTIPROCESSOR -D MAXUSERS=80 -D _KERNEL -D CONFIG_DRM_AMD_DC_DCN3_0 -O2 -Wno-pointer-sign -Wno-address-of-packed-member -Wno-constant-conversion -Wno-unused-but-set-variable -Wno-gnu-folding-constant -fdebug-compilation-dir=/usr/src/sys/arch/amd64/compile/GENERIC.MP/obj -ferror-limit 19 -fwrapv -D_RET_PROTECTOR -ret-protector -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -fno-builtin-malloc -fno-builtin-calloc -fno-builtin-realloc -fno-builtin-valloc -fno-builtin-free 
-fno-builtin-strdup -fno-builtin-strndup -analyzer-output=html -faddrsig -o /usr/obj/sys/arch/amd64/compile/GENERIC.MP/scan-build/2022-01-12-131800-47421-1 -x c /usr/src/sys/dev/ic/ar5008.c
1/* $OpenBSD: ar5008.c,v 1.69 2021/10/11 09:01:05 stsp Exp $ */
2
3/*-
4 * Copyright (c) 2009 Damien Bergamini <damien.bergamini@free.fr>
5 * Copyright (c) 2008-2009 Atheros Communications Inc.
6 *
7 * Permission to use, copy, modify, and/or distribute this software for any
8 * purpose with or without fee is hereby granted, provided that the above
9 * copyright notice and this permission notice appear in all copies.
10 *
11 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
12 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
13 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
14 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
15 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
16 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
17 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 */
19
20/*
21 * Driver for Atheros 802.11a/g/n chipsets.
22 * Routines common to AR5008, AR9001 and AR9002 families.
23 */
24
25#include "bpfilter.h"
26
27#include <sys/param.h>
28#include <sys/sockio.h>
29#include <sys/mbuf.h>
30#include <sys/kernel.h>
31#include <sys/socket.h>
32#include <sys/systm.h>
33#include <sys/malloc.h>
34#include <sys/queue.h>
35#include <sys/timeout.h>
36#include <sys/conf.h>
37#include <sys/device.h>
38#include <sys/stdint.h> /* uintptr_t */
39#include <sys/endian.h>
40
41#include <machine/bus.h>
42
43#if NBPFILTER1 > 0
44#include <net/bpf.h>
45#endif
46#include <net/if.h>
47#include <net/if_media.h>
48
49#include <netinet/in.h>
50#include <netinet/if_ether.h>
51
52#include <net80211/ieee80211_var.h>
53#include <net80211/ieee80211_amrr.h>
54#include <net80211/ieee80211_ra.h>
55#include <net80211/ieee80211_radiotap.h>
56
57#include <dev/ic/athnreg.h>
58#include <dev/ic/athnvar.h>
59
60#include <dev/ic/ar5008reg.h>
61
62int ar5008_attach(struct athn_softc *);
63int ar5008_read_eep_word(struct athn_softc *, uint32_t, uint16_t *);
64int ar5008_read_rom(struct athn_softc *);
65void ar5008_swap_rom(struct athn_softc *);
66int ar5008_gpio_read(struct athn_softc *, int);
67void ar5008_gpio_write(struct athn_softc *, int, int);
68void ar5008_gpio_config_input(struct athn_softc *, int);
69void ar5008_gpio_config_output(struct athn_softc *, int, int);
70void ar5008_rfsilent_init(struct athn_softc *);
71int ar5008_dma_alloc(struct athn_softc *);
72void ar5008_dma_free(struct athn_softc *);
73int ar5008_tx_alloc(struct athn_softc *);
74void ar5008_tx_free(struct athn_softc *);
75int ar5008_rx_alloc(struct athn_softc *);
76void ar5008_rx_free(struct athn_softc *);
77void ar5008_rx_enable(struct athn_softc *);
78void ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
79 struct ar_rx_desc *);
80int ar5008_ccmp_decap(struct athn_softc *, struct mbuf *,
81 struct ieee80211_node *);
82void ar5008_rx_intr(struct athn_softc *);
83int ar5008_tx_process(struct athn_softc *, int);
84void ar5008_tx_intr(struct athn_softc *);
85int ar5008_swba_intr(struct athn_softc *);
86int ar5008_intr(struct athn_softc *);
87int ar5008_ccmp_encap(struct mbuf *, u_int, struct ieee80211_key *);
88int ar5008_tx(struct athn_softc *, struct mbuf *, struct ieee80211_node *,
89 int);
90void ar5008_set_rf_mode(struct athn_softc *, struct ieee80211_channel *);
91int ar5008_rf_bus_request(struct athn_softc *);
92void ar5008_rf_bus_release(struct athn_softc *);
93void ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
94 struct ieee80211_channel *);
95void ar5008_set_delta_slope(struct athn_softc *, struct ieee80211_channel *,
96 struct ieee80211_channel *);
97void ar5008_enable_antenna_diversity(struct athn_softc *);
98void ar5008_init_baseband(struct athn_softc *);
99void ar5008_disable_phy(struct athn_softc *);
100void ar5008_init_chains(struct athn_softc *);
101void ar5008_set_rxchains(struct athn_softc *);
102void ar5008_read_noisefloor(struct athn_softc *, int16_t *, int16_t *);
103void ar5008_write_noisefloor(struct athn_softc *, int16_t *, int16_t *);
104int ar5008_get_noisefloor(struct athn_softc *);
105void ar5008_apply_noisefloor(struct athn_softc *);
106void ar5008_bb_load_noisefloor(struct athn_softc *);
107void ar5008_do_noisefloor_calib(struct athn_softc *);
108void ar5008_init_noisefloor_calib(struct athn_softc *);
109void ar5008_do_calib(struct athn_softc *);
110void ar5008_next_calib(struct athn_softc *);
111void ar5008_calib_iq(struct athn_softc *);
112void ar5008_calib_adc_gain(struct athn_softc *);
113void ar5008_calib_adc_dc_off(struct athn_softc *);
114void ar5008_write_txpower(struct athn_softc *, int16_t power[]);
115void ar5008_set_viterbi_mask(struct athn_softc *, int);
116void ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
117 struct ieee80211_channel *);
118uint8_t ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
119void ar5008_get_pdadcs(struct athn_softc *, uint8_t, struct athn_pier *,
120 struct athn_pier *, int, int, uint8_t, uint8_t *, uint8_t *);
121void ar5008_get_lg_tpow(struct athn_softc *, struct ieee80211_channel *,
122 uint8_t, const struct ar_cal_target_power_leg *, int, uint8_t[]);
123void ar5008_get_ht_tpow(struct athn_softc *, struct ieee80211_channel *,
124 uint8_t, const struct ar_cal_target_power_ht *, int, uint8_t[]);
125void ar5008_set_noise_immunity_level(struct athn_softc *, int);
126void ar5008_enable_ofdm_weak_signal(struct athn_softc *);
127void ar5008_disable_ofdm_weak_signal(struct athn_softc *);
128void ar5008_set_cck_weak_signal(struct athn_softc *, int);
129void ar5008_set_firstep_level(struct athn_softc *, int);
130void ar5008_set_spur_immunity_level(struct athn_softc *, int);
131
132/* Extern functions. */
133void athn_stop(struct ifnet *, int);
134int athn_interpolate(int, int, int, int, int);
135int athn_txtime(struct athn_softc *, int, int, u_int);
136void athn_inc_tx_trigger_level(struct athn_softc *);
137int athn_tx_pending(struct athn_softc *, int);
138void athn_stop_tx_dma(struct athn_softc *, int);
139void athn_get_delta_slope(uint32_t, uint32_t *, uint32_t *);
140void athn_config_pcie(struct athn_softc *);
141void athn_config_nonpcie(struct athn_softc *);
142uint8_t athn_chan2fbin(struct ieee80211_channel *);
143uint8_t ar5416_get_rf_rev(struct athn_softc *);
144void ar5416_reset_addac(struct athn_softc *, struct ieee80211_channel *);
145void ar5416_rf_reset(struct athn_softc *, struct ieee80211_channel *);
146void ar5416_reset_bb_gain(struct athn_softc *, struct ieee80211_channel *);
147void ar9280_reset_rx_gain(struct athn_softc *, struct ieee80211_channel *);
148void ar9280_reset_tx_gain(struct athn_softc *, struct ieee80211_channel *);
149
150
151int
152ar5008_attach(struct athn_softc *sc)
153{
154 struct athn_ops *ops = &sc->ops;
155 struct ieee80211com *ic = &sc->sc_ic;
156 struct ar_base_eep_header *base;
157 uint8_t eep_ver, kc_entries_log;
158 int error;
159
160 /* Set callbacks for AR5008, AR9001 and AR9002 families. */
161 ops->gpio_read = ar5008_gpio_read;
162 ops->gpio_write = ar5008_gpio_write;
163 ops->gpio_config_input = ar5008_gpio_config_input;
164 ops->gpio_config_output = ar5008_gpio_config_output;
165 ops->rfsilent_init = ar5008_rfsilent_init;
166
167 ops->dma_alloc = ar5008_dma_alloc;
168 ops->dma_free = ar5008_dma_free;
169 ops->rx_enable = ar5008_rx_enable;
170 ops->intr = ar5008_intr;
171 ops->tx = ar5008_tx;
172
173 ops->set_rf_mode = ar5008_set_rf_mode;
174 ops->rf_bus_request = ar5008_rf_bus_request;
175 ops->rf_bus_release = ar5008_rf_bus_release;
176 ops->set_phy = ar5008_set_phy;
177 ops->set_delta_slope = ar5008_set_delta_slope;
178 ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
179 ops->init_baseband = ar5008_init_baseband;
180 ops->disable_phy = ar5008_disable_phy;
181 ops->set_rxchains = ar5008_set_rxchains;
182 ops->noisefloor_calib = ar5008_do_noisefloor_calib;
183 ops->init_noisefloor_calib = ar5008_init_noisefloor_calib;
184 ops->get_noisefloor = ar5008_get_noisefloor;
185 ops->apply_noisefloor = ar5008_apply_noisefloor;
186 ops->do_calib = ar5008_do_calib;
187 ops->next_calib = ar5008_next_calib;
188 ops->hw_init = ar5008_hw_init;
189
190 ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
191 ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
192 ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
193 ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
194 ops->set_firstep_level = ar5008_set_firstep_level;
195 ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;
196
197 /* Set MAC registers offsets. */
198 sc->obs_off = AR_OBS0x4080;
199 sc->gpio_input_en_off = AR_GPIO_INPUT_EN_VAL0x4054;
200
201 if (!(sc->flags & ATHN_FLAG_PCIE(1 << 0)))
202 athn_config_nonpcie(sc);
203 else
204 athn_config_pcie(sc);
205
206 /* Read entire ROM content in memory. */
207 if ((error = ar5008_read_rom(sc)) != 0) {
208 printf("%s: could not read ROM\n", sc->sc_dev.dv_xname);
209 return (error);
210 }
211
212 /* Get RF revision. */
213 sc->rf_rev = ar5416_get_rf_rev(sc);
214
215 base = sc->eep;
216 eep_ver = (base->version >> 12) & 0xf;
217 sc->eep_rev = (base->version & 0xfff);
218 if (eep_ver != AR_EEP_VER0xe || sc->eep_rev == 0) {
219 printf("%s: unsupported ROM version %d.%d\n",
220 sc->sc_dev.dv_xname, eep_ver, sc->eep_rev);
221 return (EINVAL22);
222 }
223
224 if (base->opCapFlags & AR_OPFLAGS_11A0x01) {
225 sc->flags |= ATHN_FLAG_11A(1 << 9);
226 if ((base->opCapFlags & AR_OPFLAGS_11N_5G200x10) == 0)
227 sc->flags |= ATHN_FLAG_11N(1 << 11);
228#ifdef notyet
229 if ((base->opCapFlags & AR_OPFLAGS_11N_5G400x04) == 0)
230 sc->flags |= ATHN_FLAG_11N(1 << 11);
231#endif
232 }
233 if (base->opCapFlags & AR_OPFLAGS_11G0x02) {
234 sc->flags |= ATHN_FLAG_11G(1 << 10);
235 if ((base->opCapFlags & AR_OPFLAGS_11N_2G200x20) == 0)
236 sc->flags |= ATHN_FLAG_11N(1 << 11);
237#ifdef notyet
238 if ((base->opCapFlags & AR_OPFLAGS_11N_2G400x08) == 0)
239 sc->flags |= ATHN_FLAG_11N(1 << 11);
240#endif
241 }
242
243 IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr)__builtin_memcpy((ic->ic_myaddr), (base->macAddr), (6));
244
245 /* Check if we have a hardware radio switch. */
246 if (base->rfSilent & AR_EEP_RFSILENT_ENABLED0x0001) {
247 sc->flags |= ATHN_FLAG_RFSILENT(1 << 5);
248 /* Get GPIO pin used by hardware radio switch. */
249 sc->rfsilent_pin = MS(base->rfSilent,
250 AR_EEP_RFSILENT_GPIO_SEL);
251 /* Get polarity of hardware radio switch. */
252 if (base->rfSilent & AR_EEP_RFSILENT_POLARITY0x0002)
253 sc->flags |= ATHN_FLAG_RFSILENT_REVERSED(1 << 6);
254 }
255
256 /* Get the number of HW key cache entries. */
257 kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES)(((uint32_t)(base->deviceCap) & 0xf000) >> 12);
258 sc->kc_entries = (kc_entries_log != 0) ?
259 1 << kc_entries_log : AR_KEYTABLE_SIZE128;
260 if (sc->kc_entries > AR_KEYTABLE_SIZE128)
261 sc->kc_entries = AR_KEYTABLE_SIZE128;
262
263 sc->txchainmask = base->txMask;
264 if (sc->mac_ver == AR_SREV_VERSION_5416_PCI0x00d &&
265 !(base->opCapFlags & AR_OPFLAGS_11A0x01)) {
266 /* For single-band AR5416 PCI, use GPIO pin 0. */
267 sc->rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
268 } else
269 sc->rxchainmask = base->rxMask;
270
271 ops->setup(sc);
272 return (0);
273}
274
275/*
276 * Read 16-bit word from ROM.
277 */
278int
279ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
280{
281 uint32_t reg;
282 int ntries;
283
284 reg = AR_READ(sc, AR_EEPROM_OFFSET(addr))(sc)->ops.read((sc), ((0x2000 + (addr) * 4)));
285 for (ntries = 0; ntries < 1000; ntries++) {
286 reg = AR_READ(sc, AR_EEPROM_STATUS_DATA)(sc)->ops.read((sc), (0x407c));
287 if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY0x00010000 |
288 AR_EEPROM_STATUS_DATA_PROT_ACCESS0x00040000))) {
289 *val = MS(reg, AR_EEPROM_STATUS_DATA_VAL)(((uint32_t)(reg) & 0x0000ffff) >> 0);
290 return (0);
291 }
292 DELAY(10)(*delay_func)(10);
293 }
294 *val = 0xffff;
295 return (ETIMEDOUT60);
296}
297
298int
299ar5008_read_rom(struct athn_softc *sc)
300{
301 uint32_t addr, end;
302 uint16_t magic, sum, *eep;
303 int need_swap = 0;
304 int error;
305
306 /* Determine ROM endianness. */
307 error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET0x0000, &magic);
308 if (error != 0)
309 return (error);
310 if (magic != AR_EEPROM_MAGIC0xa55a) {
311 if (magic != swap16(AR_EEPROM_MAGIC)) {
312 DPRINTF(("invalid ROM magic 0x%x != 0x%x\n",
313 magic, AR_EEPROM_MAGIC));
314 return (EIO5);
315 }
316 DPRINTF(("non-native ROM endianness\n"));
317 need_swap = 1;
318 }
319
320 /* Allocate space to store ROM in host memory. */
321 sc->eep = malloc(sc->eep_size, M_DEVBUF2, M_NOWAIT0x0002);
322 if (sc->eep == NULL((void *)0))
323 return (ENOMEM12);
324
325 /* Read entire ROM and compute checksum. */
326 sum = 0;
327 eep = sc->eep;
328 end = sc->eep_base + sc->eep_size / sizeof(uint16_t);
329 for (addr = sc->eep_base; addr < end; addr++, eep++) {
330 if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
331 DPRINTF(("could not read ROM at 0x%x\n", addr));
332 return (error);
333 }
334 if (need_swap)
335 *eep = swap16(*eep);
336 sum ^= *eep;
337 }
338 if (sum != 0xffff) {
339 printf("%s: bad ROM checksum 0x%04x\n",
340 sc->sc_dev.dv_xname, sum);
341 return (EIO5);
342 }
343 if (need_swap)
344 ar5008_swap_rom(sc);
345
346 return (0);
347}
348
349void
350ar5008_swap_rom(struct athn_softc *sc)
351{
352 struct ar_base_eep_header *base = sc->eep;
353
354 /* Swap common fields first. */
355 base->length = swap16(base->length);
356 base->version = swap16(base->version);
357 base->regDmn[0] = swap16(base->regDmn[0]);
358 base->regDmn[1] = swap16(base->regDmn[1]);
359 base->rfSilent = swap16(base->rfSilent);
360 base->blueToothOptions = swap16(base->blueToothOptions);
361 base->deviceCap = swap16(base->deviceCap);
362
363 /* Swap device-dependent fields. */
364 sc->ops.swap_rom(sc);
365}
366
367/*
368 * Access to General Purpose Input/Output ports.
369 */
370int
371ar5008_gpio_read(struct athn_softc *sc, int pin)
372{
373 KASSERT(pin < sc->ngpiopins);
374 if ((sc->flags & ATHN_FLAG_USB(1 << 1)) && !AR_SREV_9271(sc)((sc)->mac_ver == 0x140))
375 return (!((AR_READ(sc, AR7010_GPIO_IN)(sc)->ops.read((sc), (0x52004)) >> pin) & 1));
376 return ((AR_READ(sc, AR_GPIO_IN_OUT)(sc)->ops.read((sc), (0x4048)) >> (sc->ngpiopins + pin)) & 1);
377}
378
379void
380ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
381{
382 uint32_t reg;
383
384 KASSERT(pin < sc->ngpiopins);
385
386 if (sc->flags & ATHN_FLAG_USB(1 << 1))
387 set = !set; /* AR9271/AR7010 is reversed. */
388
389 if ((sc->flags & ATHN_FLAG_USB(1 << 1)) && !AR_SREV_9271(sc)((sc)->mac_ver == 0x140)) {
390 /* Special case for AR7010. */
391 reg = AR_READ(sc, AR7010_GPIO_OUT)(sc)->ops.read((sc), (0x52008));
392 if (set)
393 reg |= 1 << pin;
394 else
395 reg &= ~(1 << pin);
396 AR_WRITE(sc, AR7010_GPIO_OUT, reg)(sc)->ops.write((sc), (0x52008), (reg));
397 } else {
398 reg = AR_READ(sc, AR_GPIO_IN_OUT)(sc)->ops.read((sc), (0x4048));
399 if (set)
400 reg |= 1 << pin;
401 else
402 reg &= ~(1 << pin);
403 AR_WRITE(sc, AR_GPIO_IN_OUT, reg)(sc)->ops.write((sc), (0x4048), (reg));
404 }
405 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
406}
407
408void
409ar5008_gpio_config_input(struct athn_softc *sc, int pin)
410{
411 uint32_t reg;
412
413 if ((sc->flags & ATHN_FLAG_USB(1 << 1)) && !AR_SREV_9271(sc)((sc)->mac_ver == 0x140)) {
414 /* Special case for AR7010. */
415 AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
416 } else {
417 reg = AR_READ(sc, AR_GPIO_OE_OUT)(sc)->ops.read((sc), (0x404c));
418 reg &= ~(AR_GPIO_OE_OUT_DRV_M0x00000003 << (pin * 2));
419 reg |= AR_GPIO_OE_OUT_DRV_NO0 << (pin * 2);
420 AR_WRITE(sc, AR_GPIO_OE_OUT, reg)(sc)->ops.write((sc), (0x404c), (reg));
421 }
422 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
423}
424
425void
426ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
427{
428 uint32_t reg;
429 int mux, off;
430
431 if ((sc->flags & ATHN_FLAG_USB(1 << 1)) && !AR_SREV_9271(sc)((sc)->mac_ver == 0x140)) {
432 /* Special case for AR7010. */
433 AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
434 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
435 return;
436 }
437 mux = pin / 6;
438 off = pin % 6;
439
440 reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux))(sc)->ops.read((sc), ((0x4060 + (mux) * 4)));
441 if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
442 reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
443 reg &= ~(0x1f << (off * 5));
444 reg |= (type & 0x1f) << (off * 5);
445 AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg)(sc)->ops.write((sc), ((0x4060 + (mux) * 4)), (reg));
446
447 reg = AR_READ(sc, AR_GPIO_OE_OUT)(sc)->ops.read((sc), (0x404c));
448 reg &= ~(AR_GPIO_OE_OUT_DRV_M0x00000003 << (pin * 2));
449 reg |= AR_GPIO_OE_OUT_DRV_ALL3 << (pin * 2);
450 AR_WRITE(sc, AR_GPIO_OE_OUT, reg)(sc)->ops.write((sc), (0x404c), (reg));
451 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
452}
453
454void
455ar5008_rfsilent_init(struct athn_softc *sc)
456{
457 uint32_t reg;
458
459 /* Configure hardware radio switch. */
460 AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
461 reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
462 reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
463 AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
464 ar5008_gpio_config_input(sc, sc->rfsilent_pin);
465 AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
466 if (!(sc->flags & ATHN_FLAG_RFSILENT_REVERSED)) {
467 AR_SETBITS(sc, AR_GPIO_INTR_POL,
468 AR_GPIO_INTR_POL_PIN(sc->rfsilent_pin));
469 }
470 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
471}
472
473int
474ar5008_dma_alloc(struct athn_softc *sc)
475{
476 int error;
477
478 error = ar5008_tx_alloc(sc);
479 if (error != 0)
480 return (error);
481
482 error = ar5008_rx_alloc(sc);
483 if (error != 0)
484 return (error);
485
486 return (0);
487}
488
489void
490ar5008_dma_free(struct athn_softc *sc)
491{
492 ar5008_tx_free(sc);
493 ar5008_rx_free(sc);
494}
495
496int
497ar5008_tx_alloc(struct athn_softc *sc)
498{
499 struct athn_tx_buf *bf;
500 bus_size_t size;
501 int error, nsegs, i;
502
503 /*
504 * Allocate a pool of Tx descriptors shared between all Tx queues.
505 */
506 size = ATHN_NTXBUFS64 * AR5008_MAX_SCATTER16 * sizeof(struct ar_tx_desc);
507
508 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
509 BUS_DMA_NOWAIT, &sc->map);
510 if (error != 0)
511 goto fail;
512
513 error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->seg, 1,
514 &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
515 if (error != 0)
516 goto fail;
517
518 error = bus_dmamem_map(sc->sc_dmat, &sc->seg, 1, size,
519 (caddr_t *)&sc->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
520 if (error != 0)
521 goto fail;
522
523 error = bus_dmamap_load_raw(sc->sc_dmat, sc->map, &sc->seg, 1, size,
524 BUS_DMA_NOWAIT);
525 if (error != 0)
526 goto fail;
527
528 SIMPLEQ_INIT(&sc->txbufs);
529 for (i = 0; i < ATHN_NTXBUFS64; i++) {
530 bf = &sc->txpool[i];
531
532 error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
533 AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
534 &bf->bf_map);
535 if (error != 0) {
536 printf("%s: could not create Tx buf DMA map\n",
537 sc->sc_dev.dv_xname);
538 goto fail;
539 }
540
541 bf->bf_descs =
542 &((struct ar_tx_desc *)sc->descs)[i * AR5008_MAX_SCATTER16];
543 bf->bf_daddr = sc->map->dm_segs[0].ds_addr +
544 i * AR5008_MAX_SCATTER16 * sizeof(struct ar_tx_desc);
545
546 SIMPLEQ_INSERT_TAIL(&sc->txbufs, bf, bf_list);
547 }
548 return (0);
549 fail:
550 ar5008_tx_free(sc);
551 return (error);
552}
553
554void
555ar5008_tx_free(struct athn_softc *sc)
556{
557 struct athn_tx_buf *bf;
558 int i;
559
560 for (i = 0; i < ATHN_NTXBUFS64; i++) {
561 bf = &sc->txpool[i];
562
563 if (bf->bf_map != NULL((void *)0))
564 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
565 }
566 /* Free Tx descriptors. */
567 if (sc->map != NULL) {
568 if (sc->descs != NULL) {
569 bus_dmamap_unload(sc->sc_dmat, sc->map);
570 bus_dmamem_unmap(sc->sc_dmat, (caddr_t)sc->descs,
571 ATHN_NTXBUFS * AR5008_MAX_SCATTER *
572 sizeof(struct ar_tx_desc));
573 bus_dmamem_free(sc->sc_dmat, &sc->seg, 1);
574 }
575 bus_dmamap_destroy(sc->sc_dmat, sc->map);
576 }
577}
578
579int
580ar5008_rx_alloc(struct athn_softc *sc)
581{
582 struct athn_rxq *rxq = &sc->rxq[0];
583 struct athn_rx_buf *bf;
584 struct ar_rx_desc *ds;
585 bus_size_t size;
586 int error, nsegs, i;
587
588 rxq->bf = mallocarray(ATHN_NRXBUFS64, sizeof(*bf), M_DEVBUF2,
589 M_NOWAIT0x0002 | M_ZERO0x0008);
590 if (rxq->bf == NULL((void *)0))
591 return (ENOMEM12);
592
593 size = ATHN_NRXBUFS64 * sizeof(struct ar_rx_desc);
594
595 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
596 BUS_DMA_NOWAIT, &rxq->map);
597 if (error != 0)
598 goto fail;
599
600 error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
601 &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
602 if (error != 0)
603 goto fail;
604
605 error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
606 (caddr_t *)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
607 if (error != 0)
608 goto fail;
609
610 error = bus_dmamap_load_raw(sc->sc_dmat, rxq->map, &rxq->seg, 1,
611 size, BUS_DMA_NOWAIT);
612 if (error != 0)
613 goto fail;
614
615 for (i = 0; i < ATHN_NRXBUFS64; i++) {
616 bf = &rxq->bf[i];
617 ds = &((struct ar_rx_desc *)rxq->descs)[i];
618
619 error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
620 ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
621 &bf->bf_map);
622 if (error != 0) {
623 printf("%s: could not create Rx buf DMA map\n",
624 sc->sc_dev.dv_xname);
625 goto fail;
626 }
627 /*
628 * Assumes MCLGETL returns cache-line-size aligned buffers.
629 */
630 bf->bf_m = MCLGETL(NULL, M_DONTWAIT, ATHN_RXBUFSZ)m_clget((((void *)0)), (0x0002), (3872));
631 if (bf->bf_m == NULL((void *)0)) {
632 printf("%s: could not allocate Rx mbuf\n",
633 sc->sc_dev.dv_xname);
634 error = ENOBUFS55;
635 goto fail;
636 }
637
638 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
639 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
640 BUS_DMA_NOWAIT | BUS_DMA_READ);
641 if (error != 0) {
642 printf("%s: could not DMA map Rx buffer\n",
643 sc->sc_dev.dv_xname);
644 goto fail;
645 }
646
647 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
648 BUS_DMASYNC_PREREAD);
649
650 bf->bf_desc = ds;
651 bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
652 i * sizeof(struct ar_rx_desc);
653 }
654 return (0);
655 fail:
656 ar5008_rx_free(sc);
657 return (error);
658}
659
660void
661ar5008_rx_free(struct athn_softc *sc)
662{
663 struct athn_rxq *rxq = &sc->rxq[0];
664 struct athn_rx_buf *bf;
665 int i;
666
667 if (rxq->bf == NULL((void *)0))
668 return;
669 for (i = 0; i < ATHN_NRXBUFS64; i++) {
670 bf = &rxq->bf[i];
671
672 if (bf->bf_map != NULL((void *)0))
673 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
674 m_freem(bf->bf_m);
675 }
676 free(rxq->bf, M_DEVBUF, 0);
677
678 /* Free Rx descriptors. */
679 if (rxq->map != NULL) {
680 if (rxq->descs != NULL) {
681 bus_dmamap_unload(sc->sc_dmat, rxq->map);
682 bus_dmamem_unmap(sc->sc_dmat, (caddr_t)rxq->descs,
683 ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
684 bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
685 }
686 bus_dmamap_destroy(sc->sc_dmat, rxq->map);
687 }
688}
689
690void
691ar5008_rx_enable(struct athn_softc *sc)
692{
693 struct athn_rxq *rxq = &sc->rxq[0];
694 struct athn_rx_buf *bf;
695 struct ar_rx_desc *ds;
696 int i;
697
698 /* Setup and link Rx descriptors. */
699 SIMPLEQ_INIT(&rxq->head);
700 rxq->lastds = NULL((void *)0);
701 for (i = 0; i < ATHN_NRXBUFS64; i++) {
702 bf = &rxq->bf[i];
703 ds = bf->bf_desc;
704
705 memset(ds, 0, sizeof(*ds))__builtin_memset((ds), (0), (sizeof(*ds)));
706 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
707 ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ)(((uint32_t)(3872) << 0) & 0x00000fff);
708
709 if (rxq->lastds != NULL((void *)0)) {
710 ((struct ar_rx_desc *)rxq->lastds)->ds_link =
711 bf->bf_daddr;
712 }
713 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
714 rxq->lastds = ds;
715 }
716 bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
717 BUS_DMASYNC_PREREAD);
718
719 /* Enable Rx. */
720 AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
721 AR_WRITE(sc, AR_CR, AR_CR_RXE);
722 AR_WRITE_BARRIER(sc);
723}
724
725#if NBPFILTER1 > 0
726void
727ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
728 struct ar_rx_desc *ds)
729{
730#define IEEE80211_RADIOTAP_F_SHORTGI0x80 0x80 /* XXX from FBSD */
731
732 struct athn_rx_radiotap_header *tap = &sc->sc_rxtapsc_rxtapu.th;
733 struct ieee80211com *ic = &sc->sc_ic;
734 uint64_t tsf;
735 uint32_t tstamp;
736 uint8_t rate;
737
738 /* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
739 tstamp = ds->ds_status2;
740 tsf = AR_READ(sc, AR_TSF_U32)(sc)->ops.read((sc), (0x8050));
741 tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32)(sc)->ops.read((sc), (0x804c));
742 if ((tsf & 0x7fff) < tstamp)
743 tsf -= 0x8000;
744 tsf = (tsf & ~0x7fff) | tstamp;
745
746 tap->wr_flags = IEEE80211_RADIOTAP_F_FCS0x10;
747 tap->wr_tsft = htole64(tsf)((__uint64_t)(tsf));
748 tap->wr_chan_freq = htole16(ic->ic_bss->ni_chan->ic_freq)((__uint16_t)(ic->ic_bss->ni_chan->ic_freq));
749 tap->wr_chan_flags = htole16(ic->ic_bss->ni_chan->ic_flags)((__uint16_t)(ic->ic_bss->ni_chan->ic_flags));
750 tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
751 /* XXX noise. */
752 tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
753 tap->wr_rate = 0; /* In case it can't be found below. */
754 if (AR_SREV_5416_20_OR_LATER(sc))
755 rate = MS(ds->ds_status0, AR_RXS0_RATE);
756 else
757 rate = MS(ds->ds_status3, AR_RXS3_RATE);
758 if (rate & 0x80) { /* HT. */
759 /* Bit 7 set means HT MCS instead of rate. */
760 tap->wr_rate = rate;
761 if (!(ds->ds_status3 & AR_RXS3_GI0x00000001))
762 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI0x80;
763
764 } else if (rate & 0x10) { /* CCK. */
765 if (rate & 0x04)
766 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE0x02;
767 switch (rate & ~0x14) {
768 case 0xb: tap->wr_rate = 2; break;
769 case 0xa: tap->wr_rate = 4; break;
770 case 0x9: tap->wr_rate = 11; break;
771 case 0x8: tap->wr_rate = 22; break;
772 }
773 } else { /* OFDM. */
774 switch (rate) {
775 case 0xb: tap->wr_rate = 12; break;
776 case 0xf: tap->wr_rate = 18; break;
777 case 0xa: tap->wr_rate = 24; break;
778 case 0xe: tap->wr_rate = 36; break;
779 case 0x9: tap->wr_rate = 48; break;
780 case 0xd: tap->wr_rate = 72; break;
781 case 0x8: tap->wr_rate = 96; break;
782 case 0xc: tap->wr_rate = 108; break;
783 }
784 }
785 bpf_mtap_hdr(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m, BPF_DIRECTION_IN(1 << 0));
786}
787#endif
788
789int
790ar5008_ccmp_decap(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni)
791{
792 struct ieee80211com *ic = &sc->sc_ic;
793 struct ieee80211_key *k;
794 struct ieee80211_frame *wh;
795 struct ieee80211_rx_ba *ba;
796 uint64_t pn, *prsc;
797 u_int8_t *ivp;
798 uint8_t tid;
799 int hdrlen, hasqos;
800 uintptr_t entry;
801
802 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
803 hdrlen = ieee80211_get_hdrlen(wh);
804 ivp = mtod(m, u_int8_t *)((u_int8_t *)((m)->m_hdr.mh_data)) + hdrlen;
805
806 /* find key for decryption */
807 k = ieee80211_get_rxkey(ic, m, ni);
808 if (k == NULL((void *)0) || k->k_cipher != IEEE80211_CIPHER_CCMP)
809 return 1;
810
811 /* Sanity checks to ensure this is really a key we installed. */
812 entry = (uintptr_t)k->k_priv;
813 if (k->k_flags & IEEE80211_KEY_GROUP0x00000001) {
814 if (k->k_id >= IEEE80211_WEP_NKID4 ||
815 entry != k->k_id)
816 return 1;
817 } else {
818#ifndef IEEE80211_STA_ONLY
819 if (ic->ic_opmode == IEEE80211_M_HOSTAP) {
820 if (entry != IEEE80211_WEP_NKID4 +
821 IEEE80211_AID(ni->ni_associd)((ni->ni_associd) &~ 0xc000))
822 return 1;
823 } else
824#endif
825 if (entry != IEEE80211_WEP_NKID4)
826 return 1;
827 }
828
829 /* Check that ExtIV bit is set. */
830 if (!(ivp[3] & IEEE80211_WEP_EXTIV0x20))
831 return 1;
832
833 hasqos = ieee80211_has_qos(wh);
834 tid = hasqos ? ieee80211_get_qos(wh) & IEEE80211_QOS_TID0x000f : 0;
835 ba = hasqos ? &ni->ni_rx_ba[tid] : NULL((void *)0);
836 prsc = &k->k_rsc[tid];
837
838 /* Extract the 48-bit PN from the CCMP header. */
839 pn = (uint64_t)ivp[0] |
840 (uint64_t)ivp[1] << 8 |
841 (uint64_t)ivp[4] << 16 |
842 (uint64_t)ivp[5] << 24 |
843 (uint64_t)ivp[6] << 32 |
844 (uint64_t)ivp[7] << 40;
845 if (pn <= *prsc) {
846 ic->ic_stats.is_ccmp_replays++;
847 return 1;
848 }
849 /* Last seen packet number is updated in ieee80211_inputm(). */
850
851 /* Strip MIC. IV will be stripped by ieee80211_inputm(). */
852 m_adj(m, -IEEE80211_CCMP_MICLEN8);
853 return 0;
854}
855
856static __inline int
857ar5008_rx_process(struct athn_softc *sc, struct mbuf_list *ml)
858{
859 struct ieee80211com *ic = &sc->sc_ic;
860 struct ifnet *ifp = &ic->ic_ific_ac.ac_if;
861 struct athn_rxq *rxq = &sc->rxq[0];
862 struct athn_rx_buf *bf, *nbf;
863 struct ar_rx_desc *ds;
864 struct ieee80211_frame *wh;
865 struct ieee80211_rxinfo rxi;
866 struct ieee80211_node *ni;
867 struct mbuf *m, *m1;
868 int error, len, michael_mic_failure = 0;
869
870 bf = SIMPLEQ_FIRST(&rxq->head)((&rxq->head)->sqh_first);
871 if (__predict_false(bf == NULL)__builtin_expect(((bf == ((void *)0)) != 0), 0)) { /* Should not happen. */
872 printf("%s: Rx queue is empty!\n", sc->sc_dev.dv_xname);
873 return (ENOENT2);
874 }
875 ds = bf->bf_desc;
876
877 if (!(ds->ds_status8 & AR_RXS8_DONE0x00000001)) {
878 /*
879 * On some parts, the status words can get corrupted
880 * (including the "done" bit), so we check the next
881 * descriptor "done" bit. If it is set, it is a good
882 * indication that the status words are corrupted, so
883 * we skip this descriptor and drop the frame.
884 */
885 nbf = SIMPLEQ_NEXT(bf, bf_list)((bf)->bf_list.sqe_next);
886 if (nbf != NULL((void *)0) &&
887 (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
888 AR_RXS8_DONE0x00000001)) {
889 DPRINTF(("corrupted descriptor status=0x%x\n",
890 ds->ds_status8));
891 /* HW will not "move" RXDP in this case, so do it. */
892 AR_WRITE(sc, AR_RXDP, nbf->bf_daddr)(sc)->ops.write((sc), (0x000c), (nbf->bf_daddr));
893 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
894 ifp->if_ierrorsif_data.ifi_ierrors++;
895 goto skip;
896 }
897 return (EBUSY16);
898 }
899
900 if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
901 /* Drop frames that span multiple Rx descriptors. */
902 DPRINTF(("dropping split frame\n"));
903 ifp->if_ierrorsif_data.ifi_ierrors++;
904 goto skip;
905 }
906 if (!(ds->ds_status8 & AR_RXS8_FRAME_OK0x00000002)) {
907 if (ds->ds_status8 & AR_RXS8_CRC_ERR0x00000004)
908 DPRINTFN(6, ("CRC error\n"));
909 else if (ds->ds_status8 & AR_RXS8_PHY_ERR0x00000010)
910 DPRINTFN(6, ("PHY error=0x%x\n",
911 MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE)));
912 else if (ds->ds_status8 & (AR_RXS8_DECRYPT_CRC_ERR0x00000008 |
913 AR_RXS8_KEY_MISS0x80000000 | AR_RXS8_DECRYPT_BUSY_ERR0x40000000)) {
914 DPRINTFN(6, ("Decryption CRC error\n"));
915 ic->ic_stats.is_ccmp_dec_errs++;
916 } else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR0x00000020) {
917 DPRINTFN(2, ("Michael MIC failure\n"));
918 michael_mic_failure = 1;
919 }
920 if (!michael_mic_failure) {
921 ifp->if_ierrorsif_data.ifi_ierrors++;
922 goto skip;
923 }
924 } else {
925 if (ds->ds_status8 & (AR_RXS8_CRC_ERR0x00000004 | AR_RXS8_PHY_ERR0x00000010 |
926 AR_RXS8_DECRYPT_CRC_ERR0x00000008 | AR_RXS8_MICHAEL_ERR0x00000020)) {
927 ifp->if_ierrorsif_data.ifi_ierrors++;
928 goto skip;
929 }
930 }
931
932 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN)(((uint32_t)(ds->ds_status1) & 0x00000fff) >> 0);
933 if (__predict_false(len < IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
934 DPRINTF(("corrupted descriptor length=%d\n", len));
935 ifp->if_ierrorsif_data.ifi_ierrors++;
936 goto skip;
937 }
938
939 /* Allocate a new Rx buffer. */
940 m1 = MCLGETL(NULL, M_DONTWAIT, ATHN_RXBUFSZ)m_clget((((void *)0)), (0x0002), (3872));
941 if (__predict_false(m1 == NULL)__builtin_expect(((m1 == ((void *)0)) != 0), 0)) {
942 ic->ic_stats.is_rx_nombuf++;
943 ifp->if_ierrorsif_data.ifi_ierrors++;
944 goto skip;
945 }
946
947 /* Sync and unmap the old Rx buffer. */
948 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
949 BUS_DMASYNC_POSTREAD);
950 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
951
952 /* Map the new Rx buffer. */
953 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
954 ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
955 if (__predict_false(error != 0)) {
956 m_freem(m1);
957
958 /* Remap the old Rx buffer or panic. */
959 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
960 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
961 BUS_DMA_NOWAIT | BUS_DMA_READ);
962 KASSERT(error != 0);
963 ifp->if_ierrors++;
964 goto skip;
965 }
966
967 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
968 BUS_DMASYNC_PREREAD);
970 /* Write physical address of new Rx buffer. */
971 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
972
973 m = bf->bf_m;
974 bf->bf_m = m1;
975
976 /* Finalize mbuf. */
977 m->m_pkthdrM_dat.MH.MH_pkthdr.len = m->m_lenm_hdr.mh_len = len;
978
979 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
980
981 if (michael_mic_failure) {
982 /*
983 * Check that it is not a control frame
984 * (invalid MIC failures on valid ctl frames).
985 * Validate the transmitter's address to avoid passing
986 * corrupt frames with bogus addresses to net80211.
987 */
988 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL0x04)) {
989 switch (ic->ic_opmode) {
990#ifndef IEEE80211_STA_ONLY
991 case IEEE80211_M_HOSTAP:
992 if (ieee80211_find_node(ic, wh->i_addr2))
993 michael_mic_failure = 0;
994 break;
995#endif
996 case IEEE80211_M_STA:
997 if (IEEE80211_ADDR_EQ(wh->i_addr2,
998 ic->ic_bss->ni_macaddr))
999 michael_mic_failure = 0;
1000 break;
1001 case IEEE80211_M_MONITOR:
1002 michael_mic_failure = 0;
1003 break;
1004 default:
1005 break;
1006 }
1007 }
1008
1009 if (michael_mic_failure) {
1010 /* Report Michael MIC failures to net80211. */
1011 if ((ic->ic_rsnciphers & IEEE80211_CIPHER_TKIP) ||
1012 ic->ic_rsngroupcipher == IEEE80211_CIPHER_TKIP) {
1013 ic->ic_stats.is_rx_locmicfail++;
1014 ieee80211_michael_mic_failure(ic, 0);
1015 }
1016 ifp->if_ierrorsif_data.ifi_ierrors++;
1017 m_freem(m);
1018 goto skip;
1019 }
1020 }
1021
1022 /* Grab a reference to the source node. */
1023 ni = ieee80211_find_rxnode(ic, wh);
1024
1025 /* Remove any HW padding after the 802.11 header. */
1026 if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL0x04)) {
1027 u_int hdrlen = ieee80211_get_hdrlen(wh);
1028 if (hdrlen & 3) {
1029 memmove((caddr_t)wh + 2, wh, hdrlen)__builtin_memmove(((caddr_t)wh + 2), (wh), (hdrlen));
1030 m_adj(m, 2);
1031 }
1032 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
1033 }
1034#if NBPFILTER1 > 0
1035 if (__predict_false(sc->sc_drvbpf != NULL)__builtin_expect(((sc->sc_drvbpf != ((void *)0)) != 0), 0))
1036 ar5008_rx_radiotap(sc, m, ds);
1037#endif
1038 /* Trim 802.11 FCS after radiotap. */
1039 m_adj(m, -IEEE80211_CRC_LEN4);
1040
1041 /* Send the frame to the 802.11 layer. */
1042 rxi.rxi_flags = 0; /* XXX */
1043 rxi.rxi_rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
1044 rxi.rxi_rssi += AR_DEFAULT_NOISE_FLOOR(-100);
1045 rxi.rxi_tstamp = ds->ds_status2;
1046 if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL0x04) &&
1047 (wh->i_fc[1] & IEEE80211_FC1_PROTECTED0x40) &&
1048 (ic->ic_flags & IEEE80211_F_RSNON0x00200000) &&
1049 (ni->ni_flags & IEEE80211_NODE_RXPROT0x0008) &&
1050 ((!IEEE80211_IS_MULTICAST(wh->i_addr1)(*(wh->i_addr1) & 0x01) &&
1051 ni->ni_rsncipher == IEEE80211_CIPHER_CCMP) ||
1052 (IEEE80211_IS_MULTICAST(wh->i_addr1)(*(wh->i_addr1) & 0x01) &&
1053 ni->ni_rsngroupcipher == IEEE80211_CIPHER_CCMP))) {
1054 if (ar5008_ccmp_decap(sc, m, ni) != 0) {
1055 ifp->if_ierrorsif_data.ifi_ierrors++;
1056 ieee80211_release_node(ic, ni);
1057 m_freem(m);
1058 goto skip;
1059 }
1060 rxi.rxi_flags |= IEEE80211_RXI_HWDEC0x00000001;
1061 }
1062 ieee80211_inputm(ifp, m, ni, &rxi, ml);
1063
1064 /* Node is no longer needed. */
1065 ieee80211_release_node(ic, ni);
1066
1067 skip:
1068 /* Unlink this descriptor from head. */
1069 SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
1070 memset(&ds->ds_status0, 0, 36); /* XXX Really needed? */
1071 ds->ds_status8 &= ~AR_RXS8_DONE;
1072 ds->ds_link = 0;
1073
1074 /* Re-use this descriptor and link it to tail. */
1075 if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
1076 ((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
1077 else
1078 AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
1079 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
1080 rxq->lastds = ds;
1081
1082 /* Re-enable Rx. */
1083 AR_WRITE(sc, AR_CR, AR_CR_RXE);
1084 AR_WRITE_BARRIER(sc);
1085 return (0);
1086}
1087
1088void
1089ar5008_rx_intr(struct athn_softc *sc)
1090{
1091 struct mbuf_list ml = MBUF_LIST_INITIALIZER(){ ((void *)0), ((void *)0), 0 };
1092 struct ieee80211com *ic = &sc->sc_ic;
1093 struct ifnet *ifp = &ic->ic_ific_ac.ac_if;
1094
1095 while (ar5008_rx_process(sc, &ml) == 0);
1096
1097 if_input(ifp, &ml);
1098}
1099
1100int
1101ar5008_tx_process(struct athn_softc *sc, int qid)
1102{
1103 struct ieee80211com *ic = &sc->sc_ic;
1104 struct ifnet *ifp = &ic->ic_ific_ac.ac_if;
1105 struct athn_txq *txq = &sc->txq[qid];
1106 struct athn_node *an;
1107 struct ieee80211_node *ni;
1108 struct athn_tx_buf *bf;
1109 struct ar_tx_desc *ds;
1110 uint8_t failcnt;
1111 int txfail = 0, rtscts;
1112
1113 bf = SIMPLEQ_FIRST(&txq->head)((&txq->head)->sqh_first);
1114 if (bf == NULL((void *)0))
1115 return (ENOENT2);
1116 /* Get descriptor of last DMA segment. */
1117 ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];
1118
1119 if (!(ds->ds_status9 & AR_TXS9_DONE0x00000001))
1120 return (EBUSY16);
1121
1122 SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
1123
1124 sc->sc_tx_timer = 0;
1125
1126 /* These status bits are valid if "FRM_XMIT_OK" is clear. */
1127 if ((ds->ds_status1 & AR_TXS1_FRM_XMIT_OK0x00000001) == 0) {
1128 txfail = (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES0x00000002);
1129 if (txfail)
1130 ifp->if_oerrorsif_data.ifi_oerrors++;
1131 if (ds->ds_status1 & AR_TXS1_UNDERRUN(0x00000004 | 0x00010000 | 0x00020000))
1132 athn_inc_tx_trigger_level(sc);
1133 }
1134
1135 an = (struct athn_node *)bf->bf_ni;
1136 ni = (struct ieee80211_node *)bf->bf_ni;
1137
1138 /*
1139 * NB: the data fail count contains the number of un-acked tries
1140 * for the final series used. We must add the number of tries for
1141 * each series that was fully processed to punish transmit rates in
1142 * the earlier series which did not perform well.
1143 */
1144 failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT)(((uint32_t)(ds->ds_status1) & 0x00000f00) >> 8);
1145 /* Assume two tries per series, as per AR_TXC2_XMIT_DATA_TRIESx. */
1146 failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;
1147
1148 rtscts = (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE0x00400000 | AR_TXC0_CTS_ENABLE0x80000000));
1149
1150 /* Update rate control statistics. */
1151 if ((ni->ni_flags & IEEE80211_NODE_HT0x0400) && ic->ic_fixed_mcs == -1) {
1152 const struct ieee80211_ht_rateset *rs =
1153 ieee80211_ra_get_ht_rateset(bf->bf_txmcs, 0 /* chan40 */,
1154 ieee80211_node_supports_ht_sgi20(ni));
1155 unsigned int retries = 0, i;
1156 int mcs = bf->bf_txmcs;
1157
1158 /* With RTS/CTS each Tx series used the same MCS. */
1159 if (rtscts) {
1160 retries = failcnt;
1161 } else {
1162 for (i = 0; i < failcnt; i++) {
1163 if (mcs > rs->min_mcs) {
1164 ieee80211_ra_add_stats_ht(&an->rn,
1165 ic, ni, mcs, 1, 1);
1166 if (i % 2) /* two tries per series */
1167 mcs--;
1168 } else
1169 retries++;
1170 }
1171 }
1172
1173 if (txfail && retries == 0) {
1174 ieee80211_ra_add_stats_ht(&an->rn, ic, ni,
1175 mcs, 1, 1);
1176 } else {
1177 ieee80211_ra_add_stats_ht(&an->rn, ic, ni,
1178 mcs, retries + 1, retries);
1179 }
1180 if (ic->ic_state == IEEE80211_S_RUN) {
1181#ifndef IEEE80211_STA_ONLY
1182 if (ic->ic_opmode != IEEE80211_M_HOSTAP ||
1183 ni->ni_state == IEEE80211_STA_ASSOC)
1184#endif
1185 ieee80211_ra_choose(&an->rn, ic, ni);
1186 }
1187 } else if (ic->ic_fixed_rate == -1) {
1188 an->amn.amn_txcnt++;
1189 if (failcnt > 0)
1190 an->amn.amn_retrycnt++;
1191 }
1192 DPRINTFN(5, ("Tx done qid=%d status1=%d fail count=%d\n",
1193 qid, ds->ds_status1, failcnt));
1194
1195 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1196 BUS_DMASYNC_POSTWRITE);
1197 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1198
1199 m_freem(bf->bf_m);
1200 bf->bf_m = NULL((void *)0);
1201 ieee80211_release_node(ic, bf->bf_ni);
1202 bf->bf_ni = NULL((void *)0);
1203
1204 /* Link Tx buffer back to global free list. */
1205 SIMPLEQ_INSERT_TAIL(&sc->txbufs, bf, bf_list);
1206 return (0);
1207}
1208
1209void
1210ar5008_tx_intr(struct athn_softc *sc)
1211{
1212 struct ieee80211com *ic = &sc->sc_ic;
1213 struct ifnet *ifp = &ic->ic_ific_ac.ac_if;
1214 uint16_t mask = 0;
1215 uint32_t reg;
1216 int qid;
1217
1218 reg = AR_READ(sc, AR_ISR_S0_S)(sc)->ops.read((sc), (0x00c4));
1219 mask |= MS(reg, AR_ISR_S0_QCU_TXOK)(((uint32_t)(reg) & 0x000003ff) >> 0);
1220 mask |= MS(reg, AR_ISR_S0_QCU_TXDESC)(((uint32_t)(reg) & 0x03ff0000) >> 16);
1221
1222 reg = AR_READ(sc, AR_ISR_S1_S)(sc)->ops.read((sc), (0x00c8));
1223 mask |= MS(reg, AR_ISR_S1_QCU_TXERR)(((uint32_t)(reg) & 0x000003ff) >> 0);
1224 mask |= MS(reg, AR_ISR_S1_QCU_TXEOL)(((uint32_t)(reg) & 0x03ff0000) >> 16);
1225
1226 DPRINTFN(4, ("Tx interrupt mask=0x%x\n", mask));
1227 for (qid = 0; mask != 0; mask >>= 1, qid++) {
1228 if (mask & 1)
1229 while (ar5008_tx_process(sc, qid) == 0);
1230 }
1231 if (!SIMPLEQ_EMPTY(&sc->txbufs)(((&sc->txbufs)->sqh_first) == ((void *)0))) {
1232 ifq_clr_oactive(&ifp->if_snd);
1233 ifp->if_start(ifp);
1234 }
1235}
1236
1237#ifndef IEEE80211_STA_ONLY
1238/*
1239 * Process Software Beacon Alert interrupts.
1240 */
1241int
1242ar5008_swba_intr(struct athn_softc *sc)
1243{
1244 struct ieee80211com *ic = &sc->sc_ic;
1245 struct ifnet *ifp = &ic->ic_ific_ac.ac_if;
1246 struct ieee80211_node *ni = ic->ic_bss;
1247 struct athn_tx_buf *bf = sc->bcnbuf;
1248 struct ieee80211_frame *wh;
1249 struct ar_tx_desc *ds;
1250 struct mbuf *m;
1251 uint8_t ridx, hwrate;
1252 int error, totlen;
1253
1254 if (ic->ic_tim_mcast_pending &&
1255 mq_empty(&ni->ni_savedq)((&(&ni->ni_savedq)->mq_list)->ml_len == 0) &&
1256 SIMPLEQ_EMPTY(&sc->txq[ATHN_QID_CAB].head)(((&sc->txq[6].head)->sqh_first) == ((void *)0)))
1257 ic->ic_tim_mcast_pending = 0;
1258
1259 if (ic->ic_dtim_count == 0)
1260 ic->ic_dtim_count = ic->ic_dtim_period - 1;
1261 else
1262 ic->ic_dtim_count--;
1263
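	/*
	 * Group-addressed frames buffered for power-saving stations are
	 * flushed on the CAB (content-after-beacon) queue right after a
	 * DTIM beacon; the dequeue loop further below handles this once
	 * the new beacon descriptor has been set up.
	 */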
1264 /* Make sure previous beacon has been sent. */
1265 if (athn_tx_pending(sc, ATHN_QID_BEACON7)) {
1266 DPRINTF(("beacon stuck\n"));
1267 return (EBUSY16);
1268 }
1269 /* Get new beacon. */
1270 m = ieee80211_beacon_alloc(ic, ic->ic_bss);
1271 if (__predict_false(m == NULL)__builtin_expect(((m == ((void *)0)) != 0), 0))
1272 return (ENOBUFS55);
1273 /* Assign sequence number. */
1274 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
1275 *(uint16_t *)&wh->i_seq[0] =
1276 htole16(ic->ic_bss->ni_txseq << IEEE80211_SEQ_SEQ_SHIFT)((__uint16_t)(ic->ic_bss->ni_txseq << 4));
1277 ic->ic_bss->ni_txseq++;
1278
1279 /* Unmap and free old beacon if any. */
1280 if (__predict_true(bf->bf_m != NULL)__builtin_expect(((bf->bf_m != ((void *)0)) != 0), 1)) {
1281 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,(*(sc->sc_dmat)->_dmamap_sync)((sc->sc_dmat), (bf->
bf_map), (0), (bf->bf_map->dm_mapsize), (0x08))
1282 bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE)(*(sc->sc_dmat)->_dmamap_sync)((sc->sc_dmat), (bf->
bf_map), (0), (bf->bf_map->dm_mapsize), (0x08))
;
1283 bus_dmamap_unload(sc->sc_dmat, bf->bf_map)(*(sc->sc_dmat)->_dmamap_unload)((sc->sc_dmat), (bf->
bf_map))
;
1284 m_freem(bf->bf_m);
1285 bf->bf_m = NULL((void *)0);
1286 }
1287 /* DMA map new beacon. */
1288 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,(*(sc->sc_dmat)->_dmamap_load_mbuf)((sc->sc_dmat), (
bf->bf_map), (m), (0x0001 | 0x0400))
1289 BUS_DMA_NOWAIT | BUS_DMA_WRITE)(*(sc->sc_dmat)->_dmamap_load_mbuf)((sc->sc_dmat), (
bf->bf_map), (m), (0x0001 | 0x0400))
;
1290 if (__predict_false(error != 0)__builtin_expect(((error != 0) != 0), 0)) {
1291 m_freem(m);
1292 return (error);
1293 }
1294 bf->bf_m = m;
1295
1296 /* Setup Tx descriptor (simplified ar5008_tx()). */
1297 ds = bf->bf_descs;
1298 memset(ds, 0, sizeof(*ds))__builtin_memset((ds), (0), (sizeof(*ds)));
1299
1300 totlen = m->m_pkthdrM_dat.MH.MH_pkthdr.len + IEEE80211_CRC_LEN4;
1301 ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen)(((uint32_t)(totlen) << 0) & 0x00000fff);
1302 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER)(((uint32_t)(63) << 16) & 0x003f0000);
1303 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON)(((uint32_t)(3) << 20) & 0x00f00000);
1304 ds->ds_ctl1 |= AR_TXC1_NO_ACK0x01000000;
1305 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR)(((uint32_t)(0) << 26) & 0x0c000000);
1306
1307 /* Write number of tries. */
1308 ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1)(((uint32_t)(1) << 16) & 0x000f0000);
1309
1310 /* Write Tx rate. */
1311 ridx = IEEE80211_IS_CHAN_5GHZ(ni->ni_chan)(((ni->ni_chan)->ic_flags & 0x0100) != 0) ?
1312 ATHN_RIDX_OFDM64 : ATHN_RIDX_CCK10;
1313 hwrate = athn_rates[ridx].hwrate;
1314 ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate)(((uint32_t)(hwrate) << 0) & 0x000000ff);
1315
1316 /* Write Tx chains. */
1317 ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->txchainmask)(((uint32_t)(sc->txchainmask) << 2) & 0x0000001c
)
;
1318
1319 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
1320 /* Segment length must be a multiple of 4. */
1321 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,(((uint32_t)((bf->bf_map->dm_segs[0].ds_len + 3) & ~
3) << 0) & 0x00000fff)
1322 (bf->bf_map->dm_segs[0].ds_len + 3) & ~3)(((uint32_t)((bf->bf_map->dm_segs[0].ds_len + 3) & ~
3) << 0) & 0x00000fff)
;
1323
1324 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,(*(sc->sc_dmat)->_dmamap_sync)((sc->sc_dmat), (bf->
bf_map), (0), (bf->bf_map->dm_mapsize), (0x04))
1325 BUS_DMASYNC_PREWRITE)(*(sc->sc_dmat)->_dmamap_sync)((sc->sc_dmat), (bf->
bf_map), (0), (bf->bf_map->dm_mapsize), (0x04))
;
1326
1327 /* Stop Tx DMA before putting the new beacon on the queue. */
1328 athn_stop_tx_dma(sc, ATHN_QID_BEACON7);
1329
1330 AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr)(sc)->ops.write((sc), ((0x0800 + (7) * 4)), (bf->bf_daddr
))
;
1331
1332 for (;;) {
1333 if (SIMPLEQ_EMPTY(&sc->txbufs)(((&sc->txbufs)->sqh_first) == ((void *)0)))
1334 break;
1335
1336 m = mq_dequeue(&ni->ni_savedq);
1337 if (m == NULL((void *)0))
1338 break;
1339 if (!mq_empty(&ni->ni_savedq)((&(&ni->ni_savedq)->mq_list)->ml_len == 0)) {
1340 /* More queued frames, set the More Data bit. */
1341 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
1342 wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA0x20;
1343 }
1344
1345 if (sc->ops.tx(sc, m, ni, ATHN_TXFLAG_CAB(1 << 1)) != 0) {
1346 ieee80211_release_node(ic, ni);
1347 ifp->if_oerrorsif_data.ifi_oerrors++;
1348 break;
1349 }
1350 }
1351
1352 /* Kick Tx. */
1353 AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON)(sc)->ops.write((sc), (0x0840), (1 << 7));
1354 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1355 return (0);
1356}
1357#endif
1358
1359int
1360ar5008_intr(struct athn_softc *sc)
1361{
1362 uint32_t intr, intr2, intr5, sync;
1363
1364 /* Get pending interrupts. */
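	/*
	 * A register value of all ones (AR_INTR_SPURIOUS) typically means
	 * the device is absent or in a low-power state, in which case the
	 * interrupt cannot be ours.
	 */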
1365 intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE)(sc)->ops.read((sc), (0x4038));
1366 if (!(intr & AR_INTR_MAC_IRQ0x00000002) || intr == AR_INTR_SPURIOUS0xffffffff) {
1367 intr = AR_READ(sc, AR_INTR_SYNC_CAUSE)(sc)->ops.read((sc), (0x4028));
1368 if (intr == AR_INTR_SPURIOUS0xffffffff || (intr & sc->isync) == 0)
1369 return (0); /* Not for us. */
1370 }
1371
1372 if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE)(sc)->ops.read((sc), (0x4038)) & AR_INTR_MAC_IRQ0x00000002) &&
1373 (AR_READ(sc, AR_RTC_STATUS)(sc)->ops.read((sc), (0x7044)) & AR_RTC_STATUS_M0x0000000f) == AR_RTC_STATUS_ON0x00000002)
1374 intr = AR_READ(sc, AR_ISR)(sc)->ops.read((sc), (0x0080));
1375 else
1376 intr = 0;
1377 sync = AR_READ(sc, AR_INTR_SYNC_CAUSE)(sc)->ops.read((sc), (0x4028)) & sc->isync;
1378 if (intr == 0 && sync == 0)
1379 return (0); /* Not for us. */
1380
1381 if (intr != 0) {
1382 if (intr & AR_ISR_BCNMISC0x00800000) {
1383 intr2 = AR_READ(sc, AR_ISR_S2)(sc)->ops.read((sc), (0x008c));
1384 if (intr2 & AR_ISR_S2_TIM0x01000000)
1385 /* TBD */;
1386 if (intr2 & AR_ISR_S2_TSFOOR0x40000000)
1387 /* TBD */;
1388 }
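		/*
		 * AR_ISR_RAC is presumably a read-and-clear mirror of the
		 * primary ISR: reading it acknowledges the pending bits in
		 * a single access.
		 */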
1389 intr = AR_READ(sc, AR_ISR_RAC)(sc)->ops.read((sc), (0x00c0));
1390 if (intr == AR_INTR_SPURIOUS0xffffffff)
1391 return (1);
1392
1393#ifndef IEEE80211_STA_ONLY
1394 if (intr & AR_ISR_SWBA0x00010000)
1395 ar5008_swba_intr(sc);
1396#endif
1397 if (intr & (AR_ISR_RXMINTR0x01000000 | AR_ISR_RXINTM0x80000000))
1398 ar5008_rx_intr(sc);
1399 if (intr & (AR_ISR_RXOK0x00000001 | AR_ISR_RXERR0x00000004 | AR_ISR_RXORN0x00000020))
1400 ar5008_rx_intr(sc);
1401
1402 if (intr & (AR_ISR_TXOK0x00000040 | AR_ISR_TXDESC0x00000080 |
1403 AR_ISR_TXERR0x00000100 | AR_ISR_TXEOL0x00000400))
1404 ar5008_tx_intr(sc);
1405
1406 intr5 = AR_READ(sc, AR_ISR_S5_S)(sc)->ops.read((sc), (0x00d8));
1407 if (intr & AR_ISR_GENTMR0x10000000) {
1408 if (intr5 & AR_ISR_GENTMR0x10000000) {
1409 DPRINTF(("GENTMR trigger=%d thresh=%d\n",
1410 MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
1411 MS(intr5, AR_ISR_S5_GENTIMER_THRESH)));
1412 }
1413 }
1414
1415 if (intr5 & AR_ISR_S5_TIM_TIMER0x00000010)
1416 /* TBD */;
1417 }
1418 if (sync != 0) {
1419 if (sync & (AR_INTR_SYNC_HOST1_FATAL0x00000020 |
1420 AR_INTR_SYNC_HOST1_PERR0x00000040))
1421 /* TBD */;
1422
1423 if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT0x00001000) {
1424 AR_WRITE(sc, AR_RC, AR_RC_HOSTIF)(sc)->ops.write((sc), (0x4000), (0x00000100));
1425 AR_WRITE(sc, AR_RC, 0)(sc)->ops.write((sc), (0x4000), (0));
1426 }
1427
1428 if ((sc->flags & ATHN_FLAG_RFSILENT(1 << 5)) &&
1429 (sync & AR_INTR_SYNC_GPIO_PIN(sc->rfsilent_pin)(1 << (18 + (sc->rfsilent_pin))))) {
1430 struct ifnet *ifp = &sc->sc_ic.ic_ific_ac.ac_if;
1431
1432 printf("%s: radio switch turned off\n",
1433 sc->sc_dev.dv_xname);
1434 /* Turn the interface down. */
1435 athn_stop(ifp, 1);
1436 return (1);
1437 }
1438
1439 AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync)(sc)->ops.write((sc), (0x4028), (sync));
1440 (void)AR_READ(sc, AR_INTR_SYNC_CAUSE)(sc)->ops.read((sc), (0x4028));
1441 }
1442 return (1);
1443}
1444
1445int
1446ar5008_ccmp_encap(struct mbuf *m, u_int hdrlen, struct ieee80211_key *k)
1447{
1448 struct mbuf *n;
1449 uint8_t *ivp;
1450 int off;
1451
1452 /* Insert IV for CCMP hardware encryption. */
1453 n = m_makespace(m, hdrlen, IEEE80211_CCMP_HDRLEN8, &off);
1454 if (n == NULL((void *)0)) {
1455 m_freem(m);
1456 return (ENOBUFS55);
1457 }
1458 ivp = mtod(n, uint8_t *)((uint8_t *)((n)->m_hdr.mh_data)) + off;
1459 k->k_tsc++;
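	/*
	 * Build the 8-byte CCMP header: PN0, PN1, reserved, key-ID byte
	 * with the ExtIV bit set, then PN2..PN5 (IEEE 802.11 CCMP format).
	 */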
1460 ivp[0] = k->k_tsc;
1461 ivp[1] = k->k_tsc >> 8;
1462 ivp[2] = 0;
1463 ivp[3] = k->k_id << 6 | IEEE80211_WEP_EXTIV0x20;
1464 ivp[4] = k->k_tsc >> 16;
1465 ivp[5] = k->k_tsc >> 24;
1466 ivp[6] = k->k_tsc >> 32;
1467 ivp[7] = k->k_tsc >> 40;
1468
1469 return 0;
1470}
1471
1472int
1473ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
1474 int txflags)
1475{
1476 struct ieee80211com *ic = &sc->sc_ic;
1477 struct ieee80211_key *k = NULL((void *)0);
1478 struct ieee80211_frame *wh;
1479 struct athn_series series[4];
1480 struct ar_tx_desc *ds, *lastds;
1481 struct athn_txq *txq;
1482 struct athn_tx_buf *bf;
1483 struct athn_node *an = (void *)ni;
1484 uintptr_t entry;
1485 uint16_t qos;
1486 uint8_t txpower, type, encrtype, tid, ridx[4];
1487 int i, error, totlen, hasqos, qid;
1488
1489 /* Grab a Tx buffer from our global free list. */
1490 bf = SIMPLEQ_FIRST(&sc->txbufs)((&sc->txbufs)->sqh_first);
1491 KASSERT(bf != NULL)((bf != ((void *)0)) ? (void)0 : __assert("diagnostic ", "/usr/src/sys/dev/ic/ar5008.c"
, 1491, "bf != NULL"))
;
1492
1493 /* Map 802.11 frame type to hardware frame type. */
1494 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
1495 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK0x0c) ==
1496 IEEE80211_FC0_TYPE_MGT0x00) {
1497 /* NB: Beacons do not use ar5008_tx(). */
1498 if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK0xf0) ==
1499 IEEE80211_FC0_SUBTYPE_PROBE_RESP0x50)
1500 type = AR_FRAME_TYPE_PROBE_RESP4;
1501 else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK0xf0) ==
1502 IEEE80211_FC0_SUBTYPE_ATIM0x90)
1503 type = AR_FRAME_TYPE_ATIM1;
1504 else
1505 type = AR_FRAME_TYPE_NORMAL0;
1506 } else if ((wh->i_fc[0] &
1507 (IEEE80211_FC0_TYPE_MASK0x0c | IEEE80211_FC0_SUBTYPE_MASK0xf0)) ==
1508 (IEEE80211_FC0_TYPE_CTL0x04 | IEEE80211_FC0_SUBTYPE_PS_POLL0xa0)) {
1509 type = AR_FRAME_TYPE_PSPOLL2;
1510 } else
1511 type = AR_FRAME_TYPE_NORMAL0;
1512
1513 if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED0x40) {
1514 k = ieee80211_get_txkey(ic, wh, ni);
1515 if (k->k_cipher == IEEE80211_CIPHER_CCMP) {
1516 u_int hdrlen = ieee80211_get_hdrlen(wh);
1517 if (ar5008_ccmp_encap(m, hdrlen, k) != 0)
1518 return (ENOBUFS55);
1519 } else {
1520 if ((m = ieee80211_encrypt(ic, m, k)) == NULL((void *)0))
1521 return (ENOBUFS55);
1522 k = NULL((void *)0); /* skip hardware crypto further below */
1523 }
1524 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
1525 }
1526
1527 /* XXX 2-byte padding for QoS and 4-addr headers. */
1528
1529 /* Select the HW Tx queue to use for this frame. */
1530 if ((hasqos = ieee80211_has_qos(wh))) {
1531 qos = ieee80211_get_qos(wh);
1532 tid = qos & IEEE80211_QOS_TID0x000f;
1533 qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
1534 } else if (type == AR_FRAME_TYPE_PSPOLL2) {
1535 qid = ATHN_QID_PSPOLL1;
1536 } else if (txflags & ATHN_TXFLAG_CAB(1 << 1)) {
1537 qid = ATHN_QID_CAB6;
1538 } else
1539 qid = ATHN_QID_AC_BE0;
1540 txq = &sc->txq[qid];
1541
1542 /* Select the transmit rates to use for this frame. */
1543 if (IEEE80211_IS_MULTICAST(wh->i_addr1)(*(wh->i_addr1) & 0x01) ||
1544 (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK0x0c) !=
1545 IEEE80211_FC0_TYPE_DATA0x08) {
1546 /* Use lowest rate for all tries. */
1547 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1548 (IEEE80211_IS_CHAN_5GHZ(ni->ni_chan)(((ni->ni_chan)->ic_flags & 0x0100) != 0) ?
1549 ATHN_RIDX_OFDM64 : ATHN_RIDX_CCK10);
1550 } else if ((ni->ni_flags & IEEE80211_NODE_HT0x0400) &&
1551 ic->ic_fixed_mcs != -1) {
1552 /* Use same fixed rate for all tries. */
1553 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1554 ATHN_RIDX_MCS012 + ic->ic_fixed_mcs;
1555 } else if (ic->ic_fixed_rate != -1) {
1556 /* Use same fixed rate for all tries. */
1557 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1558 sc->fixed_ridx;
1559 } else {
1560 /* Use fallback table of the node. */
1561 int txrate;
1562
1563 if (ni->ni_flags & IEEE80211_NODE_HT0x0400)
1564 txrate = ATHN_NUM_LEGACY_RATES15 + ni->ni_txmcs;
1565 else
1566 txrate = ni->ni_txrate;
1567 for (i = 0; i < 4; i++) {
1568 ridx[i] = an->ridx[txrate];
1569 txrate = an->fallback[txrate];
1570 }
1571 }
1572
1573#if NBPFILTER1 > 0
1574 if (__predict_false(sc->sc_drvbpf != NULL)__builtin_expect(((sc->sc_drvbpf != ((void *)0)) != 0), 0)) {
1575 struct athn_tx_radiotap_header *tap = &sc->sc_txtapsc_txtapu.th;
1576
1577 tap->wt_flags = 0;
1578 /* Use initial transmit rate. */
1579 if (athn_rates[ridx[0]].hwrate & 0x80) /* MCS */
1580 tap->wt_rate = athn_rates[ridx[0]].hwrate;
1581 else
1582 tap->wt_rate = athn_rates[ridx[0]].rate;
1583 tap->wt_chan_freq = htole16(ic->ic_bss->ni_chan->ic_freq)((__uint16_t)(ic->ic_bss->ni_chan->ic_freq));
1584 tap->wt_chan_flags = htole16(ic->ic_bss->ni_chan->ic_flags)((__uint16_t)(ic->ic_bss->ni_chan->ic_flags));
1585 if (athn_rates[ridx[0]].phy == IEEE80211_T_DS &&
1586 ridx[0] != ATHN_RIDX_CCK10 &&
1587 (ic->ic_flags & IEEE80211_F_SHPREAMBLE0x00040000))
1588 tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE0x02;
1589 bpf_mtap_hdr(sc->sc_drvbpf, tap, sc->sc_txtap_len, m,
1590 BPF_DIRECTION_OUT(1 << 1));
1591 }
1592#endif
1593
1594 /* DMA map mbuf. */
1595 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,(*(sc->sc_dmat)->_dmamap_load_mbuf)((sc->sc_dmat), (
bf->bf_map), (m), (0x0001 | 0x0400))
1596 BUS_DMA_NOWAIT | BUS_DMA_WRITE)(*(sc->sc_dmat)->_dmamap_load_mbuf)((sc->sc_dmat), (
bf->bf_map), (m), (0x0001 | 0x0400))
;
1597 if (__predict_false(error != 0)__builtin_expect(((error != 0) != 0), 0)) {
1598 if (error != EFBIG27) {
1599 printf("%s: can't map mbuf (error %d)\n",
1600 sc->sc_dev.dv_xname, error);
1601 m_freem(m);
1602 return (error);
1603 }
1604 /*
1605 * DMA mapping requires too many DMA segments; linearize
1606 * mbuf in kernel virtual address space and retry.
1607 */
1608 if (m_defrag(m, M_DONTWAIT0x0002) != 0) {
1609 m_freem(m);
1610 return (ENOBUFS55);
1611 }
1612
1613 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,(*(sc->sc_dmat)->_dmamap_load_mbuf)((sc->sc_dmat), (
bf->bf_map), (m), (0x0001 | 0x0400))
1614 BUS_DMA_NOWAIT | BUS_DMA_WRITE)(*(sc->sc_dmat)->_dmamap_load_mbuf)((sc->sc_dmat), (
bf->bf_map), (m), (0x0001 | 0x0400))
;
1615 if (error != 0) {
1616 printf("%s: can't map mbuf (error %d)\n",
1617 sc->sc_dev.dv_xname, error);
1618 m_freem(m);
1619 return (error);
1620 }
1621 }
1622 bf->bf_m = m;
1623 bf->bf_ni = ni;
1624 bf->bf_txmcs = ni->ni_txmcs;
1625 bf->bf_txflags = txflags;
1626
1627 wh = mtod(m, struct ieee80211_frame *)((struct ieee80211_frame *)((m)->m_hdr.mh_data));
1628
1629 totlen = m->m_pkthdrM_dat.MH.MH_pkthdr.len + IEEE80211_CRC_LEN4;
1630
1631 /* Clear all Tx descriptors that we will use. */
1632 memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds))__builtin_memset((bf->bf_descs), (0), (bf->bf_map->dm_nsegs
* sizeof(*ds)))
;
1633
1634 /* Setup first Tx descriptor. */
1635 ds = bf->bf_descs;
1636
1637 ds->ds_ctl0 = AR_TXC0_INTR_REQ0x20000000 | AR_TXC0_CLR_DEST_MASK0x01000000;
1638 txpower = AR_MAX_RATE_POWER63; /* Get from per-rate registers. */
1639 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower)(((uint32_t)(txpower) << 16) & 0x003f0000);
1640
1641 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type)(((uint32_t)(type) << 20) & 0x00f00000);
1642
1643 if (IEEE80211_IS_MULTICAST(wh->i_addr1)(*(wh->i_addr1) & 0x01) ||
1644 (hasqos && (qos & IEEE80211_QOS_ACK_POLICY_MASK0x0060) ==
1645 IEEE80211_QOS_ACK_POLICY_NOACK0x0020))
1646 ds->ds_ctl1 |= AR_TXC1_NO_ACK0x01000000;
1647
1648 if (k != NULL((void *)0)) {
1649 /* Map 802.11 cipher to hardware encryption type. */
1650 if (k->k_cipher == IEEE80211_CIPHER_CCMP) {
1651 encrtype = AR_ENCR_TYPE_AES2;
1652 totlen += IEEE80211_CCMP_MICLEN8;
1653 } else
1654 panic("unsupported cipher");
1655 /*
1656 * NB: The key cache entry index is stored in the key
1657 * private field when the key is installed.
1658 */
1659 entry = (uintptr_t)k->k_priv;
1660 ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry)(((uint32_t)(entry) << 13) & 0x000fe000);
1661 ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID0x40000000;
1662 } else
1663 encrtype = AR_ENCR_TYPE_CLEAR0;
1664 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype)(((uint32_t)(encrtype) << 26) & 0x0c000000);
1665
1666 /* Check if frame must be protected using RTS/CTS or CTS-to-self. */
1667 if (!IEEE80211_IS_MULTICAST(wh->i_addr1)(*(wh->i_addr1) & 0x01) &&
1668 (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK0x0c) ==
1669 IEEE80211_FC0_TYPE_DATA0x08) {
1670 enum ieee80211_htprot htprot;
1671
1672 htprot = (ic->ic_bss->ni_htop1 & IEEE80211_HTOP1_PROT_MASK0x0003);
1673
1674 /* NB: Group frames are sent using CCK in 802.11b/g. */
1675 if (totlen > ic->ic_rtsthreshold) {
1676 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE0x00400000;
1677 } else if (((ic->ic_flags & IEEE80211_F_USEPROT0x00100000) &&
1678 athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) ||
1679 ((ni->ni_flags & IEEE80211_NODE_HT0x0400) &&
1680 htprot != IEEE80211_HTPROT_NONE)) {
1681 if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
1682 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE0x00400000;
1683 else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
1684 ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE0x80000000;
1685 }
1686 }
1687 /*
1688 * Disable multi-rate retries when protection is used.
1689 * The RTS/CTS frame's duration field is fixed and won't be
1690 * updated by hardware when the data rate changes.
1691 */
1692 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE0x00400000 | AR_TXC0_CTS_ENABLE0x80000000)) {
1693 ridx[1] = ridx[2] = ridx[3] = ridx[0];
1694 }
1695 /* Setup multi-rate retries. */
1696 for (i = 0; i < 4; i++) {
1697 series[i].hwrate = athn_rates[ridx[i]].hwrate;
1698 if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
1699 ridx[i] != ATHN_RIDX_CCK10 &&
1700 (ic->ic_flags & IEEE80211_F_SHPREAMBLE0x00040000))
1701 series[i].hwrate |= 0x04;
1702 /* Compute duration for each series. */
1703 series[i].dur = athn_txtime(sc, totlen, ridx[i], ic->ic_flags);
1704 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK0x01000000)) {
1705 /* Account for ACK duration. */
1706 series[i].dur += athn_txtime(sc, IEEE80211_ACK_LEN(sizeof(struct ieee80211_frame_ack) + 4),
1707 athn_rates[ridx[i]].rspridx, ic->ic_flags);
1708 }
1709 }
1710
1711 /* Write number of tries for each series. */
1712 ds->ds_ctl2 =
1713 SM(AR_TXC2_XMIT_DATA_TRIES0, 2)(((uint32_t)(2) << 16) & 0x000f0000) |
1714 SM(AR_TXC2_XMIT_DATA_TRIES1, 2)(((uint32_t)(2) << 20) & 0x00f00000) |
1715 SM(AR_TXC2_XMIT_DATA_TRIES2, 2)(((uint32_t)(2) << 24) & 0x0f000000) |
1716 SM(AR_TXC2_XMIT_DATA_TRIES3, 4)(((uint32_t)(4) << 28) & 0xf0000000);
1717
1718 /* Tell HW to update duration field in 802.11 header. */
1719 if (type != AR_FRAME_TYPE_PSPOLL2)
1720 ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA0x00008000;
1721
1722 /* Write Tx rate for each series. */
1723 ds->ds_ctl3 =
1724 SM(AR_TXC3_XMIT_RATE0, series[0].hwrate)(((uint32_t)(series[0].hwrate) << 0) & 0x000000ff) |
1725 SM(AR_TXC3_XMIT_RATE1, series[1].hwrate)(((uint32_t)(series[1].hwrate) << 8) & 0x0000ff00) |
1726 SM(AR_TXC3_XMIT_RATE2, series[2].hwrate)(((uint32_t)(series[2].hwrate) << 16) & 0x00ff0000) |
1727 SM(AR_TXC3_XMIT_RATE3, series[3].hwrate)(((uint32_t)(series[3].hwrate) << 24) & 0xff000000);
1728
1729 /* Write duration for each series. */
1730 ds->ds_ctl4 =
1731 SM(AR_TXC4_PACKET_DUR0, series[0].dur)(((uint32_t)(series[0].dur) << 0) & 0x00007fff) |
1732 SM(AR_TXC4_PACKET_DUR1, series[1].dur)(((uint32_t)(series[1].dur) << 16) & 0x7fff0000);
1733 ds->ds_ctl5 =
1734 SM(AR_TXC5_PACKET_DUR2, series[2].dur)(((uint32_t)(series[2].dur) << 0) & 0x00007fff) |
1735 SM(AR_TXC5_PACKET_DUR3, series[3].dur)(((uint32_t)(series[3].dur) << 16) & 0x7fff0000);
1736
1737 /* Use the same Tx chains for all tries. */
1738 ds->ds_ctl7 =
1739 SM(AR_TXC7_CHAIN_SEL0, sc->txchainmask)(((uint32_t)(sc->txchainmask) << 2) & 0x0000001c
)
|
1740 SM(AR_TXC7_CHAIN_SEL1, sc->txchainmask)(((uint32_t)(sc->txchainmask) << 7) & 0x00000380
)
|
1741 SM(AR_TXC7_CHAIN_SEL2, sc->txchainmask)(((uint32_t)(sc->txchainmask) << 12) & 0x00007000
)
|
1742 SM(AR_TXC7_CHAIN_SEL3, sc->txchainmask)(((uint32_t)(sc->txchainmask) << 17) & 0x000e0000
)
;
1743#ifdef notyet
1744 /* Use the same short GI setting for all tries. */
1745 if (ni->ni_htcaps & IEEE80211_HTCAP_SGI200x00000020)
1746 ds->ds_ctl7 |= AR_TXC7_GI0123(0x00000002 | 0x00000040 | 0x00000800 | 0x00010000);
1747 /* Use the same channel width for all tries. */
1748 if (ic->ic_flags & IEEE80211_F_CBW40)
1749 ds->ds_ctl7 |= AR_TXC7_2040_0123(0x00000001 | 0x00000020 | 0x00000400 | 0x00008000);
1750#endif
1751
1752 /* Set Tx power for series 1 - 3 */
1753 ds->ds_ctl9 = SM(AR_TXC9_XMIT_POWER1, txpower)(((uint32_t)(txpower) << 24) & 0x3f000000);
1754 ds->ds_ctl10 = SM(AR_TXC10_XMIT_POWER2, txpower)(((uint32_t)(txpower) << 24) & 0x3f000000);
1755 ds->ds_ctl11 = SM(AR_TXC11_XMIT_POWER3, txpower)(((uint32_t)(txpower) << 24) & 0x3f000000);
1756
1757 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE0x00400000 | AR_TXC0_CTS_ENABLE0x80000000)) {
1758 uint8_t protridx, hwrate;
1759 uint16_t dur = 0;
1760
1761 /* Use the same protection mode for all tries. */
1762 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE0x00400000) {
1763 ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01(0x00008000 | 0x80000000);
1764 ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23(0x00008000 | 0x80000000);
1765 }
1766 /* Select protection rate (suboptimal but ok). */
1767 protridx = IEEE80211_IS_CHAN_5GHZ(ni->ni_chan)(((ni->ni_chan)->ic_flags & 0x0100) != 0) ?
1768 ATHN_RIDX_OFDM64 : ATHN_RIDX_CCK21;
1769 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE0x00400000) {
1770 /* Account for CTS duration. */
1771 dur += athn_txtime(sc, IEEE80211_ACK_LEN(sizeof(struct ieee80211_frame_ack) + 4),
1772 athn_rates[protridx].rspridx, ic->ic_flags);
1773 }
1774 dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
1775 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK0x01000000)) {
1776 /* Account for ACK duration. */
1777 dur += athn_txtime(sc, IEEE80211_ACK_LEN(sizeof(struct ieee80211_frame_ack) + 4),
1778 athn_rates[ridx[0]].rspridx, ic->ic_flags);
1779 }
1780 /* Write protection frame duration and rate. */
1781 ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur)(((uint32_t)(dur) << 0) & 0x00007fff);
1782 hwrate = athn_rates[protridx].hwrate;
1783 if (protridx == ATHN_RIDX_CCK21 &&
1784 (ic->ic_flags & IEEE80211_F_SHPREAMBLE0x00040000))
1785 hwrate |= 0x04;
1786 ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate)(((uint32_t)(hwrate) << 20) & 0x0ff00000);
1787 }
1788
1789 /* Finalize first Tx descriptor and fill others (if any). */
1790 ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen)(((uint32_t)(totlen) << 0) & 0x00000fff);
1791
1792 for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
1793 ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
1794 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,(((uint32_t)(bf->bf_map->dm_segs[i].ds_len) << 0)
& 0x00000fff)
1795 bf->bf_map->dm_segs[i].ds_len)(((uint32_t)(bf->bf_map->dm_segs[i].ds_len) << 0)
& 0x00000fff)
;
1796
1797 if (i != bf->bf_map->dm_nsegs - 1)
1798 ds->ds_ctl1 |= AR_TXC1_MORE0x00001000;
1799 ds->ds_link = 0;
1800
1801 /* Chain Tx descriptor. */
1802 if (i != 0)
1803 lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
1804 lastds = ds;
1805 }
1806 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,(*(sc->sc_dmat)->_dmamap_sync)((sc->sc_dmat), (bf->
bf_map), (0), (bf->bf_map->dm_mapsize), (0x04))
1807 BUS_DMASYNC_PREWRITE)(*(sc->sc_dmat)->_dmamap_sync)((sc->sc_dmat), (bf->
bf_map), (0), (bf->bf_map->dm_mapsize), (0x04))
;
1808
1809 if (!SIMPLEQ_EMPTY(&txq->head)(((&txq->head)->sqh_first) == ((void *)0)))
1810 ((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
1811 else
1812 AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr)(sc)->ops.write((sc), ((0x0800 + (qid) * 4)), (bf->bf_daddr
))
;
1813 txq->lastds = lastds;
1814 SIMPLEQ_REMOVE_HEAD(&sc->txbufs, bf_list)do { if (((&sc->txbufs)->sqh_first = (&sc->txbufs
)->sqh_first->bf_list.sqe_next) == ((void *)0)) (&sc
->txbufs)->sqh_last = &(&sc->txbufs)->sqh_first
; } while (0)
;
1815 SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list)do { (bf)->bf_list.sqe_next = ((void *)0); *(&txq->
head)->sqh_last = (bf); (&txq->head)->sqh_last =
&(bf)->bf_list.sqe_next; } while (0)
;
1816
1817 ds = bf->bf_descs;
Value stored to 'ds' is never read
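	/*
	 * The assignment above only rewinds 'ds' to the first descriptor
	 * so that the DPRINTFN() below can print its control words.  When
	 * debug output is compiled out (the usual case), the macro expands
	 * to nothing and the stored value is indeed never read, which is
	 * what the analyzer reports here.
	 */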
1818 DPRINTFN(6, ("Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
1819 qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3));
1820
1821 /* Kick Tx. */
1822 AR_WRITE(sc, AR_Q_TXE, 1 << qid)(sc)->ops.write((sc), (0x0840), (1 << qid));
1823 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1824 return (0);
1825}
1826
1827void
1828ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
1829{
1830 uint32_t reg;
1831
1832 reg = IEEE80211_IS_CHAN_2GHZ(c)(((c)->ic_flags & 0x0080) != 0) ?
1833 AR_PHY_MODE_DYNAMIC0x00000004 : AR_PHY_MODE_OFDM0x00000000;
1834 if (!AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080)) {
1835 reg |= IEEE80211_IS_CHAN_2GHZ(c)(((c)->ic_flags & 0x0080) != 0) ?
1836 AR_PHY_MODE_RF2GHZ0x00000002 : AR_PHY_MODE_RF5GHZ0x00000000;
1837 } else if (IEEE80211_IS_CHAN_5GHZ(c)(((c)->ic_flags & 0x0100) != 0) &&
1838 (sc->flags & ATHN_FLAG_FAST_PLL_CLOCK(1 << 4))) {
1839 reg |= AR_PHY_MODE_DYNAMIC0x00000004 | AR_PHY_MODE_DYN_CCK_DISABLE0x00000100;
1840 }
1841 AR_WRITE(sc, AR_PHY_MODE, reg)(sc)->ops.write((sc), (0xa200), (reg));
1842 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1843}
1844
1845static __inline uint32_t
1846ar5008_synth_delay(struct athn_softc *sc)
1847{
1848 uint32_t delay;
1849
1850 delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY)(((uint32_t)((sc)->ops.read((sc), (0x9914))) & 0x00003fff
) >> 0)
;
1851 if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
1852 delay = (delay * 4) / 22;
1853 else
1854 delay = delay / 10; /* in 100ns steps */
1855 return (delay);
1856}
1857
1858int
1859ar5008_rf_bus_request(struct athn_softc *sc)
1860{
1861 int ntries;
1862
1863 /* Request RF Bus grant. */
1864 AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN)(sc)->ops.write((sc), (0x997c), (0x00000001));
1865 for (ntries = 0; ntries < 10000; ntries++) {
1866 if (AR_READ(sc, AR_PHY_RFBUS_GRANT)(sc)->ops.read((sc), (0x9c20)) & AR_PHY_RFBUS_GRANT_EN0x00000001)
1867 return (0);
1868 DELAY(10)(*delay_func)(10);
1869 }
1870 DPRINTF(("could not kill baseband Rx\n"));
1871 return (ETIMEDOUT60);
1872}
1873
1874void
1875ar5008_rf_bus_release(struct athn_softc *sc)
1876{
1877 /* Wait for the synthesizer to settle. */
1878 DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc))(*delay_func)(100 + ar5008_synth_delay(sc));
1879
1880 /* Release the RF Bus grant. */
1881 AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0)(sc)->ops.write((sc), (0x997c), (0));
1882 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1883}
1884
1885void
1886ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
1887 struct ieee80211_channel *extc)
1888{
1889 uint32_t phy;
1890
1891 if (AR_SREV_9285_10_OR_LATER(sc)((sc)->mac_ver >= 0x0c0))
1892 phy = AR_READ(sc, AR_PHY_TURBO)(sc)->ops.read((sc), (0x9804)) & AR_PHY_FC_ENABLE_DAC_FIFO0x00000800;
1893 else
1894 phy = 0;
1895 phy |= AR_PHY_FC_HT_EN0x00000040 | AR_PHY_FC_SHORT_GI_400x00000080 |
1896 AR_PHY_FC_SINGLE_HT_LTF10x00000200 | AR_PHY_FC_WALSH0x00000100;
1897 if (extc != NULL((void *)0)) {
1898 phy |= AR_PHY_FC_DYN2040_EN0x00000004;
1899 if (extc > c) /* XXX */
1900 phy |= AR_PHY_FC_DYN2040_PRI_CH0x00000010;
1901 }
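	/*
	 * The pointer comparison above (flagged XXX) presumably relies on
	 * the channel array being laid out in ascending frequency order;
	 * comparing ic_freq directly would make the intent explicit.
	 */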
1902 AR_WRITE(sc, AR_PHY_TURBO, phy)(sc)->ops.write((sc), (0x9804), (phy));
1903
1904 AR_WRITE(sc, AR_2040_MODE,(sc)->ops.write((sc), (0x8318), ((extc != ((void *)0)) ? 0x00000001
: 0))
1905 (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0)(sc)->ops.write((sc), (0x8318), ((extc != ((void *)0)) ? 0x00000001
: 0))
;
1906
1907 /* Set global transmit timeout. */
1908 AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25))(sc)->ops.write((sc), (0x0064), ((((uint32_t)(25) <<
16) & 0xffff0000)))
;
1909 /* Set carrier sense timeout. */
1910 AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15))(sc)->ops.write((sc), (0x006c), ((((uint32_t)(15) <<
16) & 0xffff0000)))
;
1911 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1912}
1913
1914void
1915ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
1916 struct ieee80211_channel *extc)
1917{
1918 uint32_t coeff, exp, man, reg;
1919
1920 /* Set Delta Slope (exponent and mantissa). */
1921 coeff = (100 << 24) / c->ic_freq;
1922 athn_get_delta_slope(coeff, &exp, &man);
1923 DPRINTFN(5, ("delta slope coeff exp=%u man=%u\n", exp, man));
1924
1925 reg = AR_READ(sc, AR_PHY_TIMING3)(sc)->ops.read((sc), (0x9814));
1926 reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp)(((reg) & ~0x0001e000) | (((uint32_t)(exp) << 13) &
0x0001e000))
;
1927 reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man)(((reg) & ~0xfffe0000) | (((uint32_t)(man) << 17) &
0xfffe0000))
;
1928 AR_WRITE(sc, AR_PHY_TIMING3, reg)(sc)->ops.write((sc), (0x9814), (reg));
1929
1930 /* For Short GI, coeff is 9/10 that of normal coeff. */
1931 coeff = (9 * coeff) / 10;
1932 athn_get_delta_slope(coeff, &exp, &man);
1933 DPRINTFN(5, ("delta slope coeff exp=%u man=%u\n", exp, man));
1934
1935 reg = AR_READ(sc, AR_PHY_HALFGI)(sc)->ops.read((sc), (0x99d0));
1936 reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp)(((reg) & ~0x0000000f) | (((uint32_t)(exp) << 0) &
0x0000000f))
;
1937 reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man)(((reg) & ~0x0007fff0) | (((uint32_t)(man) << 4) &
0x0007fff0))
;
1938 AR_WRITE(sc, AR_PHY_HALFGI, reg)(sc)->ops.write((sc), (0x99d0), (reg));
1939 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1940}
1941
1942void
1943ar5008_enable_antenna_diversity(struct athn_softc *sc)
1944{
1945 AR_SETBITS(sc, AR_PHY_CCK_DETECT,(sc)->ops.write((sc), (0xa208), ((sc)->ops.read((sc), (
0xa208)) | (0x00002000)))
1946 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV)(sc)->ops.write((sc), (0xa208), ((sc)->ops.read((sc), (
0xa208)) | (0x00002000)))
;
1947 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1948}
1949
1950void
1951ar5008_init_baseband(struct athn_softc *sc)
1952{
1953 uint32_t synth_delay;
1954
1955 synth_delay = ar5008_synth_delay(sc);
1956 /* Activate the PHY (includes baseband activate and synthesizer on). */
1957 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN)(sc)->ops.write((sc), (0x981c), (0x00000001));
1958 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1959 DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay)(*delay_func)(100 + synth_delay);
1960}
1961
1962void
1963ar5008_disable_phy(struct athn_softc *sc)
1964{
1965 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS)(sc)->ops.write((sc), (0x981c), (0x00000000));
1966 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1967}
1968
1969void
1970ar5008_init_chains(struct athn_softc *sc)
1971{
1972 if (sc->rxchainmask == 0x5 || sc->txchainmask == 0x5)
1973 AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN)(sc)->ops.write((sc), (0xa268), ((sc)->ops.read((sc), (
0xa268)) | (0x00000040)))
;
1974
1975 /* Setup chain masks. */
1976 if (sc->mac_ver <= AR_SREV_VERSION_91600x040 &&
1977 (sc->rxchainmask == 0x3 || sc->rxchainmask == 0x5)) {
1978 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, 0x7)(sc)->ops.write((sc), (0x99a4), (0x7));
1979 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7)(sc)->ops.write((sc), (0xa39c), (0x7));
1980 } else {
1981 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->rxchainmask)(sc)->ops.write((sc), (0x99a4), (sc->rxchainmask));
1982 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->rxchainmask)(sc)->ops.write((sc), (0xa39c), (sc->rxchainmask));
1983 }
1984 AR_WRITE(sc, AR_SELFGEN_MASK, sc->txchainmask)(sc)->ops.write((sc), (0x832c), (sc->txchainmask));
1985 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1986}
1987
1988void
1989ar5008_set_rxchains(struct athn_softc *sc)
1990{
1991 if (sc->rxchainmask == 0x3 || sc->rxchainmask == 0x5) {
1992 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->rxchainmask)(sc)->ops.write((sc), (0x99a4), (sc->rxchainmask));
1993 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->rxchainmask)(sc)->ops.write((sc), (0xa39c), (sc->rxchainmask));
1994 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
1995 }
1996}
1997
1998void
1999ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
2000{
2001/* Sign-extends 9-bit value (assumes upper bits are zeroes). */
2002#define SIGN_EXT(v) (((v) ^ 0x100) - 0x100)
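/* For example, SIGN_EXT(0x1ff) = 0x0ff - 0x100 = -1, and SIGN_EXT(0x0ff) = 0x1ff - 0x100 = 255. */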
2003 uint32_t reg;
2004 int i;
2005
2006 for (i = 0; i < sc->nrxchains; i++) {
2007 reg = AR_READ(sc, AR_PHY_CCA(i))(sc)->ops.read((sc), ((0x9864 + (i) * 0x1000)));
2008 if (AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080))
2009 nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR)(((uint32_t)(reg) & 0x1ff00000) >> 20);
2010 else
2011 nf[i] = MS(reg, AR_PHY_MINCCA_PWR)(((uint32_t)(reg) & 0x0ff80000) >> 19);
2012 nf[i] = SIGN_EXT(nf[i]);
2013
2014 reg = AR_READ(sc, AR_PHY_EXT_CCA(i))(sc)->ops.read((sc), ((0x99bc + (i) * 0x1000)));
2015 if (AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080))
2016 nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR)(((uint32_t)(reg) & 0x01ff0000) >> 16);
2017 else
2018 nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR)(((uint32_t)(reg) & 0xff800000) >> 23);
2019 nf_ext[i] = SIGN_EXT(nf_ext[i]);
2020 }
2021#undef SIGN_EXT
2022}
2023
2024void
2025ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
2026{
2027 uint32_t reg;
2028 int i;
2029
2030 for (i = 0; i < sc->nrxchains; i++) {
2031 reg = AR_READ(sc, AR_PHY_CCA(i))(sc)->ops.read((sc), ((0x9864 + (i) * 0x1000)));
2032 reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i])(((reg) & ~0x000001ff) | (((uint32_t)(nf[i]) << 0) &
0x000001ff))
;
2033 AR_WRITE(sc, AR_PHY_CCA(i), reg)(sc)->ops.write((sc), ((0x9864 + (i) * 0x1000)), (reg));
2034
2035 reg = AR_READ(sc, AR_PHY_EXT_CCA(i))(sc)->ops.read((sc), ((0x99bc + (i) * 0x1000)));
2036 reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i])(((reg) & ~0x000001ff) | (((uint32_t)(nf_ext[i]) <<
0) & 0x000001ff))
;
2037 AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg)(sc)->ops.write((sc), ((0x99bc + (i) * 0x1000)), (reg));
2038 }
2039 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2040}
2041
2042int
2043ar5008_get_noisefloor(struct athn_softc *sc)
2044{
2045 int16_t nf[AR_MAX_CHAINS3], nf_ext[AR_MAX_CHAINS3];
2046 int i;
2047
2048 if (AR_READ(sc, AR_PHY_AGC_CONTROL)(sc)->ops.read((sc), (0x9860)) & AR_PHY_AGC_CONTROL_NF0x00000002) {
2049 /* Noisefloor calibration not finished. */
2050 return 0;
2051 }
2052 /* Noisefloor calibration is finished. */
2053 ar5008_read_noisefloor(sc, nf, nf_ext);
2054
2055 /* Update noisefloor history. */
2056 for (i = 0; i < sc->nrxchains; i++) {
2057 sc->nf_hist[sc->nf_hist_cur].nf[i] = nf[i];
2058 sc->nf_hist[sc->nf_hist_cur].nf_ext[i] = nf_ext[i];
2059 }
2060 if (++sc->nf_hist_cur >= ATHN_NF_CAL_HIST_MAX5)
2061 sc->nf_hist_cur = 0;
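	/*
	 * nf_hist is a small circular history (ATHN_NF_CAL_HIST_MAX entries);
	 * sc->nf_priv/nf_ext_priv, presumably filtered from this history
	 * elsewhere in the driver, are what ar5008_bb_load_noisefloor()
	 * writes back to the baseband.
	 */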
2062 return 1;
2063}
2064
2065void
2066ar5008_bb_load_noisefloor(struct athn_softc *sc)
2067{
2068 int16_t nf[AR_MAX_CHAINS3], nf_ext[AR_MAX_CHAINS3];
2069 int i, ntries;
2070
2071 /* Write filtered noisefloor values. */
2072 for (i = 0; i < sc->nrxchains; i++) {
2073 nf[i] = sc->nf_priv[i] * 2;
2074 nf_ext[i] = sc->nf_ext_priv[i] * 2;
2075 }
2076 ar5008_write_noisefloor(sc, nf, nf_ext);
2077
2078 /* Load filtered noisefloor values into baseband. */
2079 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) & ~(0x00008000)))
;
2080 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) & ~(0x00020000)))
;
2081 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) | (0x00000002)))
;
2082 /* Wait for load to complete. */
2083 for (ntries = 0; ntries < 1000; ntries++) {
2084 if (!(AR_READ(sc, AR_PHY_AGC_CONTROL)(sc)->ops.read((sc), (0x9860)) & AR_PHY_AGC_CONTROL_NF0x00000002))
2085 break;
2086 DELAY(50)(*delay_func)(50);
2087 }
2088 if (ntries == 1000) {
2089 DPRINTF(("failed to load noisefloor values\n"));
2090 return;
2091 }
2092
2093 /*
2094 * Restore noisefloor values to initial (max) values. These will
2095 * be used as initial values during the next NF calibration.
2096 */
2097 for (i = 0; i < AR_MAX_CHAINS3; i++)
2098 nf[i] = nf_ext[i] = AR_DEFAULT_NOISE_FLOOR(-100);
2099 ar5008_write_noisefloor(sc, nf, nf_ext);
2100}
2101
2102void
2103ar5008_apply_noisefloor(struct athn_softc *sc)
2104{
2105 uint32_t agc_nfcal;
2106
2107 agc_nfcal = AR_READ(sc, AR_PHY_AGC_CONTROL)(sc)->ops.read((sc), (0x9860)) &
2108 (AR_PHY_AGC_CONTROL_NF0x00000002 | AR_PHY_AGC_CONTROL_ENABLE_NF0x00008000 |
2109 AR_PHY_AGC_CONTROL_NO_UPDATE_NF0x00020000);
2110
2111 if (agc_nfcal & AR_PHY_AGC_CONTROL_NF0x00000002) {
2112 /* Pause running NF calibration while values are updated. */
2113 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) & ~(0x00000002)))
;
2114 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2115 }
2116
2117 ar5008_bb_load_noisefloor(sc);
2118
2119 if (agc_nfcal & AR_PHY_AGC_CONTROL_NF0x00000002) {
2120 /* Restart interrupted NF calibration. */
2121 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, agc_nfcal)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) | (agc_nfcal)))
;
2122 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2123 }
2124}
2125
2126void
2127ar5008_do_noisefloor_calib(struct athn_softc *sc)
2128{
2129 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) | (0x00008000)))
;
2130 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) | (0x00020000)))
;
2131 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) | (0x00000002)))
;
2132 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2133}
2134
2135void
2136ar5008_init_noisefloor_calib(struct athn_softc *sc)
2137{
2138 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF)(sc)->ops.write((sc), (0x9860), ((sc)->ops.read((sc), (
0x9860)) | (0x00000002)))
;
2139 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2140}
2141
2142void
2143ar5008_do_calib(struct athn_softc *sc)
2144{
2145 uint32_t mode, reg;
2146 int log;
2147
2148 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0)(sc)->ops.read((sc), (0x9920));
2149 log = AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080) ? 10 : 2;
2150 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log)(((reg) & ~0x0000f000) | (((uint32_t)(log) << 12) &
0x0000f000))
;
2151 AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg)(sc)->ops.write((sc), (0x9920), (reg));
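	/*
	 * The IQCAL log count presumably sets the measurement length as a
	 * power of two; it matches the sample counts used by
	 * ar5008_calib_adc_dc_off() (1 << (10 + 5) vs. 1 << (2 + 5) per
	 * accumulated sample).
	 */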
2152
2153 if (sc->cur_calib_mask & ATHN_CAL_ADC_GAIN(1 << 1))
2154 mode = AR_PHY_CALMODE_ADC_GAIN0x00000001;
2155 else if (sc->cur_calib_mask & ATHN_CAL_ADC_DC(1 << 2))
2156 mode = AR_PHY_CALMODE_ADC_DC_PER0x00000002;
2157 else /* ATHN_CAL_IQ */
2158 mode = AR_PHY_CALMODE_IQ0x00000000;
2159 AR_WRITE(sc, AR_PHY_CALMODE, mode)(sc)->ops.write((sc), (0x99f0), (mode));
2160
2161 DPRINTF(("starting calibration mode=0x%x\n", mode));
2162 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL)(sc)->ops.write((sc), (0x9920), ((sc)->ops.read((sc), (
0x9920)) | (0x00010000)))
;
2163 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2164}
2165
2166void
2167ar5008_next_calib(struct athn_softc *sc)
2168{
2169 /* Check if we have any calibration in progress. */
2170 if (sc->cur_calib_mask != 0) {
2171 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0)(sc)->ops.read((sc), (0x9920)) &
2172 AR_PHY_TIMING_CTRL4_DO_CAL0x00010000)) {
2173 /* Calibration completed for current sample. */
2174 if (sc->cur_calib_mask & ATHN_CAL_ADC_GAIN(1 << 1))
2175 ar5008_calib_adc_gain(sc);
2176 else if (sc->cur_calib_mask & ATHN_CAL_ADC_DC(1 << 2))
2177 ar5008_calib_adc_dc_off(sc);
2178 else /* ATHN_CAL_IQ */
2179 ar5008_calib_iq(sc);
2180 }
2181 }
2182}
2183
2184void
2185ar5008_calib_iq(struct athn_softc *sc)
2186{
2187 struct athn_iq_cal *cal;
2188 uint32_t reg, i_coff_denom, q_coff_denom;
2189 int32_t i_coff, q_coff;
2190 int i, iq_corr_neg;
2191
2192 for (i = 0; i < AR_MAX_CHAINS3; i++) {
2193 cal = &sc->calib.iq[i];
2194
2195 /* Accumulate IQ calibration measures (clear on read). */
2196 cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i))(sc)->ops.read((sc), ((0x9c10 + (i) * 0x1000)));
2197 cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i))(sc)->ops.read((sc), ((0x9c14 + (i) * 0x1000)));
2198 cal->iq_corr_meas +=
2199 (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i))(sc)->ops.read((sc), ((0x9c18 + (i) * 0x1000)));
2200 }
2201 if (!AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080) &&
2202 ++sc->calib.nsamples < AR_CAL_SAMPLES64) {
2203 /* Not enough samples accumulated, continue. */
2204 ar5008_do_calib(sc);
2205 return;
2206 }
2207
2208 for (i = 0; i < sc->nrxchains; i++) {
2209 cal = &sc->calib.iq[i];
2210
2211 if (cal->pwr_meas_q == 0)
2212 continue;
2213
2214 if ((iq_corr_neg = cal->iq_corr_meas < 0))
2215 cal->iq_corr_meas = -cal->iq_corr_meas;
2216
2217 i_coff_denom =
2218 (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
2219 q_coff_denom = cal->pwr_meas_q / 64;
2220
2221 if (i_coff_denom == 0 || q_coff_denom == 0)
2222 continue; /* Prevents division by zero. */
2223
2224 i_coff = cal->iq_corr_meas / i_coff_denom;
2225 q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;
2226
2227 /* Negate i_coff if iq_corr_meas is positive. */
2228 if (!iq_corr_neg)
2229 i_coff = 0x40 - (i_coff & 0x3f);
2230 if (q_coff > 15)
2231 q_coff = 15;
2232 else if (q_coff <= -16)
2233 q_coff = -16; /* XXX Linux has a bug here? */
2234
2235 DPRINTFN(2, ("IQ calibration for chain %d\n", i));
2236 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i))(sc)->ops.read((sc), ((0x9920 + (i) * 0x1000)));
2237 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff)(((reg) & ~0x000007e0) | (((uint32_t)(i_coff) << 5)
& 0x000007e0))
;
2238 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff)(((reg) & ~0x0000001f) | (((uint32_t)(q_coff) << 0)
& 0x0000001f))
;
2239 AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg)(sc)->ops.write((sc), ((0x9920 + (i) * 0x1000)), (reg));
2240 }
2241
2242 /* Apply new settings. */
2243 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,(sc)->ops.write((sc), (0x9920), ((sc)->ops.read((sc), (
0x9920)) | (0x00000800)))
2244 AR_PHY_TIMING_CTRL4_IQCORR_ENABLE)(sc)->ops.write((sc), (0x9920), ((sc)->ops.read((sc), (
0x9920)) | (0x00000800)))
;
2245 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2246
2247 /* IQ calibration done. */
2248 sc->cur_calib_mask &= ~ATHN_CAL_IQ(1 << 0);
2249 memset(&sc->calib, 0, sizeof(sc->calib))__builtin_memset((&sc->calib), (0), (sizeof(sc->calib
)))
;
2250}
2251
2252void
2253ar5008_calib_adc_gain(struct athn_softc *sc)
2254{
2255 struct athn_adc_cal *cal;
2256 uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2257 int i;
2258
2259 for (i = 0; i < AR_MAX_CHAINS3; i++) {
2260 cal = &sc->calib.adc_gain[i];
2261
2262 /* Accumulate ADC gain measures (clear on read). */
2263 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i))(sc)->ops.read((sc), ((0x9c10 + (i) * 0x1000)));
2264 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i))(sc)->ops.read((sc), ((0x9c14 + (i) * 0x1000)));
2265 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i))(sc)->ops.read((sc), ((0x9c18 + (i) * 0x1000)));
2266 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i))(sc)->ops.read((sc), ((0x9c1c + (i) * 0x1000)));
2267 }
2268 if (!AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080) &&
2269 ++sc->calib.nsamples < AR_CAL_SAMPLES64) {
2270 /* Not enough samples accumulated, continue. */
2271 ar5008_do_calib(sc);
2272 return;
2273 }
2274
2275 for (i = 0; i < sc->nrxchains; i++) {
2276 cal = &sc->calib.adc_gain[i];
2277
2278 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2279 continue; /* Prevents division by zero. */
2280
2281 gain_mismatch_i =
2282 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2283 gain_mismatch_q =
2284 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
2285
2286 DPRINTFN(2, ("ADC gain calibration for chain %d\n", i));
2287 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i))(sc)->ops.read((sc), ((0x99b4 + (i) * 0x1000)));
2288 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i)(((reg) & ~0x00000fc0) | (((uint32_t)(gain_mismatch_i) <<
6) & 0x00000fc0))
;
2289 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q)(((reg) & ~0x0000003f) | (((uint32_t)(gain_mismatch_q) <<
0) & 0x0000003f))
;
2290 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg)(sc)->ops.write((sc), ((0x99b4 + (i) * 0x1000)), (reg));
2291 }
2292
2293 /* Apply new settings. */
2294 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),(sc)->ops.write((sc), ((0x99b4 + (0) * 0x1000)), ((sc)->
ops.read((sc), ((0x99b4 + (0) * 0x1000))) | (0x40000000)))
2295 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE)(sc)->ops.write((sc), ((0x99b4 + (0) * 0x1000)), ((sc)->
ops.read((sc), ((0x99b4 + (0) * 0x1000))) | (0x40000000)))
;
2296 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2297
2298 /* ADC gain calibration done. */
2299 sc->cur_calib_mask &= ~ATHN_CAL_ADC_GAIN(1 << 1);
2300 memset(&sc->calib, 0, sizeof(sc->calib))__builtin_memset((&sc->calib), (0), (sizeof(sc->calib
)))
;
2301}
2302
2303void
2304ar5008_calib_adc_dc_off(struct athn_softc *sc)
2305{
2306 struct athn_adc_cal *cal;
2307 int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
2308 uint32_t reg;
2309 int count, i;
2310
2311 for (i = 0; i < AR_MAX_CHAINS3; i++) {
2312 cal = &sc->calib.adc_dc_offset[i];
2313
2314 /* Accumulate ADC DC offset measures (clear on read). */
2315 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i))(sc)->ops.read((sc), ((0x9c10 + (i) * 0x1000)));
2316 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i))(sc)->ops.read((sc), ((0x9c14 + (i) * 0x1000)));
2317 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i))(sc)->ops.read((sc), ((0x9c18 + (i) * 0x1000)));
2318 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i))(sc)->ops.read((sc), ((0x9c1c + (i) * 0x1000)));
2319 }
2320 if (!AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080) &&
2321 ++sc->calib.nsamples < AR_CAL_SAMPLES64) {
2322 /* Not enough samples accumulated, continue. */
2323 ar5008_do_calib(sc);
2324 return;
2325 }
2326
2327 if (AR_SREV_9280_10_OR_LATER(sc)((sc)->mac_ver >= 0x080))
2328 count = (1 << (10 + 5));
2329 else
2330 count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES64;
2331 for (i = 0; i < sc->nrxchains; i++) {
2332 cal = &sc->calib.adc_dc_offset[i];
2333
2334 dc_offset_mismatch_i =
2335 (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
2336 dc_offset_mismatch_q =
2337 (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;
2338
2339 DPRINTFN(2, ("ADC DC offset calibration for chain %d\n", i));
2340 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i))(sc)->ops.read((sc), ((0x99b4 + (i) * 0x1000)));
2341 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,(((reg) & ~0x001ff000) | (((uint32_t)(dc_offset_mismatch_q
) << 12) & 0x001ff000))
2342 dc_offset_mismatch_q)(((reg) & ~0x001ff000) | (((uint32_t)(dc_offset_mismatch_q
) << 12) & 0x001ff000))
;
2343 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,(((reg) & ~0x3fe00000) | (((uint32_t)(dc_offset_mismatch_i
) << 21) & 0x3fe00000))
2344 dc_offset_mismatch_i)(((reg) & ~0x3fe00000) | (((uint32_t)(dc_offset_mismatch_i
) << 21) & 0x3fe00000))
;
2345 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg)(sc)->ops.write((sc), ((0x99b4 + (i) * 0x1000)), (reg));
2346 }
2347
2348 /* Apply new settings. */
2349 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),(sc)->ops.write((sc), ((0x99b4 + (0) * 0x1000)), ((sc)->
ops.read((sc), ((0x99b4 + (0) * 0x1000))) | (0x80000000)))
2350 AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE)(sc)->ops.write((sc), ((0x99b4 + (0) * 0x1000)), ((sc)->
ops.read((sc), ((0x99b4 + (0) * 0x1000))) | (0x80000000)))
;
2351 AR_WRITE_BARRIER(sc)(sc)->ops.write_barrier((sc));
2352
2353 /* ADC DC offset calibration done. */
2354 sc->cur_calib_mask &= ~ATHN_CAL_ADC_DC(1 << 2);
2355 memset(&sc->calib, 0, sizeof(sc->calib))__builtin_memset((&sc->calib), (0), (sizeof(sc->calib
)))
;
2356}
2357
2358void
2359ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT68])
2360{
2361 AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,(sc)->ops.write((sc), (0x9934), ((power[3 ] & 0x3f) <<
24 | (power[2 ] & 0x3f) << 16 | (power[1 ] & 0x3f
) << 8 | (power[0 ] & 0x3f)))
2362 (power[ATHN_POWER_OFDM18 ] & 0x3f) << 24 |(sc)->ops.write((sc), (0x9934), ((power[3 ] & 0x3f) <<
24 | (power[2 ] & 0x3f) << 16 | (power[1 ] & 0x3f
) << 8 | (power[0 ] & 0x3f)))
2363 (power[ATHN_POWER_OFDM12 ] & 0x3f) << 16 |(sc)->ops.write((sc), (0x9934), ((power[3 ] & 0x3f) <<
24 | (power[2 ] & 0x3f) << 16 | (power[1 ] & 0x3f
) << 8 | (power[0 ] & 0x3f)))
2364 (power[ATHN_POWER_OFDM9 ] & 0x3f) << 8 |(sc)->ops.write((sc), (0x9934), ((power[3 ] & 0x3f) <<
24 | (power[2 ] & 0x3f) << 16 | (power[1 ] & 0x3f
) << 8 | (power[0 ] & 0x3f)))
2365 (power[ATHN_POWER_OFDM6 ] & 0x3f))(sc)->ops.write((sc), (0x9934), ((power[3 ] & 0x3f) <<
24 | (power[2 ] & 0x3f) << 16 | (power[1 ] & 0x3f
) << 8 | (power[0 ] & 0x3f)))
;
2366 AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,(sc)->ops.write((sc), (0x9938), ((power[7 ] & 0x3f) <<
24 | (power[6 ] & 0x3f) << 16 | (power[5 ] & 0x3f
) << 8 | (power[4 ] & 0x3f)))
2367 (power[ATHN_POWER_OFDM54 ] & 0x3f) << 24 |(sc)->ops.write((sc), (0x9938), ((power[7 ] & 0x3f) <<
24 | (power[6 ] & 0x3f) << 16 | (power[5 ] & 0x3f
) << 8 | (power[4 ] & 0x3f)))
2368 (power[ATHN_POWER_OFDM48 ] & 0x3f) << 16 |(sc)->ops.write((sc), (0x9938), ((power[7 ] & 0x3f) <<
24 | (power[6 ] & 0x3f) << 16 | (power[5 ] & 0x3f
) << 8 | (power[4 ] & 0x3f)))
2369 (power[ATHN_POWER_OFDM36 ] & 0x3f) << 8 |(sc)->ops.write((sc), (0x9938), ((power[7 ] & 0x3f) <<
24 | (power[6 ] & 0x3f) << 16 | (power[5 ] & 0x3f
) << 8 | (power[4 ] & 0x3f)))
2370 (power[ATHN_POWER_OFDM24 ] & 0x3f))(sc)->ops.write((sc), (0x9938), ((power[7 ] & 0x3f) <<
24 | (power[6 ] & 0x3f) << 16 | (power[5 ] & 0x3f
) << 8 | (power[4 ] & 0x3f)))
;
2371 AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,(sc)->ops.write((sc), (0xa234), ((power[10 ] & 0x3f) <<
24 | (power[9 ] & 0x3f) << 16 | (power[15 ] & 0x3f
) << 8 | (power[8 ] & 0x3f)))
2372 (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |(sc)->ops.write((sc), (0xa234), ((power[10 ] & 0x3f) <<
24 | (power[9 ] & 0x3f) << 16 | (power[15 ] & 0x3f
) << 8 | (power[8 ] & 0x3f)))
2373 (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |(sc)->ops.write((sc), (0xa234), ((power[10 ] & 0x3f) <<
24 | (power[9 ] & 0x3f) << 16 | (power[15 ] & 0x3f
) << 8 | (power[8 ] & 0x3f)))
2374 (power[ATHN_POWER_XR ] & 0x3f) << 8 |(sc)->ops.write((sc), (0xa234), ((power[10 ] & 0x3f) <<
24 | (power[9 ] & 0x3f) << 16 | (power[15 ] & 0x3f
) << 8 | (power[8 ] & 0x3f)))
2375 (power[ATHN_POWER_CCK1_LP ] & 0x3f))(sc)->ops.write((sc), (0xa234), ((power[10 ] & 0x3f) <<
24 | (power[9 ] & 0x3f) << 16 | (power[15 ] & 0x3f
) << 8 | (power[8 ] & 0x3f)))
;
2376 AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,(sc)->ops.write((sc), (0xa238), ((power[14] & 0x3f) <<
24 | (power[13] & 0x3f) << 16 | (power[12] & 0x3f
) << 8 | (power[11] & 0x3f)))
2377 (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |(sc)->ops.write((sc), (0xa238), ((power[14] & 0x3f) <<
24 | (power[13] & 0x3f) << 16 | (power[12] & 0x3f
) << 8 | (power[11] & 0x3f)))
2378 (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |(sc)->ops.write((sc), (0xa238), ((power[14] & 0x3f) <<
24 | (power[13] & 0x3f) << 16 | (power[12] & 0x3f
) << 8 | (power[11] & 0x3f)))
2379 (power[ATHN_POWER_CCK55_SP] & 0x3f) << 8 |(sc)->ops.write((sc), (0xa238), ((power[14] & 0x3f) <<
24 | (power[13] & 0x3f) << 16 | (power[12] & 0x3f
) << 8 | (power[11] & 0x3f)))
2380 (power[ATHN_POWER_CCK55_LP] & 0x3f))(sc)->ops.write((sc), (0xa238), ((power[14] & 0x3f) <<
24 | (power[13] & 0x3f) << 16 | (power[12] & 0x3f
) << 8 | (power[11] & 0x3f)))
;
2381 AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,(sc)->ops.write((sc), (0xa38c), ((power[(16 + (3)) ] &
0x3f) << 24 | (power[(16 + (2)) ] & 0x3f) <<
16 | (power[(16 + (1)) ] & 0x3f) << 8 | (power[(16
+ (0)) ] & 0x3f)))
2382 (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |(sc)->ops.write((sc), (0xa38c), ((power[(16 + (3)) ] &
0x3f) << 24 | (power[(16 + (2)) ] & 0x3f) <<
16 | (power[(16 + (1)) ] & 0x3f) << 8 | (power[(16
+ (0)) ] & 0x3f)))
2383 (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |(sc)->ops.write((sc), (0xa38c), ((power[(16 + (3)) ] &
0x3f) << 24 | (power[(16 + (2)) ] & 0x3f) <<
16 | (power[(16 + (1)) ] & 0x3f) << 8 | (power[(16
+ (0)) ] & 0x3f)))
2384 (power[ATHN_POWER_HT20(1) ] & 0x3f) << 8 |(sc)->ops.write((sc), (0xa38c), ((power[(16 + (3)) ] &
0x3f) << 24 | (power[(16 + (2)) ] & 0x3f) <<
16 | (power[(16 + (1)) ] & 0x3f) << 8 | (power[(16
+ (0)) ] & 0x3f)))
2385 (power[ATHN_POWER_HT20(0) ] & 0x3f))(sc)->ops.write((sc), (0xa38c), ((power[(16 + (3)) ] &
0x3f) << 24 | (power[(16 + (2)) ] & 0x3f) <<
16 | (power[(16 + (1)) ] & 0x3f) << 8 | (power[(16
+ (0)) ] & 0x3f)))
;
2386 AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,(sc)->ops.write((sc), (0xa390), ((power[(16 + (7)) ] &
0x3f) << 24 | (power[(16 + (6)) ] & 0x3f) <<
16 | (power[(16 + (5)) ] & 0x3f) << 8 | (power[(16
+ (4)) ] & 0x3f)))
2387 (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |(sc)->ops.write((sc), (0xa390), ((power[(16 + (7)) ] &
2388 (power[ATHN_POWER_HT20(6)] & 0x3f) << 16 |
2389 (power[ATHN_POWER_HT20(5)] & 0x3f) << 8 |
2390 (power[ATHN_POWER_HT20(4)] & 0x3f));
2391 AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
2392 (power[ATHN_POWER_HT40(3)] & 0x3f) << 24 |
2393 (power[ATHN_POWER_HT40(2)] & 0x3f) << 16 |
2394 (power[ATHN_POWER_HT40(1)] & 0x3f) << 8 |
2395 (power[ATHN_POWER_HT40(0)] & 0x3f));
2396 AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
2397 (power[ATHN_POWER_HT40(7)] & 0x3f) << 24 |
2398 (power[ATHN_POWER_HT40(6)] & 0x3f) << 16 |
2399 (power[ATHN_POWER_HT40(5)] & 0x3f) << 8 |
2400 (power[ATHN_POWER_HT40(4)] & 0x3f));
2401 AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
2402 (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
2403 (power[ATHN_POWER_CCK_EXT] & 0x3f) << 16 |
2404 (power[ATHN_POWER_OFDM_DUP] & 0x3f) << 8 |
2405 (power[ATHN_POWER_CCK_DUP] & 0x3f));
2406 AR_WRITE_BARRIER(sc);
2407}
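/*
 * Editor's note, not part of ar5008.c: the AR_PHY_POWER_TX_RATE* writes
 * above all follow one pattern -- four consecutive 6-bit per-rate power
 * values packed into a single 32-bit register, lowest rate index in the
 * low byte. A minimal sketch of that packing, using a hypothetical
 * pack_tx_power() helper that is not in the driver:
 */
static inline uint32_t
pack_tx_power(const uint8_t *power, int base)
{
	/* power[base+0] -> bits 0-5, power[base+1] -> bits 8-13, and so on. */
	return (uint32_t)(power[base + 3] & 0x3f) << 24 |
	    (uint32_t)(power[base + 2] & 0x3f) << 16 |
	    (uint32_t)(power[base + 1] & 0x3f) << 8 |
	    (uint32_t)(power[base + 0] & 0x3f);
}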
2408
2409void
2410ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
2411{
2412 uint32_t mask[4], reg;
2413 uint8_t m[62], p[62]; /* XXX use bit arrays? */
2414 int i, bit, cur;
2415
2416 /* Compute pilot mask. */
2417 cur = -6000;
2418 for (i = 0; i < 4; i++) {
2419 mask[i] = 0;
2420 for (bit = 0; bit < 30; bit++) {
2421 if (abs(cur - bin) < 100)
2422 mask[i] |= 1 << bit;
2423 cur += 100;
2424 }
2425 if (cur == 0) /* Skip entry "0". */
2426 cur = 100;
2427 }
2428 /* Write entries from -6000 to -3100. */
2429 AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
2430 AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
2431 /* Write entries from -3000 to -100. */
2432 AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
2433 AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
2434 /* Write entries from 100 to 3000. */
2435 AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
2436 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
2437 /* Write entries from 3100 to 6000. */
2438 AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
2439 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);
2440
2441 /* Compute viterbi mask. */
2442 for (cur = 6100; cur >= 0; cur -= 100)
2443 p[+cur / 100] = abs(cur - bin) < 75;
2444 for (cur = -100; cur >= -6100; cur -= 100)
2445 m[-cur / 100] = abs(cur - bin) < 75;
2446
2447 /* Write viterbi mask (XXX needs to be reworked). */
2448 reg =
2449 m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
2450 m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
2451 m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] << 8 |
2452 m[58] << 6 | m[59] << 4 | m[60] << 2 | m[61] << 0;
2453 AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
2454 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);
2455
2456 /* XXX m[48] should be m[38] ? */
2457 reg = m[31] << 28 | m[32] << 26 | m[33] << 24 |
2458 m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
2459 m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] << 8 |
2460 m[42] << 6 | m[43] << 4 | m[44] << 2 | m[45] << 0;
2461 AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
2462 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);
2463
2464 /* XXX This one is weird too. */
2465 reg =
2466 m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
2467 m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
2468 m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] << 8 |
2469 m[27] << 6 | m[28] << 4 | m[29] << 2 | m[30] << 0;
2470 AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
2471 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);
2472
2473 reg =
2474 m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
2475 m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
2476 m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] << 8 |
2477 m[12] << 6 | m[13] << 4 | m[14] << 2 | m[15] << 0;
2478 AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
2479 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);
2480
2481 reg = p[15] << 28 | p[14] << 26 | p[13] << 24 |
2482 p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
2483 p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] << 8 |
2484 p[ 4] << 6 | p[ 3] << 4 | p[ 2] << 2 | p[ 1] << 0;
2485 AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
2486 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);
2487
2488 reg = p[30] << 28 | p[29] << 26 | p[28] << 24 |
2489 p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
2490 p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] << 8 |
2491 p[19] << 6 | p[18] << 4 | p[17] << 2 | p[16] << 0;
2492 AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
2493 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);
2494
2495 reg = p[45] << 28 | p[44] << 26 | p[43] << 24 |
2496 p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
2497 p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] << 8 |
2498 p[34] << 6 | p[33] << 4 | p[32] << 2 | p[31] << 0;
2499 AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
2500 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);
2501
2502 reg =
2503 p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
2504 p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
2505 p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] << 8 |
2506 p[49] << 6 | p[48] << 4 | p[47] << 2 | p[46] << 0;
2507 AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
2508 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
2509 AR_WRITE_BARRIER(sc);
2510}
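/*
 * Editor's note, not part of ar5008.c: in the loops above, p[k] flags the
 * positive offset +100*k and m[k] the negative offset -100*k; an entry is
 * set when that offset lies within 75 units of the spur 'bin'. A hedged
 * sketch of the per-offset predicate, with a hypothetical helper name:
 */
static inline int
viterbi_mask_bit(int offset, int bin)
{
	/* Mask subcarrier offsets closer than 75 units to the spur. */
	return (abs(offset - bin) < 75);
}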
2511
2512void
2513ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
2514 struct ieee80211_channel *extc)
2515{
2516 struct athn_ops *ops = &sc->ops;
2517 const struct athn_ini *ini = sc->ini;
2518 const uint32_t *pvals;
2519 uint32_t reg;
2520 int i;
2521
2522 AR_WRITE(sc, AR_PHY(0), 0x00000007);
2523 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
2524
2525 if (!AR_SINGLE_CHIP(sc))
2526 ar5416_reset_addac(sc, c);
2527
2528 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
2529
2530 /* First initialization step (depends on channel band/bandwidth). */
2531 if (extc != NULL) {
2532 if (IEEE80211_IS_CHAN_2GHZ(c))
2533 pvals = ini->vals_2g40;
2534 else
2535 pvals = ini->vals_5g40;
2536 } else {
2537 if (IEEE80211_IS_CHAN_2GHZ(c))
2538 pvals = ini->vals_2g20;
2539 else
2540 pvals = ini->vals_5g20;
2541 }
2542 DPRINTFN(4, ("writing modal init vals\n"));
2543 for (i = 0; i < ini->nregs; i++) {
2544 uint32_t val = pvals[i];
2545
2546 /* Fix AR_AN_TOP2 initialization value if required. */
2547 if (ini->regs[i] == AR_AN_TOP2 &&
2548 (sc->flags & ATHN_FLAG_AN_TOP2_FIXUP))
2549 val &= ~AR_AN_TOP2_PWDCLKIND;
2550 AR_WRITE(sc, ini->regs[i], val);
2551 if (AR_IS_ANALOG_REG(ini->regs[i])) {
2552 AR_WRITE_BARRIER(sc);
2553 DELAY(100);
2554 }
2555 if ((i & 0x1f) == 0)
2556 DELAY(1);
2557 }
2558 AR_WRITE_BARRIER(sc);
2559
2560 if (sc->rx_gain != NULL)
2561 ar9280_reset_rx_gain(sc, c);
2562 if (sc->tx_gain != NULL)
2563 ar9280_reset_tx_gain(sc, c);
2564
2565 if (AR_SREV_9271_10(sc)) {
2566 AR_WRITE(sc, AR_PHY(68), 0x30002311);
2567 AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
2568 }
2569 AR_WRITE_BARRIER(sc);
2570
2571 /* Second initialization step (common to all channels). */
2572 DPRINTFN(4, ("writing common init vals\n"));
2573 for (i = 0; i < ini->ncmregs; i++) {
2574 AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
2575 if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
2576 AR_WRITE_BARRIER(sc);
2577 DELAY(100);
2578 }
2579 if ((i & 0x1f) == 0)
2580 DELAY(1);
2581 }
2582 AR_WRITE_BARRIER(sc);
2583
2584 if (!AR_SINGLE_CHIP(sc))
2585 ar5416_reset_bb_gain(sc, c);
2586
2587 if (IEEE80211_IS_CHAN_5GHZ(c) &&
2588 (sc->flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
2589 /* Update modal values for fast PLL clock. */
2590 if (extc != NULL((void *)0))
2591 pvals = ini->fastvals_5g40;
2592 else
2593 pvals = ini->fastvals_5g20;
2594 DPRINTFN(4, ("writing fast pll clock init vals\n"));
2595 for (i = 0; i < ini->nfastregs; i++) {
2596 AR_WRITE(sc, ini->fastregs[i], pvals[i]);
2597 if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
2598 AR_WRITE_BARRIER(sc);
2599 DELAY(100);
2600 }
2601 if ((i & 0x1f) == 0)
2602 DELAY(1);
2603 }
2604 }
2605
2606 /*
2607 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted
2608 * descriptor status.
2609 */
2610 AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);
2611
2612 /* Hardware workarounds for occasional Rx data corruption. */
2613 if (AR_SREV_9280_10_OR_LATER(sc)) {
2614 reg = AR_READ(sc, AR_PCU_MISC_MODE2);
2615 if (!AR_SREV_9271(sc))
2616 reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
2617 if (AR_SREV_9287_10_OR_LATER(sc))
2618 reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
2619 AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);
2620
2621 } else if (AR_SREV_5416_20_OR_LATER(sc)) {
2622 /* Disable baseband clock gating. */
2623 AR_WRITE(sc, AR_PHY(651), 0x11);
2624
2625 if (AR_SREV_9160(sc)) {
2626 /* Disable RIFS search to fix baseband hang. */
2627 AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
2628 AR_PHY_RIFS_INIT_DELAY_M);
2629 }
2630 }
2631 AR_WRITE_BARRIER(sc);
2632
2633 ar5008_set_phy(sc, c, extc);
2634 ar5008_init_chains(sc);
2635
2636 if (sc->flags & ATHN_FLAG_OLPC) {
2637 extern int ticks;
2638 sc->olpc_ticks = ticks;
2639 ops->olpc_init(sc);
2640 }
2641
2642 ops->set_txpower(sc, c, extc);
2643
2644 if (!AR_SINGLE_CHIP(sc))
2645 ar5416_rf_reset(sc, c);
2646}
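/*
 * Editor's note, not part of ar5008.c: ar5008_hw_init() repeats one idiom
 * for every init table above: write the value, then give analog registers
 * (the AR_IS_ANALOG_REG() range) a settling delay behind a write barrier.
 * A condensed sketch of that idiom with a hypothetical wrapper:
 */
static void
write_ini_reg(struct athn_softc *sc, uint32_t reg, uint32_t val)
{
	AR_WRITE(sc, reg, val);
	if (AR_IS_ANALOG_REG(reg)) {
		AR_WRITE_BARRIER(sc);	/* flush the write before waiting */
		DELAY(100);		/* settling time for analog registers */
	}
}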
2647
2648uint8_t
2649ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2650 int nicepts)
2651{
2652 uint8_t vpd;
2653 int i, lo, hi;
2654
2655 for (i = 0; i < nicepts; i++)
2656 if (pwrPdg[i] > pwr)
2657 break;
2658 hi = i;
2659 lo = hi - 1;
2660 if (lo == -1)
2661 lo = hi;
2662 else if (hi == nicepts)
2663 hi = lo;
2664
2665 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2666 pwrPdg[hi], vpdPdg[hi]);
2667 return (vpd);
2668}
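/*
 * Editor's note, not part of ar5008.c: ar5008_get_vpd() brackets 'pwr'
 * between two calibration intercepts and calls athn_interpolate(x, x1, y1,
 * x2, y2). Assuming that routine is plain linear interpolation with a
 * guard for a degenerate interval, an equivalent standalone sketch is:
 */
static int
interpolate_linear(int x, int x1, int y1, int x2, int y2)
{
	if (x1 == x2)	/* both intercepts identical: nothing to interpolate */
		return (y1);
	return (y1 + ((x - x1) * (y2 - y1)) / (x2 - x1));
}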
2669
2670void
2671ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
2672 struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
2673 int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
2674{
2675#define DB(x) ((x) / 2) /* Convert half dB to dB. */
2676 uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
2677 uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
2678 uint8_t lovpd, hivpd, boundary;
2679 int16_t ss, delta, vpdstep, val;
2680 int i, j, npdadcs, nvpds, maxidx, tgtidx;
2681
2682 /* Compute min and max power in half dB for each pdGain. */
2683 for (i = 0; i < nxpdgains; i++) {
2684 minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
2685 maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
2686 hipier->pwr[i][nicepts - 1]);
2687 }
2688
2689 /* Fill phase domain analog-to-digital converter (PDADC) table. */
2690 npdadcs = 0;
2691 for (i = 0; i < nxpdgains; i++) {
2692 if (i != nxpdgains - 1)
2693 boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
2694 else
2695 boundaries[i] = DB(maxpwr[i]);
2696 if (boundaries[i] > AR_MAX_RATE_POWER)
2697 boundaries[i] = AR_MAX_RATE_POWER;
2698
2699 if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
2700 /* Fix the gain delta (AR5416 1.0 only). */
2701 delta = boundaries[0] - 23;
2702 boundaries[0] = 23;
2703 } else
2704 delta = 0;
2705
2706 /* Find starting index for this pdGain. */
2707 if (i != 0) {
2708 ss = boundaries[i - 1] - DB(minpwr[i]) -
2709 overlap + 1 + delta;
2710 } else if (AR_SREV_9280_10_OR_LATER(sc)) {
2711 ss = -DB(minpwr[i]);
2712 } else
2713 ss = 0;
2714
2715 /* Compute Vpd table for this pdGain. */
2716 nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
2717 memset(vpd, 0, sizeof(vpd));
2718 pwr = minpwr[i];
2719 for (j = 0; j < nvpds; j++) {
2720 /* Get lower and higher Vpd. */
2721 lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
2722 lopier->vpd[i], nicepts);
2723 hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
2724 hipier->vpd[i], nicepts);
2725
2726 /* Interpolate the final Vpd. */
2727 vpd[j] = athn_interpolate(fbin,
2728 lopier->fbin, lovpd, hipier->fbin, hivpd);
2729
2730 pwr += 2; /* In half dB. */
2731 }
2732
2733 /* Extrapolate data for ss < 0. */
2734 if (vpd[1] > vpd[0])
2735 vpdstep = vpd[1] - vpd[0];
2736 else
2737 vpdstep = 1;
2738 while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2739 val = vpd[0] + ss * vpdstep;
2740 pdadcs[npdadcs++] = MAX(val, 0);
2741 ss++;
2742 }
2743
2744 tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
2745 maxidx = MIN(tgtidx, nvpds);
2746 while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
2747 pdadcs[npdadcs++] = vpd[ss++];
2748
2749 if (tgtidx < maxidx)
2750 continue;
2751
2752 /* Extrapolate data for maxidx <= ss <= tgtidx. */
2753 if (vpd[nvpds - 1] > vpd[nvpds - 2])
2754 vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
2755 else
2756 vpdstep = 1;
2757 while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2758 val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
2759 pdadcs[npdadcs++] = MIN(val, 255);
2760 ss++;
2761 }
2762 }
2763
2764 /* Fill remaining PDADC and boundaries entries. */
2765 if (AR_SREV_9285(sc))
2766 boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
2767 else /* Fill with latest. */
2768 boundary = boundaries[nxpdgains - 1];
2769
2770 for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
2771 boundaries[nxpdgains] = boundary;
2772
2773 for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
2774 pdadcs[npdadcs] = pdadcs[npdadcs - 1];
2775#undef DB
2776}
2777
2778void
2779ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2780 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2781 uint8_t tpow[4])
2782{
2783 uint8_t fbin;
2784 int i, lo, hi;
2785
2786 /* Find interval (lower and upper indices). */
2787 fbin = athn_chan2fbin(c);
2788 for (i = 0; i < nchans; i++) {
2789 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2790 tgt[i].bChannel > fbin)
2791 break;
2792 }
2793 hi = i;
2794 lo = hi - 1;
2795 if (lo == -1)
2796 lo = hi;
2797 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2798 hi = lo;
2799
2800 /* Interpolate values. */
2801 for (i = 0; i < 4; i++) {
2802 tpow[i] = athn_interpolate(fbin,
2803 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2804 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2805 }
2806 /* XXX Apply conformance testing limit. */
2807}
2808
2809void
2810ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2811 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2812 uint8_t tpow[8])
2813{
2814 uint8_t fbin;
2815 int i, lo, hi;
2816
2817 /* Find interval (lower and upper indices). */
2818 fbin = athn_chan2fbin(c);
2819 for (i = 0; i < nchans; i++) {
2820 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2821 tgt[i].bChannel > fbin)
2822 break;
2823 }
2824 hi = i;
2825 lo = hi - 1;
2826 if (lo == -1)
2827 lo = hi;
2828 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2829 hi = lo;
2830
2831 /* Interpolate values. */
2832 for (i = 0; i < 8; i++) {
2833 tpow[i] = athn_interpolate(fbin,
2834 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2835 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2836 }
2837 /* XXX Apply conformance testing limit. */
2838}
2839
2840/*
2841 * Adaptive noise immunity.
2842 */
2843void
2844ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2845{
2846 int high = level == 4;
2847 uint32_t reg;
2848
2849 reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2850 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2851 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2852
2853 reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2854 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2855 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2856 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2857
2858 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2859 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2860 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2861
2862 AR_WRITE_BARRIER(sc);
2863}
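/*
 * Editor's note, not part of ar5008.c: the RW() macro used in this and the
 * following ANI helpers is a read-modify-write field insert: clear the
 * field's mask, then OR in the new value shifted into position. A generic
 * sketch with explicit mask/shift parameters (hypothetical helper name):
 */
static inline uint32_t
rw_field(uint32_t reg, uint32_t mask, int shift, uint32_t val)
{
	/* e.g. AR_PHY_FIND_SIG_FIRPWR uses mask 0x03fc0000, shift 18. */
	return ((reg & ~mask) | ((val << shift) & mask));
}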
2864
2865void
2866ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2867{
2868 uint32_t reg;
2869
2870 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2871 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2872 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2873 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2874 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2875
2876 reg = AR_READ(sc, AR_PHY_SFCORR);
2877 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2878 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2879 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2880 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2881
2882 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2883 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2884 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2885 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2886 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2887 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2888
2889 AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2890 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2891 AR_WRITE_BARRIER(sc);
2892}
2893
2894void
2895ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2896{
2897 uint32_t reg;
2898
2899 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2900 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2901 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2902 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2903 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2904
2905 reg = AR_READ(sc, AR_PHY_SFCORR);
2906 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2907 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2908 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2909 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2910
2911 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2912 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2913 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2914 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2915 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2916 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2917
2918 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2919 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2920 AR_WRITE_BARRIER(sc);
2921}
2922
2923void
2924ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2925{
2926 uint32_t reg;
2927
2928 reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2929 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2930 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2931 AR_WRITE_BARRIER(sc);
2932}
2933
2934void
2935ar5008_set_firstep_level(struct athn_softc *sc, int level)
2936{
2937 uint32_t reg;
2938
2939 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2940 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2941 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2942 AR_WRITE_BARRIER(sc);
2943}
2944
2945void
2946ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2947{
2948 uint32_t reg;
2949
2950 reg = AR_READ(sc, AR_PHY_TIMING5);
2951 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2952 AR_WRITE(sc, AR_PHY_TIMING5, reg);
2953 AR_WRITE_BARRIER(sc);
2954}