/*	$NetBSD: arn5008.c,v 1.3 2013/04/06 14:57:38 martin Exp $	*/
/*	$OpenBSD: ar5008.c,v 1.21 2012/08/25 12:14:31 kettenis Exp $	*/

/*-
 * Copyright (c) 2009 Damien Bergamini <damien.bergamini (at) free.fr>
 * Copyright (c) 2008-2009 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

/*
 * Driver for Atheros 802.11a/g/n chipsets.
 * Routines common to AR5008, AR9001 and AR9002 families.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: arn5008.c,v 1.3 2013/04/06 14:57:38 martin Exp $");

#include <sys/param.h>
#include <sys/sockio.h>
#include <sys/mbuf.h>
#include <sys/kernel.h>
#include <sys/socket.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/queue.h>
#include <sys/conf.h>
#include <sys/device.h>

#include <sys/bus.h>
#include <sys/endian.h>
#include <sys/intr.h>

#include <net/bpf.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <net/if_dl.h>
#include <net/if_ether.h>
#include <net/if_media.h>
#include <net/if_types.h>

#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/in_var.h>
#include <netinet/ip.h>

#include <net80211/ieee80211_var.h>
#include <net80211/ieee80211_amrr.h>
#include <net80211/ieee80211_radiotap.h>

#include <dev/ic/athnreg.h>
#include <dev/ic/athnvar.h>

#include <dev/ic/arn5008reg.h>
#include <dev/ic/arn5008.h>
#include <dev/ic/arn5416.h>
#include <dev/ic/arn9280.h>

#define Static static

Static void	ar5008_calib_adc_dc_off(struct athn_softc *);
Static void	ar5008_calib_adc_gain(struct athn_softc *);
Static void	ar5008_calib_iq(struct athn_softc *);
Static void	ar5008_disable_ofdm_weak_signal(struct athn_softc *);
Static void	ar5008_disable_phy(struct athn_softc *);
Static int	ar5008_dma_alloc(struct athn_softc *);
Static void	ar5008_dma_free(struct athn_softc *);
Static void	ar5008_do_calib(struct athn_softc *);
Static void	ar5008_do_noisefloor_calib(struct athn_softc *);
Static void	ar5008_enable_antenna_diversity(struct athn_softc *);
Static void	ar5008_enable_ofdm_weak_signal(struct athn_softc *);
Static uint8_t	ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
Static void	ar5008_gpio_config_input(struct athn_softc *, int);
Static void	ar5008_gpio_config_output(struct athn_softc *, int, int);
Static int	ar5008_gpio_read(struct athn_softc *, int);
Static void	ar5008_gpio_write(struct athn_softc *, int, int);
Static void	ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
		    struct ieee80211_channel *);
Static void	ar5008_init_baseband(struct athn_softc *);
Static void	ar5008_init_chains(struct athn_softc *);
Static int	ar5008_intr(struct athn_softc *);
Static void	ar5008_next_calib(struct athn_softc *);
Static int	ar5008_read_eep_word(struct athn_softc *, uint32_t,
		    uint16_t *);
Static int	ar5008_read_rom(struct athn_softc *);
Static void	ar5008_rf_bus_release(struct athn_softc *);
Static int	ar5008_rf_bus_request(struct athn_softc *);
Static void	ar5008_rfsilent_init(struct athn_softc *);
Static int	ar5008_rx_alloc(struct athn_softc *);
Static void	ar5008_rx_enable(struct athn_softc *);
Static void	ar5008_rx_free(struct athn_softc *);
Static void	ar5008_rx_intr(struct athn_softc *);
Static void	ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
		    struct ar_rx_desc *);
Static void	ar5008_set_cck_weak_signal(struct athn_softc *, int);
Static void	ar5008_set_delta_slope(struct athn_softc *,
		    struct ieee80211_channel *, struct ieee80211_channel *);
Static void	ar5008_set_firstep_level(struct athn_softc *, int);
Static void	ar5008_set_noise_immunity_level(struct athn_softc *, int);
Static void	ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
		    struct ieee80211_channel *);
Static void	ar5008_set_rf_mode(struct athn_softc *,
		    struct ieee80211_channel *);
Static void	ar5008_set_rxchains(struct athn_softc *);
Static void	ar5008_set_spur_immunity_level(struct athn_softc *, int);
Static void	ar5008_swap_rom(struct athn_softc *);
Static int	ar5008_swba_intr(struct athn_softc *);
Static int	ar5008_tx(struct athn_softc *, struct mbuf *,
		    struct ieee80211_node *, int);
Static int	ar5008_tx_alloc(struct athn_softc *);
Static void	ar5008_tx_free(struct athn_softc *);
Static void	ar5008_tx_intr(struct athn_softc *);
Static int	ar5008_tx_process(struct athn_softc *, int);

#ifdef notused
Static void	ar5008_bb_load_noisefloor(struct athn_softc *);
Static void	ar5008_get_noisefloor(struct athn_softc *,
		    struct ieee80211_channel *);
Static void	ar5008_noisefloor_calib(struct athn_softc *);
Static void	ar5008_read_noisefloor(struct athn_softc *, int16_t *,
		    int16_t *);
Static void	ar5008_write_noisefloor(struct athn_softc *, int16_t *,
		    int16_t *);
#endif /* notused */
// bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);

/*
 * XXX: see if_iwn.c:MCLGETIalt() for a better solution.
 */
static struct mbuf *
MCLGETI(struct athn_softc *sc __unused, int how,
    struct ifnet *ifp __unused, u_int size)
{
	struct mbuf *m;

	MGETHDR(m, how, MT_DATA);
	if (m == NULL)
		return NULL;

	MEXTMALLOC(m, size, how);
	if ((m->m_flags & M_EXT) == 0) {
		m_freem(m);
		return NULL;
	}
	return m;
}

PUBLIC int
ar5008_attach(struct athn_softc *sc)
{
	struct athn_ops *ops = &sc->sc_ops;
	struct ieee80211com *ic = &sc->sc_ic;
	struct ar_base_eep_header *base;
	uint8_t eep_ver, kc_entries_log;
	int error;

	/* Set callbacks for AR5008, AR9001 and AR9002 families. */
	ops->gpio_read = ar5008_gpio_read;
	ops->gpio_write = ar5008_gpio_write;
	ops->gpio_config_input = ar5008_gpio_config_input;
	ops->gpio_config_output = ar5008_gpio_config_output;
	ops->rfsilent_init = ar5008_rfsilent_init;

	ops->dma_alloc = ar5008_dma_alloc;
	ops->dma_free = ar5008_dma_free;
	ops->rx_enable = ar5008_rx_enable;
	ops->intr = ar5008_intr;
	ops->tx = ar5008_tx;

	ops->set_rf_mode = ar5008_set_rf_mode;
	ops->rf_bus_request = ar5008_rf_bus_request;
	ops->rf_bus_release = ar5008_rf_bus_release;
	ops->set_phy = ar5008_set_phy;
	ops->set_delta_slope = ar5008_set_delta_slope;
	ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
	ops->init_baseband = ar5008_init_baseband;
	ops->disable_phy = ar5008_disable_phy;
	ops->set_rxchains = ar5008_set_rxchains;
	ops->noisefloor_calib = ar5008_do_noisefloor_calib;
	ops->do_calib = ar5008_do_calib;
	ops->next_calib = ar5008_next_calib;
	ops->hw_init = ar5008_hw_init;

	ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
	ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
	ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
	ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
	ops->set_firstep_level = ar5008_set_firstep_level;
	ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;

	/* Set MAC registers offsets. */
	sc->sc_obs_off = AR_OBS;
	sc->sc_gpio_input_en_off = AR_GPIO_INPUT_EN_VAL;

	if (!(sc->sc_flags & ATHN_FLAG_PCIE))
		athn_config_nonpcie(sc);
	else
		athn_config_pcie(sc);

	/* Read entire ROM content in memory. */
	if ((error = ar5008_read_rom(sc)) != 0) {
		printf("%s: could not read ROM\n", device_xname(sc->sc_dev));
		return error;
	}

	/* Get RF revision. */
	sc->sc_rf_rev = ar5416_get_rf_rev(sc);

	base = sc->sc_eep;
	eep_ver = (base->version >> 12) & 0xf;
	sc->sc_eep_rev = (base->version & 0xfff);
	if (eep_ver != AR_EEP_VER || sc->sc_eep_rev == 0) {
		printf("%s: unsupported ROM version %d.%d\n",
		    device_xname(sc->sc_dev), eep_ver, sc->sc_eep_rev);
		return EINVAL;
	}

	if (base->opCapFlags & AR_OPFLAGS_11A)
		sc->sc_flags |= ATHN_FLAG_11A;
	if (base->opCapFlags & AR_OPFLAGS_11G)
		sc->sc_flags |= ATHN_FLAG_11G;
	if (base->opCapFlags & AR_OPFLAGS_11N)
		sc->sc_flags |= ATHN_FLAG_11N;

	IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr);

	/* Check if we have a hardware radio switch. */
	if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) {
		sc->sc_flags |= ATHN_FLAG_RFSILENT;
		/* Get GPIO pin used by hardware radio switch. */
		sc->sc_rfsilent_pin = MS(base->rfSilent,
		    AR_EEP_RFSILENT_GPIO_SEL);
		/* Get polarity of hardware radio switch. */
		if (base->rfSilent & AR_EEP_RFSILENT_POLARITY)
			sc->sc_flags |= ATHN_FLAG_RFSILENT_REVERSED;
	}

	/* Get the number of HW key cache entries. */
	kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES);
	sc->sc_kc_entries = kc_entries_log != 0 ?
	    1 << kc_entries_log : AR_KEYTABLE_SIZE;

	sc->sc_txchainmask = base->txMask;
	if (sc->sc_mac_ver == AR_SREV_VERSION_5416_PCI &&
	    !(base->opCapFlags & AR_OPFLAGS_11A)) {
		/* For single-band AR5416 PCI, use GPIO pin 0. */
		sc->sc_rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
	}
	else
		sc->sc_rxchainmask = base->rxMask;

	ops->setup(sc);
	return 0;
}

/*
 * Read 16-bit word from ROM.
 */
Static int
ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
{
	uint32_t reg;
	int ntries;

	reg = AR_READ(sc, AR_EEPROM_OFFSET(addr));
	for (ntries = 0; ntries < 1000; ntries++) {
		reg = AR_READ(sc, AR_EEPROM_STATUS_DATA);
		if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY |
		    AR_EEPROM_STATUS_DATA_PROT_ACCESS))) {
			*val = MS(reg, AR_EEPROM_STATUS_DATA_VAL);
			return 0;
		}
		DELAY(10);
	}
	*val = 0xffff;
	return ETIMEDOUT;
}

Static int
ar5008_read_rom(struct athn_softc *sc)
{
	uint32_t addr, end;
	uint16_t magic, sum, *eep;
	int need_swap = 0;
	int error;

	/* Determine ROM endianness. */
	error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic);
	if (error != 0)
		return error;
	if (magic != AR_EEPROM_MAGIC) {
		if (magic != bswap16(AR_EEPROM_MAGIC)) {
			DPRINTFN(DBG_INIT, sc,
			    "invalid ROM magic 0x%x != 0x%x\n",
			    magic, AR_EEPROM_MAGIC);
			return EIO;
		}
		DPRINTFN(DBG_INIT, sc, "non-native ROM endianness\n");
		need_swap = 1;
	}

	/* Allocate space to store ROM in host memory. */
	sc->sc_eep = malloc(sc->sc_eep_size, M_DEVBUF, M_NOWAIT);
	if (sc->sc_eep == NULL)
		return ENOMEM;

	/* Read entire ROM and compute checksum. */
	sum = 0;
	eep = sc->sc_eep;
	end = sc->sc_eep_base + sc->sc_eep_size / sizeof(uint16_t);
	for (addr = sc->sc_eep_base; addr < end; addr++, eep++) {
		if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
			DPRINTFN(DBG_INIT, sc,
			    "could not read ROM at 0x%x\n", addr);
			return error;
		}
		if (need_swap)
			*eep = bswap16(*eep);
		sum ^= *eep;
	}
	if (sum != 0xffff) {
		printf("%s: bad ROM checksum 0x%04x\n",
		    device_xname(sc->sc_dev), sum);
		return EIO;
	}
	if (need_swap)
		ar5008_swap_rom(sc);

	return 0;
}

Static void
ar5008_swap_rom(struct athn_softc *sc)
{
	struct ar_base_eep_header *base = sc->sc_eep;

	/* Swap common fields first. */
	base->length = bswap16(base->length);
	base->version = bswap16(base->version);
	base->regDmn[0] = bswap16(base->regDmn[0]);
	base->regDmn[1] = bswap16(base->regDmn[1]);
	base->rfSilent = bswap16(base->rfSilent);
	base->blueToothOptions = bswap16(base->blueToothOptions);
	base->deviceCap = bswap16(base->deviceCap);

	/* Swap device-dependent fields. */
	sc->sc_ops.swap_rom(sc);
}

/*
 * Access to General Purpose Input/Output ports.
 */
Static int
ar5008_gpio_read(struct athn_softc *sc, int pin)
{

	KASSERT(pin < sc->sc_ngpiopins);
	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc))
		return !((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1);
	return (AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->sc_ngpiopins + pin)) & 1;
}

Static void
ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
{
	uint32_t reg;

	KASSERT(pin < sc->sc_ngpiopins);

	if (sc->sc_flags & ATHN_FLAG_USB)
		set = !set;	/* AR9271/AR7010 is reversed. */

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		reg = AR_READ(sc, AR7010_GPIO_OUT);
		if (set)
			reg |= 1 << pin;
		else
			reg &= ~(1 << pin);
		AR_WRITE(sc, AR7010_GPIO_OUT, reg);
	}
	else {
		reg = AR_READ(sc, AR_GPIO_IN_OUT);
		if (set)
			reg |= 1 << pin;
		else
			reg &= ~(1 << pin);
		AR_WRITE(sc, AR_GPIO_IN_OUT, reg);
	}
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_gpio_config_input(struct athn_softc *sc, int pin)
{
	uint32_t reg;

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
	}
	else {
		reg = AR_READ(sc, AR_GPIO_OE_OUT);
		reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
		reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2);
		AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
	}
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
{
	uint32_t reg;
	int mux, off;

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
		AR_WRITE_BARRIER(sc);
		return;
	}
	mux = pin / 6;
	off = pin % 6;

	reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux));
	if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
		reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
	reg &= ~(0x1f << (off * 5));
	reg |= (type & 0x1f) << (off * 5);
	AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg);

	reg = AR_READ(sc, AR_GPIO_OE_OUT);
	reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
	reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2);
	AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_rfsilent_init(struct athn_softc *sc)
{
	uint32_t reg;

	/* Configure hardware radio switch. */
	AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
	reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
	reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
	AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
	ar5008_gpio_config_input(sc, sc->sc_rfsilent_pin);
	AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
	if (!(sc->sc_flags & ATHN_FLAG_RFSILENT_REVERSED)) {
		AR_SETBITS(sc, AR_GPIO_INTR_POL,
		    AR_GPIO_INTR_POL_PIN(sc->sc_rfsilent_pin));
	}
	AR_WRITE_BARRIER(sc);
}

Static int
ar5008_dma_alloc(struct athn_softc *sc)
{
	int error;

	error = ar5008_tx_alloc(sc);
	if (error != 0)
		return error;

	error = ar5008_rx_alloc(sc);
	if (error != 0)
		return error;

	return 0;
}

Static void
ar5008_dma_free(struct athn_softc *sc)
{

	ar5008_tx_free(sc);
	ar5008_rx_free(sc);
}

Static int
ar5008_tx_alloc(struct athn_softc *sc)
{
	struct athn_tx_buf *bf;
	bus_size_t size;
	int error, nsegs, i;

	/*
	 * Allocate a pool of Tx descriptors shared between all Tx queues.
	 */
	size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);

	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
	    BUS_DMA_NOWAIT, &sc->sc_map);
	if (error != 0)
		goto fail;

	error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->sc_seg, 1,
//	    XXX &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
	    &nsegs, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	error = bus_dmamem_map(sc->sc_dmat, &sc->sc_seg, 1, size,
	    (void **)&sc->sc_descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
	if (error != 0)
		goto fail;

	error = bus_dmamap_load_raw(sc->sc_dmat, sc->sc_map, &sc->sc_seg, 1, size,
	    BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	SIMPLEQ_INIT(&sc->sc_txbufs);
	for (i = 0; i < ATHN_NTXBUFS; i++) {
		bf = &sc->sc_txpool[i];

		error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
		    AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
		    &bf->bf_map);
		if (error != 0) {
			printf("%s: could not create Tx buf DMA map\n",
			    device_xname(sc->sc_dev));
			goto fail;
		}

		bf->bf_descs =
		    &((struct ar_tx_desc *)sc->sc_descs)[i * AR5008_MAX_SCATTER];
		bf->bf_daddr = sc->sc_map->dm_segs[0].ds_addr +
		    i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);

		SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
	}
	return 0;
 fail:
	ar5008_tx_free(sc);
	return error;
}

Static void
ar5008_tx_free(struct athn_softc *sc)
{
	struct athn_tx_buf *bf;
	int i;

	for (i = 0; i < ATHN_NTXBUFS; i++) {
		bf = &sc->sc_txpool[i];

		if (bf->bf_map != NULL)
			bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
	}
	/* Free Tx descriptors. */
	if (sc->sc_map != NULL) {
		if (sc->sc_descs != NULL) {
			bus_dmamap_unload(sc->sc_dmat, sc->sc_map);
			bus_dmamem_unmap(sc->sc_dmat, (void *)sc->sc_descs,
			    ATHN_NTXBUFS * AR5008_MAX_SCATTER *
			    sizeof(struct ar_tx_desc));
			bus_dmamem_free(sc->sc_dmat, &sc->sc_seg, 1);
		}
		bus_dmamap_destroy(sc->sc_dmat, sc->sc_map);
	}
}

Static int
ar5008_rx_alloc(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	struct ar_rx_desc *ds;
	bus_size_t size;
	int error, nsegs, i;

	rxq->bf = malloc(ATHN_NRXBUFS * sizeof(*bf), M_DEVBUF,
	    M_NOWAIT | M_ZERO);
	if (rxq->bf == NULL)
		return ENOMEM;

	size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc);

	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
	    BUS_DMA_NOWAIT, &rxq->map);
	if (error != 0)
		goto fail;

	error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
//	    &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
	    &nsegs, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
	    (void **)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
	if (error != 0)
		goto fail;

	error = bus_dmamap_load_raw(sc->sc_dmat, rxq->map, &rxq->seg, 1,
	    size, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];
		ds = &((struct ar_rx_desc *)rxq->descs)[i];

		error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
		    ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
		    &bf->bf_map);
		if (error != 0) {
			printf("%s: could not create Rx buf DMA map\n",
			    device_xname(sc->sc_dev));
			goto fail;
		}
		/*
		 * Assumes MCLGETI returns cache-line-size aligned buffers.
		 * XXX: does ours?
		 */
		bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
		if (bf->bf_m == NULL) {
			printf("%s: could not allocate Rx mbuf\n",
			    device_xname(sc->sc_dev));
			error = ENOBUFS;
			goto fail;
		}

		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
		    BUS_DMA_NOWAIT | BUS_DMA_READ);
		if (error != 0) {
			printf("%s: could not DMA map Rx buffer\n",
			    device_xname(sc->sc_dev));
			goto fail;
		}

		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
		    BUS_DMASYNC_PREREAD);

		bf->bf_desc = ds;
		bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
		    i * sizeof(struct ar_rx_desc);
	}
	return 0;
 fail:
	ar5008_rx_free(sc);
	return error;
}

Static void
ar5008_rx_free(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	int i;

	if (rxq->bf == NULL)
		return;
	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];

		if (bf->bf_map != NULL)
			bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
		if (bf->bf_m != NULL)
			m_freem(bf->bf_m);
	}
	free(rxq->bf, M_DEVBUF);

	/* Free Rx descriptors. */
	if (rxq->map != NULL) {
		if (rxq->descs != NULL) {
			bus_dmamap_unload(sc->sc_dmat, rxq->map);
			bus_dmamem_unmap(sc->sc_dmat, (void *)rxq->descs,
			    ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
			bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
		}
		bus_dmamap_destroy(sc->sc_dmat, rxq->map);
	}
}

Static void
ar5008_rx_enable(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	struct ar_rx_desc *ds;
	int i;

	/* Setup and link Rx descriptors. */
	SIMPLEQ_INIT(&rxq->head);
	rxq->lastds = NULL;
	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];
		ds = bf->bf_desc;

		memset(ds, 0, sizeof(*ds));
		ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
		ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ);

		if (rxq->lastds != NULL) {
			((struct ar_rx_desc *)rxq->lastds)->ds_link =
			    bf->bf_daddr;
		}
		SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
		rxq->lastds = ds;
	}
	bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
	    BUS_DMASYNC_PREREAD);

	/* Enable Rx. */
	AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
	AR_WRITE(sc, AR_CR, AR_CR_RXE);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
    struct ar_rx_desc *ds)
{
	struct athn_rx_radiotap_header *tap = &sc->sc_rxtap;
	struct ieee80211com *ic = &sc->sc_ic;
	uint64_t tsf;
	uint32_t tstamp;
	uint8_t rate;

	/* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
	tstamp = ds->ds_status2;
	tsf = AR_READ(sc, AR_TSF_U32);
	tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32);
	if ((tsf & 0x7fff) < tstamp)
		tsf -= 0x8000;
	tsf = (tsf & ~0x7fff) | tstamp;

	tap->wr_flags = IEEE80211_RADIOTAP_F_FCS;
	tap->wr_tsft = htole64(tsf);
	tap->wr_chan_freq = htole16(ic->ic_curchan->ic_freq);
	tap->wr_chan_flags = htole16(ic->ic_curchan->ic_flags);
	tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
	/* XXX noise. */
	tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
	tap->wr_rate = 0;	/* In case it can't be found below. */
	if (AR_SREV_5416_20_OR_LATER(sc))
		rate = MS(ds->ds_status0, AR_RXS0_RATE);
	else
		rate = MS(ds->ds_status3, AR_RXS3_RATE);
	if (rate & 0x80) {		/* HT. */
		/* Bit 7 set means HT MCS instead of rate. */
		tap->wr_rate = rate;
		if (!(ds->ds_status3 & AR_RXS3_GI))
			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI;

	}
	else if (rate & 0x10) {		/* CCK. */
		if (rate & 0x04)
			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
		switch (rate & ~0x14) {
		case 0xb: tap->wr_rate =   2; break;
		case 0xa: tap->wr_rate =   4; break;
		case 0x9: tap->wr_rate =  11; break;
		case 0x8: tap->wr_rate =  22; break;
		}
	}
	else {				/* OFDM. */
		switch (rate) {
		case 0xb: tap->wr_rate =  12; break;
		case 0xf: tap->wr_rate =  18; break;
		case 0xa: tap->wr_rate =  24; break;
		case 0xe: tap->wr_rate =  36; break;
		case 0x9: tap->wr_rate =  48; break;
		case 0xd: tap->wr_rate =  72; break;
		case 0x8: tap->wr_rate =  96; break;
		case 0xc: tap->wr_rate = 108; break;
		}
	}
	bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m);
}

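/*
 * Process one frame from the head of the Rx queue: validate the descriptor
 * status, swap in a fresh mbuf, hand the frame to net80211 and recycle the
 * descriptor at the tail of the queue.
 */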
static __inline int
ar5008_rx_process(struct athn_softc *sc)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ifnet *ifp = &sc->sc_if;
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf, *nbf;
	struct ar_rx_desc *ds;
	struct ieee80211_frame *wh;
	struct ieee80211_node *ni;
	struct mbuf *m, *m1;
	u_int32_t rstamp;
	int error, len, rssi;

	bf = SIMPLEQ_FIRST(&rxq->head);
	if (__predict_false(bf == NULL)) {	/* Should not happen. */
		printf("%s: Rx queue is empty!\n", device_xname(sc->sc_dev));
		return ENOENT;
	}
	ds = bf->bf_desc;

	if (!(ds->ds_status8 & AR_RXS8_DONE)) {
		/*
		 * On some parts, the status words can get corrupted
		 * (including the "done" bit), so we check the next
		 * descriptor "done" bit.  If it is set, it is a good
		 * indication that the status words are corrupted, so
		 * we skip this descriptor and drop the frame.
		 */
		nbf = SIMPLEQ_NEXT(bf, bf_list);
		if (nbf != NULL &&
		    (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
		     AR_RXS8_DONE)) {
			DPRINTFN(DBG_RX, sc,
			    "corrupted descriptor status=0x%x\n",
			    ds->ds_status8);
			/* HW will not "move" RXDP in this case, so do it. */
			AR_WRITE(sc, AR_RXDP, nbf->bf_daddr);
			AR_WRITE_BARRIER(sc);
			ifp->if_ierrors++;
			goto skip;
		}
		return EBUSY;
	}

	if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
		/* Drop frames that span multiple Rx descriptors. */
		DPRINTFN(DBG_RX, sc, "dropping split frame\n");
		ifp->if_ierrors++;
		goto skip;
	}
	if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) {
		if (ds->ds_status8 & AR_RXS8_CRC_ERR)
			DPRINTFN(DBG_RX, sc, "CRC error\n");
		else if (ds->ds_status8 & AR_RXS8_PHY_ERR)
			DPRINTFN(DBG_RX, sc, "PHY error=0x%x\n",
			    MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE));
		else if (ds->ds_status8 & AR_RXS8_DECRYPT_CRC_ERR)
			DPRINTFN(DBG_RX, sc, "Decryption CRC error\n");
		else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) {
			DPRINTFN(DBG_RX, sc, "Michael MIC failure\n");

			len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
			m = bf->bf_m;
			m->m_pkthdr.rcvif = ifp;
			m->m_pkthdr.len = m->m_len = len;
			wh = mtod(m, struct ieee80211_frame *);

			/* Report Michael MIC failures to net80211. */
			ieee80211_notify_michael_failure(ic, wh, 0 /* XXX: keyix */);
		}
		ifp->if_ierrors++;
		goto skip;
	}

	len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
	if (__predict_false(len < (int)IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
		DPRINTFN(DBG_RX, sc, "corrupted descriptor length=%d\n", len);
		ifp->if_ierrors++;
		goto skip;
	}

	/* Allocate a new Rx buffer. */
	m1 = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
	if (__predict_false(m1 == NULL)) {
		ic->ic_stats.is_rx_nobuf++;
		ifp->if_ierrors++;
		goto skip;
	}

	/* Sync and unmap the old Rx buffer. */
	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
	    BUS_DMASYNC_POSTREAD);
	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);

	/* Map the new Rx buffer. */
	error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
	    ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
	if (__predict_false(error != 0)) {
		m_freem(m1);

		/* Remap the old Rx buffer or panic. */
		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
		    BUS_DMA_NOWAIT | BUS_DMA_READ);
		KASSERT(error == 0);
		ifp->if_ierrors++;
		goto skip;
	}

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
	    BUS_DMASYNC_PREREAD);

	/* Write physical address of new Rx buffer. */
	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;

	m = bf->bf_m;
	bf->bf_m = m1;

	/* Finalize mbuf. */
	m->m_pkthdr.rcvif = ifp;
	m->m_pkthdr.len = m->m_len = len;

	/* Grab a reference to the source node. */
	wh = mtod(m, struct ieee80211_frame *);
	ni = ieee80211_find_rxnode(ic, (struct ieee80211_frame_min *)wh);

	/* Remove any HW padding after the 802.11 header. */
	if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
		u_int hdrlen = ieee80211_anyhdrsize(wh);
		if (hdrlen & 3) {
			ovbcopy(wh, (uint8_t *)wh + 2, hdrlen);
			m_adj(m, 2);
		}
	}
	if (__predict_false(sc->sc_drvbpf != NULL))
		ar5008_rx_radiotap(sc, m, ds);

	/* Trim 802.11 FCS after radiotap. */
	m_adj(m, -IEEE80211_CRC_LEN);

	/* Send the frame to the 802.11 layer. */
	rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
	rstamp = ds->ds_status2;
	ieee80211_input(ic, m, ni, rssi, rstamp);

	/* Node is no longer needed. */
	ieee80211_free_node(ni);

 skip:
	/* Unlink this descriptor from head. */
	SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
	memset(&ds->ds_status0, 0, 36);	/* XXX Really needed? */
	ds->ds_status8 &= ~AR_RXS8_DONE;
	ds->ds_link = 0;

	/* Re-use this descriptor and link it to tail. */
	if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
		((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
	else
		AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
	SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
	rxq->lastds = ds;

	/* Re-enable Rx. */
	AR_WRITE(sc, AR_CR, AR_CR_RXE);
	AR_WRITE_BARRIER(sc);
	return 0;
}

Static void
ar5008_rx_intr(struct athn_softc *sc)
{

	while (ar5008_rx_process(sc) == 0)
		continue;
}

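/*
 * Reclaim the first completed buffer of the given Tx queue: update rate
 * control statistics and return the buffer to the global free list.
 */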
Static int
ar5008_tx_process(struct athn_softc *sc, int qid)
{
	struct ifnet *ifp = &sc->sc_if;
	struct athn_txq *txq = &sc->sc_txq[qid];
	struct athn_node *an;
	struct athn_tx_buf *bf;
	struct ar_tx_desc *ds;
	uint8_t failcnt;

	bf = SIMPLEQ_FIRST(&txq->head);
	if (bf == NULL)
		return ENOENT;
	/* Get descriptor of last DMA segment. */
	ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];

	if (!(ds->ds_status9 & AR_TXS9_DONE))
		return EBUSY;

	SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
	ifp->if_opackets++;

	sc->sc_tx_timer = 0;

	if (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES)
		ifp->if_oerrors++;

	if (ds->ds_status1 & AR_TXS1_UNDERRUN)
		athn_inc_tx_trigger_level(sc);

	an = (struct athn_node *)bf->bf_ni;
	/*
	 * NB: the data fail count contains the number of un-acked tries
	 * for the final series used.  We must add the number of tries for
	 * each series that was fully processed.
	 */
	failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT);
	/* NB: Assume two tries per series. */
	failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;

	/* Update rate control statistics. */
	an->amn.amn_txcnt++;
	if (failcnt > 0)
		an->amn.amn_retrycnt++;

	DPRINTFN(DBG_TX, sc, "Tx done qid=%d status1=%d fail count=%d\n",
	    qid, ds->ds_status1, failcnt);

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_POSTWRITE);
	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);

	m_freem(bf->bf_m);
	bf->bf_m = NULL;
	ieee80211_free_node(bf->bf_ni);
	bf->bf_ni = NULL;

	/* Link Tx buffer back to global free list. */
	SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
	return 0;
}

Static void
ar5008_tx_intr(struct athn_softc *sc)
{
	struct ifnet *ifp = &sc->sc_if;
	uint16_t mask = 0;
	uint32_t reg;
	int qid;

	reg = AR_READ(sc, AR_ISR_S0_S);
	mask |= MS(reg, AR_ISR_S0_QCU_TXOK);
	mask |= MS(reg, AR_ISR_S0_QCU_TXDESC);

	reg = AR_READ(sc, AR_ISR_S1_S);
	mask |= MS(reg, AR_ISR_S1_QCU_TXERR);
	mask |= MS(reg, AR_ISR_S1_QCU_TXEOL);

	DPRINTFN(DBG_TX, sc, "Tx interrupt mask=0x%x\n", mask);
	for (qid = 0; mask != 0; mask >>= 1, qid++) {
		if (mask & 1)
			while (ar5008_tx_process(sc, qid) == 0);
	}
	if (!SIMPLEQ_EMPTY(&sc->sc_txbufs)) {
		ifp->if_flags &= ~IFF_OACTIVE;
		ifp->if_start(ifp);
	}
}

#ifndef IEEE80211_STA_ONLY
/*
 * Process Software Beacon Alert interrupts.
 */
Static int
ar5008_swba_intr(struct athn_softc *sc)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ifnet *ifp = &sc->sc_if;
	struct ieee80211_node *ni = ic->ic_bss;
	struct athn_tx_buf *bf = sc->sc_bcnbuf;
	struct ieee80211_frame *wh;
	struct ieee80211_beacon_offsets bo;
	struct ar_tx_desc *ds;
	struct mbuf *m;
	uint8_t ridx, hwrate;
	int error, totlen;

#if notyet
	if (ic->ic_tim_mcast_pending &&
	    IF_IS_EMPTY(&ni->ni_savedq) &&
	    SIMPLEQ_EMPTY(&sc->sc_txq[ATHN_QID_CAB].head))
		ic->ic_tim_mcast_pending = 0;
#endif
	if (ic->ic_dtim_count == 0)
		ic->ic_dtim_count = ic->ic_dtim_period - 1;
	else
		ic->ic_dtim_count--;

	/* Make sure previous beacon has been sent. */
	if (athn_tx_pending(sc, ATHN_QID_BEACON)) {
		DPRINTFN(DBG_INTR, sc, "beacon stuck\n");
		return EBUSY;
	}
	/* Get new beacon. */
	m = ieee80211_beacon_alloc(ic, ic->ic_bss, &bo);
	if (__predict_false(m == NULL))
		return ENOBUFS;
	/* Assign sequence number. */
	/* XXX: use non-QoS tid? */
	wh = mtod(m, struct ieee80211_frame *);
	*(uint16_t *)&wh->i_seq[0] =
	    htole16(ic->ic_bss->ni_txseqs[0] << IEEE80211_SEQ_SEQ_SHIFT);
	ic->ic_bss->ni_txseqs[0]++;

	/* Unmap and free old beacon if any. */
	if (__predict_true(bf->bf_m != NULL)) {
		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,
		    bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);
		bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
		m_freem(bf->bf_m);
		bf->bf_m = NULL;
	}
	/* DMA map new beacon. */
	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
	if (__predict_false(error != 0)) {
		m_freem(m);
		return error;
	}
	bf->bf_m = m;

	/* Setup Tx descriptor (simplified ar5008_tx()). */
	ds = bf->bf_descs;
	memset(ds, 0, sizeof(*ds));

	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
	ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen);
	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER);
	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON);
	ds->ds_ctl1 |= AR_TXC1_NO_ACK;
	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR);

	/* Write number of tries. */
	ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1);

	/* Write Tx rate. */
	ridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
	    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
	hwrate = athn_rates[ridx].hwrate;
	ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate);

	/* Write Tx chains. */
	ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask);

	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
	/* Segment length must be a multiple of 4. */
	ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
	    (bf->bf_map->dm_segs[0].ds_len + 3) & ~3);

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_PREWRITE);

	/* Stop Tx DMA before putting the new beacon on the queue. */
	athn_stop_tx_dma(sc, ATHN_QID_BEACON);

	AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr);

	for(;;) {
		if (SIMPLEQ_EMPTY(&sc->sc_txbufs))
			break;

		IF_DEQUEUE(&ni->ni_savedq, m);
		if (m == NULL)
			break;
		if (!IF_IS_EMPTY(&ni->ni_savedq)) {
			/* more queued frames, set the more data bit */
			wh = mtod(m, struct ieee80211_frame *);
			wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA;
		}

		if (sc->sc_ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) {
			ieee80211_free_node(ni);
			ifp->if_oerrors++;
			break;
		}
	}

	/* Kick Tx. */
	AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON);
	AR_WRITE_BARRIER(sc);
	return 0;
}
#endif

Static int
ar5008_intr(struct athn_softc *sc)
{
	uint32_t intr, intr2, intr5, sync;

	/* Get pending interrupts. */
	intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE);
	if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) {
		intr = AR_READ(sc, AR_INTR_SYNC_CAUSE);
		if (intr == AR_INTR_SPURIOUS || (intr & sc->sc_isync) == 0)
			return 0;	/* Not for us. */
	}

	if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
	    (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
		intr = AR_READ(sc, AR_ISR);
	else
		intr = 0;
	sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->sc_isync;
	if (intr == 0 && sync == 0)
		return 0;	/* Not for us. */

	if (intr != 0) {
		if (intr & AR_ISR_BCNMISC) {
			intr2 = AR_READ(sc, AR_ISR_S2);
#if notyet
			if (intr2 & AR_ISR_S2_TIM)
				/* TBD */;
			if (intr2 & AR_ISR_S2_TSFOOR)
				/* TBD */;
#endif
		}
		intr = AR_READ(sc, AR_ISR_RAC);
		if (intr == AR_INTR_SPURIOUS)
			return 1;

#ifndef IEEE80211_STA_ONLY
		if (intr & AR_ISR_SWBA)
			ar5008_swba_intr(sc);
#endif
		if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
			ar5008_rx_intr(sc);
		if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN))
			ar5008_rx_intr(sc);

		if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC |
		    AR_ISR_TXERR | AR_ISR_TXEOL))
			ar5008_tx_intr(sc);

		intr5 = AR_READ(sc, AR_ISR_S5_S);
		if (intr & AR_ISR_GENTMR) {
			if (intr5 & AR_ISR_GENTMR) {
				DPRINTFN(DBG_INTR, sc,
				    "GENTMR trigger=%d thresh=%d\n",
				    MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
				    MS(intr5, AR_ISR_S5_GENTIMER_THRESH));
			}
		}
#if notyet
		if (intr5 & AR_ISR_S5_TIM_TIMER) {
			/* TBD */;
		}
#endif
	}
	if (sync != 0) {
#if notyet
		if (sync &
		    (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) {
			/* TBD */;
		}
#endif
		if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
			AR_WRITE(sc, AR_RC, AR_RC_HOSTIF);
			AR_WRITE(sc, AR_RC, 0);
		}

		if ((sc->sc_flags & ATHN_FLAG_RFSILENT) &&
		    (sync & AR_INTR_SYNC_GPIO_PIN(sc->sc_rfsilent_pin))) {
			pmf_event_inject(sc->sc_dev, PMFE_RADIO_OFF);
			return 1;
		}

		AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync);
		(void)AR_READ(sc, AR_INTR_SYNC_CAUSE);
	}
	return 1;
}

Static int
ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
    int txflags)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ieee80211_key *k = NULL;
	struct ieee80211_frame *wh;
	struct athn_series series[4];
	struct ar_tx_desc *ds, *lastds;
	struct athn_txq *txq;
	struct athn_tx_buf *bf;
	struct athn_node *an = (void *)ni;
	struct mbuf *m1;
	uint16_t qos;
	uint8_t txpower, type, encrtype, ridx[4];
	int i, error, totlen, hasqos, qid;

	/* Grab a Tx buffer from our global free list. */
	bf = SIMPLEQ_FIRST(&sc->sc_txbufs);
	KASSERT(bf != NULL);

	/* Map 802.11 frame type to hardware frame type. */
	wh = mtod(m, struct ieee80211_frame *);
	if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
	    IEEE80211_FC0_TYPE_MGT) {
		/* NB: Beacons do not use ar5008_tx(). */
		if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
		    IEEE80211_FC0_SUBTYPE_PROBE_RESP)
			type = AR_FRAME_TYPE_PROBE_RESP;
		else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
		    IEEE80211_FC0_SUBTYPE_ATIM)
			type = AR_FRAME_TYPE_ATIM;
		else
			type = AR_FRAME_TYPE_NORMAL;
	}
	else if ((wh->i_fc[0] &
	    (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) ==
	    (IEEE80211_FC0_TYPE_CTL | IEEE80211_FC0_SUBTYPE_PS_POLL)) {
		type = AR_FRAME_TYPE_PSPOLL;
	}
	else
		type = AR_FRAME_TYPE_NORMAL;

	if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) {
		k = ieee80211_crypto_encap(ic, ni, m);
		if (k == NULL)
			return ENOBUFS;

		/* packet header may have moved, reset our local pointer */
		wh = mtod(m, struct ieee80211_frame *);
	}

	/* XXX 2-byte padding for QoS and 4-addr headers. */

	/* Select the HW Tx queue to use for this frame. */
	if ((hasqos = ieee80211_has_qos(wh))) {
#ifdef notyet_edca
		uint8_t tid;

		qos = ieee80211_get_qos(wh);
		tid = qos & IEEE80211_QOS_TID;
		qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
#else
		qos = ieee80211_get_qos(wh);
		qid = ATHN_QID_AC_BE;
#endif /* notyet_edca */
	}
	else if (type == AR_FRAME_TYPE_PSPOLL) {
		qos = 0;
		qid = ATHN_QID_PSPOLL;
	}
	else if (txflags & ATHN_TXFLAG_CAB) {
		qos = 0;
		qid = ATHN_QID_CAB;
	}
	else {
		qos = 0;
		qid = ATHN_QID_AC_BE;
	}
	txq = &sc->sc_txq[qid];

	/* Select the transmit rates to use for this frame. */
	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
	    (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) !=
	    IEEE80211_FC0_TYPE_DATA) {
		/* Use lowest rate for all tries. */
		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
		    (ic->ic_curmode == IEEE80211_MODE_11A) ?
			ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
	}
	else if (ic->ic_fixed_rate != -1) {
		/* Use same fixed rate for all tries. */
		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
		    sc->sc_fixed_ridx;
	}
	else {
		int txrate = ni->ni_txrate;
		/* Use fallback table of the node. */
		for (i = 0; i < 4; i++) {
			ridx[i] = an->ridx[txrate];
			txrate = an->fallback[txrate];
		}
	}

	if (__predict_false(sc->sc_drvbpf != NULL)) {
		struct athn_tx_radiotap_header *tap = &sc->sc_txtap;

		tap->wt_flags = 0;
		/* Use initial transmit rate. */
		tap->wt_rate = athn_rates[ridx[0]].rate;
		tap->wt_chan_freq = htole16(ic->ic_curchan->ic_freq);
		tap->wt_chan_flags = htole16(ic->ic_curchan->ic_flags);
//		XXX tap->wt_hwqueue = qid;
		if (ridx[0] != ATHN_RIDX_CCK1 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;

		bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_txtap_len, m);
	}

	/* DMA map mbuf. */
	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
	if (__predict_false(error != 0)) {
		if (error != EFBIG) {
			printf("%s: can't map mbuf (error %d)\n",
			    device_xname(sc->sc_dev), error);
			m_freem(m);
			return error;
		}
		/*
		 * DMA mapping requires too many DMA segments; linearize
		 * mbuf in kernel virtual address space and retry.
		 */
		MGETHDR(m1, M_DONTWAIT, MT_DATA);
		if (m1 == NULL) {
			m_freem(m);
			return ENOBUFS;
		}
		if (m->m_pkthdr.len > (int)MHLEN) {
			MCLGET(m1, M_DONTWAIT);
			if (!(m1->m_flags & M_EXT)) {
				m_freem(m);
				m_freem(m1);
				return ENOBUFS;
			}
		}
		m_copydata(m, 0, m->m_pkthdr.len, mtod(m1, void *));
		m1->m_pkthdr.len = m1->m_len = m->m_pkthdr.len;
		m_freem(m);
		m = m1;

		error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
		    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
		if (error != 0) {
			printf("%s: can't map mbuf (error %d)\n",
			    device_xname(sc->sc_dev), error);
			m_freem(m);
			return error;
		}
	}
	bf->bf_m = m;
	bf->bf_ni = ni;
	bf->bf_txflags = txflags;

	wh = mtod(m, struct ieee80211_frame *);

	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;

	/* Clear all Tx descriptors that we will use. */
	memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds));

	/* Setup first Tx descriptor. */
	ds = bf->bf_descs;

	ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK;
	txpower = AR_MAX_RATE_POWER;	/* Get from per-rate registers. */
	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower);

	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type);

	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
	    (hasqos && (qos & IEEE80211_QOS_ACKPOLICY_MASK) ==
	     IEEE80211_QOS_ACKPOLICY_NOACK))
		ds->ds_ctl1 |= AR_TXC1_NO_ACK;
#if notyet
	if (0 && k != NULL) {
		uintptr_t entry;

		/*
		 * Map 802.11 cipher to hardware encryption type and
		 * compute MIC+ICV overhead.
		 */
		totlen += k->wk_keylen;
		switch (k->wk_cipher->ic_cipher) {
		case IEEE80211_CIPHER_WEP:
			encrtype = AR_ENCR_TYPE_WEP;
			break;
		case IEEE80211_CIPHER_TKIP:
			encrtype = AR_ENCR_TYPE_TKIP;
			break;
		case IEEE80211_CIPHER_AES_OCB:
		case IEEE80211_CIPHER_AES_CCM:
			encrtype = AR_ENCR_TYPE_AES;
			break;
		default:
			panic("unsupported cipher");
		}
		/*
		 * NB: The key cache entry index is stored in the key
		 * private field when the key is installed.
		 */
		entry = (uintptr_t)k->k_priv;
		ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry);
		ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID;
	}
	else
#endif
		encrtype = AR_ENCR_TYPE_CLEAR;
	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype);

	/* Check if frame must be protected using RTS/CTS or CTS-to-self. */
	if (!IEEE80211_IS_MULTICAST(wh->i_addr1)) {
		/* NB: Group frames are sent using CCK in 802.11b/g. */
		if (totlen > ic->ic_rtsthreshold) {
			ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
		}
		else if ((ic->ic_flags & IEEE80211_F_USEPROT) &&
		    athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) {
			if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
				ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
			else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
				ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE;
		}
	}
	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
		/* Disable multi-rate retries when protection is used. */
		ridx[1] = ridx[2] = ridx[3] = ridx[0];
	}
	/* Setup multi-rate retries. */
	for (i = 0; i < 4; i++) {
		series[i].hwrate = athn_rates[ridx[i]].hwrate;
		if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
		    ridx[i] != ATHN_RIDX_CCK1 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			series[i].hwrate |= 0x04;
		series[i].dur = 0;
	}
	if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
		/* Compute duration for each series. */
		for (i = 0; i < 4; i++) {
			series[i].dur = athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[ridx[i]].rspridx, ic->ic_flags);
		}
	}

	/* Write number of tries for each series. */
	ds->ds_ctl2 =
	    SM(AR_TXC2_XMIT_DATA_TRIES0, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES1, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES2, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES3, 4);

	/* Tell HW to update duration field in 802.11 header. */
	if (type != AR_FRAME_TYPE_PSPOLL)
		ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA;

	/* Write Tx rate for each series. */
	ds->ds_ctl3 =
	    SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) |
	    SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) |
	    SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) |
	    SM(AR_TXC3_XMIT_RATE3, series[3].hwrate);

	/* Write duration for each series. */
	ds->ds_ctl4 =
	    SM(AR_TXC4_PACKET_DUR0, series[0].dur) |
	    SM(AR_TXC4_PACKET_DUR1, series[1].dur);
	ds->ds_ctl5 =
	    SM(AR_TXC5_PACKET_DUR2, series[2].dur) |
	    SM(AR_TXC5_PACKET_DUR3, series[3].dur);

	/* Use the same Tx chains for all tries. */
	ds->ds_ctl7 =
	    SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL1, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL2, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL3, sc->sc_txchainmask);
#ifdef notyet
#ifndef IEEE80211_NO_HT
	/* Use the same short GI setting for all tries. */
	if (ic->ic_flags & IEEE80211_F_SHGI)
		ds->ds_ctl7 |= AR_TXC7_GI0123;
	/* Use the same channel width for all tries. */
	if (ic->ic_flags & IEEE80211_F_CBW40)
		ds->ds_ctl7 |= AR_TXC7_2040_0123;
#endif
#endif

	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
		uint8_t protridx, hwrate;
		uint16_t dur = 0;

		/* Use the same protection mode for all tries. */
		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
			ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01;
			ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23;
		}
		/* Select protection rate (suboptimal but ok). */
		protridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
		    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2;
		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
			/* Account for CTS duration. */
			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[protridx].rspridx, ic->ic_flags);
		}
		dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
		if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
			/* Account for ACK duration. */
			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[ridx[0]].rspridx, ic->ic_flags);
		}
		/* Write protection frame duration and rate. */
		ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur);
		hwrate = athn_rates[protridx].hwrate;
		if (protridx == ATHN_RIDX_CCK2 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			hwrate |= 0x04;
		ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate);
	}

	/* Finalize first Tx descriptor and fill others (if any). */
	ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen);

	lastds = NULL;	/* XXX: gcc */
	for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
		ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
		ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
		    bf->bf_map->dm_segs[i].ds_len);

		if (i != bf->bf_map->dm_nsegs - 1)
			ds->ds_ctl1 |= AR_TXC1_MORE;
		ds->ds_link = 0;

		/* Chain Tx descriptor. */
		if (i != 0)
			lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
		lastds = ds;
	}
	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_PREWRITE);

	if (!SIMPLEQ_EMPTY(&txq->head))
		((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
	else
		AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr);
	txq->lastds = lastds;
	SIMPLEQ_REMOVE_HEAD(&sc->sc_txbufs, bf_list);
	SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list);

	ds = bf->bf_descs;
	DPRINTFN(DBG_TX, sc,
	    "Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
	    qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3);

	/* Kick Tx. */
	AR_WRITE(sc, AR_Q_TXE, 1 << qid);
	AR_WRITE_BARRIER(sc);
	return 0;
}

Static void
ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
{
	uint32_t reg;

	reg = IEEE80211_IS_CHAN_2GHZ(c) ?
	    AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
	if (!AR_SREV_9280_10_OR_LATER(sc)) {
		reg |= IEEE80211_IS_CHAN_2GHZ(c) ?
		    AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ;
	}
	else if (IEEE80211_IS_CHAN_5GHZ(c) &&
	    (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
		reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE;
	}
	AR_WRITE(sc, AR_PHY_MODE, reg);
	AR_WRITE_BARRIER(sc);
}

static __inline uint32_t
ar5008_synth_delay(struct athn_softc *sc)
{
	uint32_t delay;

	delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY);
	if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
		delay = (delay * 4) / 22;
	else
		delay = delay / 10;	/* in 100ns steps */
	return delay;
}

Static int
ar5008_rf_bus_request(struct athn_softc *sc)
{
	int ntries;

	/* Request RF Bus grant. */
	AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
	for (ntries = 0; ntries < 10000; ntries++) {
		if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN)
			return 0;
		DELAY(10);
	}
	DPRINTFN(DBG_RF, sc, "could not kill baseband Rx");
	return ETIMEDOUT;
}

Static void
ar5008_rf_bus_release(struct athn_softc *sc)
{

	/* Wait for the synthesizer to settle. */
	DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc));

	/* Release the RF Bus grant. */
	AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
    struct ieee80211_channel *extc)
{
	uint32_t phy;

	if (AR_SREV_9285_10_OR_LATER(sc))
		phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO;
	else
		phy = 0;
	phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 |
	    AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH;
#ifndef IEEE80211_NO_HT
	if (extc != NULL) {
		phy |= AR_PHY_FC_DYN2040_EN;
		if (extc > c)	/* XXX */
			phy |= AR_PHY_FC_DYN2040_PRI_CH;
	}
#endif
	AR_WRITE(sc, AR_PHY_TURBO, phy);

	AR_WRITE(sc, AR_2040_MODE,
	    (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0);

	/* Set global transmit timeout. */
	AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25));
	/* Set carrier sense timeout. */
	AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15));
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
    struct ieee80211_channel *extc)
{
	uint32_t coeff, exp, man, reg;

	/* Set Delta Slope (exponent and mantissa). */
	coeff = (100 << 24) / c->ic_freq;
	athn_get_delta_slope(coeff, &exp, &man);
	DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);

	reg = AR_READ(sc, AR_PHY_TIMING3);
	reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp);
	reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man);
	AR_WRITE(sc, AR_PHY_TIMING3, reg);

	/* For Short GI, coeff is 9/10 that of normal coeff. */
	coeff = (9 * coeff) / 10;
	athn_get_delta_slope(coeff, &exp, &man);
	DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);

	reg = AR_READ(sc, AR_PHY_HALFGI);
	reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp);
	reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man);
	AR_WRITE(sc, AR_PHY_HALFGI, reg);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_enable_antenna_diversity(struct athn_softc *sc)
{

	AR_SETBITS(sc, AR_PHY_CCK_DETECT,
	    AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_init_baseband(struct athn_softc *sc)
{
	uint32_t synth_delay;

	synth_delay = ar5008_synth_delay(sc);
	/* Activate the PHY (includes baseband activate and synthesizer on). */
	AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
	AR_WRITE_BARRIER(sc);
	DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay);
}

Static void
ar5008_disable_phy(struct athn_softc *sc)
{

	AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_init_chains(struct athn_softc *sc)
{

	if (sc->sc_rxchainmask == 0x5 || sc->sc_txchainmask == 0x5)
		AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN);

	/* Setup chain masks. */
	if (sc->sc_mac_ver <= AR_SREV_VERSION_9160 &&
	    (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5)) {
		AR_WRITE(sc, AR_PHY_RX_CHAINMASK, 0x7);
		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7);
	}
	else {
		AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
	}
	AR_WRITE(sc, AR_SELFGEN_MASK, sc->sc_txchainmask);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_set_rxchains(struct athn_softc *sc)
{

	if (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5) {
		AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
		AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
		AR_WRITE_BARRIER(sc);
	}
}

#ifdef notused
Static void
ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
{
/* Sign-extends 9-bit value (assumes upper bits are zeroes). */
#define SIGN_EXT(v)	(((v) ^ 0x100) - 0x100)
	uint32_t reg;
	int i;

	for (i = 0; i < sc->sc_nrxchains; i++) {
		reg = AR_READ(sc, AR_PHY_CCA(i));
		if (AR_SREV_9280_10_OR_LATER(sc))
			nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR);
		else
			nf[i] = MS(reg, AR_PHY_MINCCA_PWR);
		nf[i] = SIGN_EXT(nf[i]);

		reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
		if (AR_SREV_9280_10_OR_LATER(sc))
			nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR);
		else
			nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR);
		nf_ext[i] = SIGN_EXT(nf_ext[i]);
	}
#undef SIGN_EXT
}
#endif /* notused */

#ifdef notused
Static void
ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
{
	uint32_t reg;
	int i;

	for (i = 0; i < sc->sc_nrxchains; i++) {
		reg = AR_READ(sc, AR_PHY_CCA(i));
		reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]);
		AR_WRITE(sc, AR_PHY_CCA(i), reg);

		reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
		reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]);
		AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg);
	}
	AR_WRITE_BARRIER(sc);
}
#endif /* notused */

#ifdef notused
Static void
ar5008_get_noisefloor(struct athn_softc *sc, struct ieee80211_channel *c)
{
	int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
	int i;

	if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) {
		/* Noisefloor calibration not finished. */
		return;
	}
	/* Noisefloor calibration is finished. */
	ar5008_read_noisefloor(sc, nf, nf_ext);

	/* Update noisefloor history. */
	for (i = 0; i < sc->sc_nrxchains; i++) {
		sc->sc_nf_hist[sc->sc_nf_hist_cur].nf[i] = nf[i];
		sc->sc_nf_hist[sc->sc_nf_hist_cur].nf_ext[i] = nf_ext[i];
	}
	if (++sc->sc_nf_hist_cur >= ATHN_NF_CAL_HIST_MAX)
		sc->sc_nf_hist_cur = 0;
}
#endif /* notused */

1892 #ifdef notused
1893 Static void
1894 ar5008_bb_load_noisefloor(struct athn_softc *sc)
1895 {
1896 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1897 int i, ntries;
1898
1899 /* Write filtered noisefloor values. */
1900 for (i = 0; i < sc->sc_nrxchains; i++) {
1901 nf[i] = sc->sc_nf_priv[i] * 2;
1902 nf_ext[i] = sc->sc_nf_ext_priv[i] * 2;
1903 }
1904 ar5008_write_noisefloor(sc, nf, nf_ext);
1905
1906 /* Load filtered noisefloor values into baseband. */
1907 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1908 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1909 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1910 /* Wait for load to complete. */
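/* Annotation: the poll below gives up after 1000 * 50us = 50ms. */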
1911 for (ntries = 0; ntries < 1000; ntries++) {
1912 if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF))
1913 break;
1914 DELAY(50);
1915 }
1916 if (ntries == 1000) {
1917 DPRINTFN(DBG_RF, sc, "failed to load noisefloor values\n");
1918 return;
1919 }
1920
1921 /* Restore noisefloor values to initial (max) values. */
1922 for (i = 0; i < AR_MAX_CHAINS; i++)
1923 nf[i] = nf_ext[i] = -50 * 2;
1924 ar5008_write_noisefloor(sc, nf, nf_ext);
1925 }
1926 #endif /* notused */
1927
1928 #ifdef notused
1929 Static void
1930 ar5008_noisefloor_calib(struct athn_softc *sc)
1931 {
1932
1933 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1934 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1935 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1936 AR_WRITE_BARRIER(sc);
1937 }
1938 #endif /* notused */
1939
1940 Static void
1941 ar5008_do_noisefloor_calib(struct athn_softc *sc)
1942 {
1943
1944 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1945 AR_WRITE_BARRIER(sc);
1946 }
1947
1948 Static void
1949 ar5008_do_calib(struct athn_softc *sc)
1950 {
1951 uint32_t mode, reg;
1952 int log;
1953
1954 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0);
1955 log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2;
1956 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log);
1957 AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg);
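/*
 * Annotation (not in the original source): the LOG_COUNT_MAX value
 * programmed here (10 on AR9280 and later, 2 otherwise) is mirrored by
 * the sample-count divisors used in ar5008_calib_adc_dc_off().
 */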
1958
1959 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
1960 mode = AR_PHY_CALMODE_ADC_GAIN;
1961 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
1962 mode = AR_PHY_CALMODE_ADC_DC_PER;
1963 else /* ATHN_CAL_IQ */
1964 mode = AR_PHY_CALMODE_IQ;
1965 AR_WRITE(sc, AR_PHY_CALMODE, mode);
1966
1967 DPRINTFN(DBG_RF, sc, "starting calibration mode=0x%x\n", mode);
1968 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL);
1969 AR_WRITE_BARRIER(sc);
1970 }
1971
1972 Static void
1973 ar5008_next_calib(struct athn_softc *sc)
1974 {
1975
1976 /* Check if we have any calibration in progress. */
1977 if (sc->sc_cur_calib_mask != 0) {
1978 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) &
1979 AR_PHY_TIMING_CTRL4_DO_CAL)) {
1980 /* Calibration completed for current sample. */
1981 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
1982 ar5008_calib_adc_gain(sc);
1983 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
1984 ar5008_calib_adc_dc_off(sc);
1985 else /* ATHN_CAL_IQ */
1986 ar5008_calib_iq(sc);
1987 }
1988 }
1989 }
1990
1991 Static void
1992 ar5008_calib_iq(struct athn_softc *sc)
1993 {
1994 struct athn_iq_cal *cal;
1995 uint32_t reg, i_coff_denom, q_coff_denom;
1996 int32_t i_coff, q_coff;
1997 int i, iq_corr_neg;
1998
1999 for (i = 0; i < AR_MAX_CHAINS; i++) {
2000 cal = &sc->sc_calib.iq[i];
2001
2002 /* Accumulate IQ calibration measurements (clear on read). */
2003 cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2004 cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2005 cal->iq_corr_meas +=
2006 (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2007 }
2008 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2009 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2010 /* Not enough samples accumulated, continue. */
2011 ar5008_do_calib(sc);
2012 return;
2013 }
2014
2015 for (i = 0; i < sc->sc_nrxchains; i++) {
2016 cal = &sc->sc_calib.iq[i];
2017
2018 if (cal->pwr_meas_q == 0)
2019 continue;
2020
2021 if ((iq_corr_neg = cal->iq_corr_meas < 0))
2022 cal->iq_corr_meas = -cal->iq_corr_meas;
2023
2024 i_coff_denom =
2025 (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
2026 q_coff_denom = cal->pwr_meas_q / 64;
2027
2028 if (i_coff_denom == 0 || q_coff_denom == 0)
2029 continue; /* Prevents division by zero. */
2030
2031 i_coff = cal->iq_corr_meas / i_coff_denom;
2032 q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;
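/*
 * Annotation (not in the original source): with the divisors above,
 * i_coff is roughly 256 * iq_corr_meas / (pwr_meas_i + pwr_meas_q) and
 * q_coff is roughly 64 * (pwr_meas_i / pwr_meas_q - 1), i.e. apparently
 * the phase and amplitude mismatch between the I and Q paths.
 */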
2033
2034 /* Negate i_coff if iq_corr_meas is positive. */
2035 if (!iq_corr_neg)
2036 i_coff = 0x40 - (i_coff & 0x3f);
2037 if (q_coff > 15)
2038 q_coff = 15;
2039 else if (q_coff <= -16)
2040 q_coff = -16; /* XXX Linux has a bug here? */
2041
2042 DPRINTFN(DBG_RF, sc, "IQ calibration for chain %d\n", i);
2043 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i));
2044 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff);
2045 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff);
2046 AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg);
2047 }
2048
2049 /* Apply new settings. */
2050 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,
2051 AR_PHY_TIMING_CTRL4_IQCORR_ENABLE);
2052 AR_WRITE_BARRIER(sc);
2053
2054 /* IQ calibration done. */
2055 sc->sc_cur_calib_mask &= ~ATHN_CAL_IQ;
2056 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2057 }
2058
2059 Static void
2060 ar5008_calib_adc_gain(struct athn_softc *sc)
2061 {
2062 struct athn_adc_cal *cal;
2063 uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2064 int i;
2065
2066 for (i = 0; i < AR_MAX_CHAINS; i++) {
2067 cal = &sc->sc_calib.adc_gain[i];
2068
2069 /* Accumulate ADC gain measurements (clear on read). */
2070 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2071 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2072 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2073 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2074 }
2075 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2076 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2077 /* Not enough samples accumulated, continue. */
2078 ar5008_do_calib(sc);
2079 return;
2080 }
2081
2082 for (i = 0; i < sc->sc_nrxchains; i++) {
2083 cal = &sc->sc_calib.adc_gain[i];
2084
2085 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2086 continue; /* Prevents division by zero. */
2087
2088 gain_mismatch_i =
2089 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2090 gain_mismatch_q =
2091 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
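/*
 * Annotation (not in the original source): the mismatch is a ratio scaled
 * by 32, so a result of 32 means the even and odd samples carry equal
 * power.
 */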
2092
2093 DPRINTFN(DBG_RF, sc, "ADC gain calibration for chain %d\n", i);
2094 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2095 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i);
2096 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q);
2097 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2098 }
2099
2100 /* Apply new settings. */
2101 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2102 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE);
2103 AR_WRITE_BARRIER(sc);
2104
2105 /* ADC gain calibration done. */
2106 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_GAIN;
2107 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2108 }
2109
2110 Static void
2111 ar5008_calib_adc_dc_off(struct athn_softc *sc)
2112 {
2113 struct athn_adc_cal *cal;
2114 int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
2115 uint32_t reg;
2116 int count, i;
2117
2118 for (i = 0; i < AR_MAX_CHAINS; i++) {
2119 cal = &sc->sc_calib.adc_dc_offset[i];
2120
2121 /* Accumulate ADC DC offset measurements (clear on read). */
2122 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2123 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2124 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2125 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2126 }
2127 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2128 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2129 /* Not enough samples accumulated, continue. */
2130 ar5008_do_calib(sc);
2131 return;
2132 }
2133
2134 if (AR_SREV_9280_10_OR_LATER(sc))
2135 count = (1 << (10 + 5));
2136 else
2137 count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES;
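/*
 * Annotation (assumption): count is meant to match the number of samples
 * accumulated by the hardware; the 10 and 2 above mirror the
 * IQCAL_LOG_COUNT_MAX values programmed in ar5008_do_calib().
 */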
2138 for (i = 0; i < sc->sc_nrxchains; i++) {
2139 cal = &sc->sc_calib.adc_dc_offset[i];
2140
2141 dc_offset_mismatch_i =
2142 (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
2143 dc_offset_mismatch_q =
2144 (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;
2145
2146 DPRINTFN(DBG_RF, sc, "ADC DC offset calibration for chain %d\n", i);
2147 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2148 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,
2149 dc_offset_mismatch_q);
2150 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,
2151 dc_offset_mismatch_i);
2152 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2153 }
2154
2155 /* Apply new settings. */
2156 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2157 AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE);
2158 AR_WRITE_BARRIER(sc);
2159
2160 /* ADC DC offset calibration done. */
2161 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_DC;
2162 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2163 }
2164
2165 PUBLIC void
2166 ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT])
2167 {
2168
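/*
 * Annotation (not in the original source): each register below packs four
 * per-rate transmit power values as 6-bit fields (hence the 0x3f masks),
 * one field per byte lane.
 */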
2169 AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,
2170 (power[ATHN_POWER_OFDM18 ] & 0x3f) << 24 |
2171 (power[ATHN_POWER_OFDM12 ] & 0x3f) << 16 |
2172 (power[ATHN_POWER_OFDM9 ] & 0x3f) << 8 |
2173 (power[ATHN_POWER_OFDM6 ] & 0x3f));
2174 AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,
2175 (power[ATHN_POWER_OFDM54 ] & 0x3f) << 24 |
2176 (power[ATHN_POWER_OFDM48 ] & 0x3f) << 16 |
2177 (power[ATHN_POWER_OFDM36 ] & 0x3f) << 8 |
2178 (power[ATHN_POWER_OFDM24 ] & 0x3f));
2179 AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,
2180 (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |
2181 (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |
2182 (power[ATHN_POWER_XR ] & 0x3f) << 8 |
2183 (power[ATHN_POWER_CCK1_LP ] & 0x3f));
2184 AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,
2185 (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |
2186 (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |
2187 (power[ATHN_POWER_CCK55_SP] & 0x3f) << 8 |
2188 (power[ATHN_POWER_CCK55_LP] & 0x3f));
2189 #ifndef IEEE80211_NO_HT
2190 AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,
2191 (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |
2192 (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |
2193 (power[ATHN_POWER_HT20(1) ] & 0x3f) << 8 |
2194 (power[ATHN_POWER_HT20(0) ] & 0x3f));
2195 AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,
2196 (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |
2197 (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 |
2198 (power[ATHN_POWER_HT20(5) ] & 0x3f) << 8 |
2199 (power[ATHN_POWER_HT20(4) ] & 0x3f));
2200 AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
2201 (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 |
2202 (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 |
2203 (power[ATHN_POWER_HT40(1) ] & 0x3f) << 8 |
2204 (power[ATHN_POWER_HT40(0) ] & 0x3f));
2205 AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
2206 (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 |
2207 (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 |
2208 (power[ATHN_POWER_HT40(5) ] & 0x3f) << 8 |
2209 (power[ATHN_POWER_HT40(4) ] & 0x3f));
2210 AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
2211 (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
2212 (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 |
2213 (power[ATHN_POWER_OFDM_DUP] & 0x3f) << 8 |
2214 (power[ATHN_POWER_CCK_DUP ] & 0x3f));
2215 #endif
2216 AR_WRITE_BARRIER(sc);
2217 }
2218
2219 PUBLIC void
2220 ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
2221 {
2222 uint32_t mask[4], reg;
2223 uint8_t m[62], p[62]; /* XXX use bit arrays? */
2224 int i, bit, cur;
2225
2226 /* Compute pilot mask. */
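/*
 * Annotation (not in the original source): the four 30-bit words built
 * below cover entries from -6000 to +6000 in steps of 100 (skipping 0);
 * a bit is set whenever the entry lies within 100 of the requested bin.
 */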
2227 cur = -6000;
2228 for (i = 0; i < 4; i++) {
2229 mask[i] = 0;
2230 for (bit = 0; bit < 30; bit++) {
2231 if (abs(cur - bin) < 100)
2232 mask[i] |= 1 << bit;
2233 cur += 100;
2234 }
2235 if (cur == 0) /* Skip entry "0". */
2236 cur = 100;
2237 }
2238 /* Write entries from -6000 to -3100. */
2239 AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
2240 AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
2241 /* Write entries from -3000 to -100. */
2242 AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
2243 AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
2244 /* Write entries from 100 to 3000. */
2245 AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
2246 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
2247 /* Write entries from 3100 to 6000. */
2248 AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
2249 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);
2250
2251 /* Compute viterbi mask. */
2252 for (cur = 6100; cur >= 0; cur -= 100)
2253 p[+cur / 100] = abs(cur - bin) < 75;
2254 for (cur = -100; cur >= -6100; cur -= 100)
2255 m[-cur / 100] = abs(cur - bin) < 75;
2256
2257 /* Write viterbi mask (XXX needs to be reworked). */
2258 reg =
2259 m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
2260 m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
2261 m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] << 8 |
2262 m[58] << 6 | m[59] << 4 | m[60] << 2 | m[61] << 0;
2263 AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
2264 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);
2265
2266 /* XXX m[48] should be m[38] ? */
2267 reg = m[31] << 28 | m[32] << 26 | m[33] << 24 |
2268 m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
2269 m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] << 8 |
2270 m[42] << 6 | m[43] << 4 | m[44] << 2 | m[45] << 0;
2271 AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
2272 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);
2273
2274 /* XXX This one is weird too. */
2275 reg =
2276 m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
2277 m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
2278 m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] << 8 |
2279 m[27] << 6 | m[28] << 4 | m[29] << 2 | m[30] << 0;
2280 AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
2281 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);
2282
2283 reg =
2284 m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
2285 m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
2286 m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] << 8 |
2287 m[12] << 6 | m[13] << 4 | m[14] << 2 | m[15] << 0;
2288 AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
2289 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);
2290
2291 reg = p[15] << 28 | p[14] << 26 | p[13] << 24 |
2292 p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
2293 p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] << 8 |
2294 p[ 4] << 6 | p[ 3] << 4 | p[ 2] << 2 | p[ 1] << 0;
2295 AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
2296 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);
2297
2298 reg = p[30] << 28 | p[29] << 26 | p[28] << 24 |
2299 p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
2300 p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] << 8 |
2301 p[19] << 6 | p[18] << 4 | p[17] << 2 | p[16] << 0;
2302 AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
2303 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);
2304
2305 reg = p[45] << 28 | p[44] << 26 | p[43] << 24 |
2306 p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
2307 p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] << 8 |
2308 p[34] << 6 | p[33] << 4 | p[32] << 2 | p[31] << 0;
2309 AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
2310 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);
2311
2312 reg =
2313 p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
2314 p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
2315 p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] << 8 |
2316 p[49] << 6 | p[48] << 4 | p[47] << 2 | p[46] << 0;
2317 AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
2318 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
2319 AR_WRITE_BARRIER(sc);
2320 }
2321
2322 Static void
2323 ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
2324 struct ieee80211_channel *extc)
2325 {
2326 struct athn_ops *ops = &sc->sc_ops;
2327 const struct athn_ini *ini = sc->sc_ini;
2328 const uint32_t *pvals;
2329 uint32_t reg;
2330 int i;
2331
2332 AR_WRITE(sc, AR_PHY(0), 0x00000007);
2333 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
2334
2335 if (!AR_SINGLE_CHIP(sc))
2336 ar5416_reset_addac(sc, c);
2337
2338 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
2339
2340 /* First initialization step (depends on channel band/bandwidth). */
2341 #ifndef IEEE80211_NO_HT
2342 if (extc != NULL) {
2343 if (IEEE80211_IS_CHAN_2GHZ(c))
2344 pvals = ini->vals_2g40;
2345 else
2346 pvals = ini->vals_5g40;
2347 }
2348 else
2349 #endif
2350 {
2351 if (IEEE80211_IS_CHAN_2GHZ(c))
2352 pvals = ini->vals_2g20;
2353 else
2354 pvals = ini->vals_5g20;
2355 }
2356 DPRINTFN(DBG_INIT, sc, "writing modal init vals\n");
2357 for (i = 0; i < ini->nregs; i++) {
2358 uint32_t val = pvals[i];
2359
2360 /* Fix AR_AN_TOP2 initialization value if required. */
2361 if (ini->regs[i] == AR_AN_TOP2 &&
2362 (sc->sc_flags & ATHN_FLAG_AN_TOP2_FIXUP))
2363 val &= ~AR_AN_TOP2_PWDCLKIND;
2364 AR_WRITE(sc, ini->regs[i], val);
2365 if (AR_IS_ANALOG_REG(ini->regs[i])) {
2366 AR_WRITE_BARRIER(sc);
2367 DELAY(100);
2368 }
2369 if ((i & 0x1f) == 0)
2370 DELAY(1);
2371 }
2372 AR_WRITE_BARRIER(sc);
2373
2374 if (sc->sc_rx_gain != NULL)
2375 ar9280_reset_rx_gain(sc, c);
2376 if (sc->sc_tx_gain != NULL)
2377 ar9280_reset_tx_gain(sc, c);
2378
2379 if (AR_SREV_9271_10(sc)) {
2380 AR_WRITE(sc, AR_PHY(68), 0x30002311);
2381 AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
2382 }
2383 AR_WRITE_BARRIER(sc);
2384
2385 /* Second initialization step (common to all channels). */
2386 DPRINTFN(DBG_INIT, sc, "writing common init vals\n");
2387 for (i = 0; i < ini->ncmregs; i++) {
2388 AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
2389 if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
2390 AR_WRITE_BARRIER(sc);
2391 DELAY(100);
2392 }
2393 if ((i & 0x1f) == 0)
2394 DELAY(1);
2395 }
2396 AR_WRITE_BARRIER(sc);
2397
2398 if (!AR_SINGLE_CHIP(sc))
2399 ar5416_reset_bb_gain(sc, c);
2400
2401 if (IEEE80211_IS_CHAN_5GHZ(c) &&
2402 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
2403 /* Update modal values for fast PLL clock. */
2404 #ifndef IEEE80211_NO_HT
2405 if (extc != NULL)
2406 pvals = ini->fastvals_5g40;
2407 else
2408 #endif
2409 pvals = ini->fastvals_5g20;
2410 DPRINTFN(DBG_INIT, sc, "writing fast pll clock init vals\n");
2411 for (i = 0; i < ini->nfastregs; i++) {
2412 AR_WRITE(sc, ini->fastregs[i], pvals[i]);
2413 if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
2414 AR_WRITE_BARRIER(sc);
2415 DELAY(100);
2416 }
2417 if ((i & 0x1f) == 0)
2418 DELAY(1);
2419 }
2420 }
2421
2422 /*
2423 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted
2424 * descriptor status.
2425 */
2426 AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);
2427
2428 /* Hardware workarounds for occasional Rx data corruption. */
2429 if (AR_SREV_9280_10_OR_LATER(sc)) {
2430 reg = AR_READ(sc, AR_PCU_MISC_MODE2);
2431 if (!AR_SREV_9271(sc))
2432 reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
2433 if (AR_SREV_9287_10_OR_LATER(sc))
2434 reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
2435 AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);
2436
2437 }
2438 else if (AR_SREV_5416_20_OR_LATER(sc)) {
2439 /* Disable baseband clock gating. */
2440 AR_WRITE(sc, AR_PHY(651), 0x11);
2441
2442 if (AR_SREV_9160(sc)) {
2443 /* Disable RIFS search to fix baseband hang. */
2444 AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
2445 AR_PHY_RIFS_INIT_DELAY_M);
2446 }
2447 }
2448 AR_WRITE_BARRIER(sc);
2449
2450 ar5008_set_phy(sc, c, extc);
2451 ar5008_init_chains(sc);
2452
2453 if (sc->sc_flags & ATHN_FLAG_OLPC) {
2454 extern int ticks;
2455 sc->sc_olpc_ticks = ticks;
2456 ops->olpc_init(sc);
2457 }
2458
2459 ops->set_txpower(sc, c, extc);
2460
2461 if (!AR_SINGLE_CHIP(sc))
2462 ar5416_rf_reset(sc, c);
2463 }
2464
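/*
 * Annotation (not in the original source): ar5008_get_vpd() linearly
 * interpolates the Vpd value (apparently the power-detector reading) for
 * the requested power from the calibration intercepts; when pwr falls
 * outside the measured range, lo == hi and the edge value is returned.
 */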
2465 Static uint8_t
2466 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2467 int nicepts)
2468 {
2469 uint8_t vpd;
2470 int i, lo, hi;
2471
2472 for (i = 0; i < nicepts; i++)
2473 if (pwrPdg[i] > pwr)
2474 break;
2475 hi = i;
2476 lo = hi - 1;
2477 if (lo == -1)
2478 lo = hi;
2479 else if (hi == nicepts)
2480 hi = lo;
2481
2482 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2483 pwrPdg[hi], vpdPdg[hi]);
2484 return vpd;
2485 }
2486
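/*
 * Annotation (assumption, not from the original source): this routine
 * appears to build the PDADC table by interpolating, for each pdGain, the
 * Vpd curves of the two calibration piers bracketing the channel, and to
 * record in boundaries[] the power levels at which the hardware switches
 * from one pdGain to the next.
 */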
2487 PUBLIC void
2488 ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
2489 struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
2490 int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
2491 {
2492 #define DB(x) ((x) / 2) /* Convert half dB to dB. */
2493 uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
2494 uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
2495 uint8_t lovpd, hivpd, boundary;
2496 int16_t ss, delta, vpdstep, val;
2497 int i, j, npdadcs, nvpds, maxidx, tgtidx;
2498
2499 /* Compute min and max power in half dB for each pdGain. */
2500 for (i = 0; i < nxpdgains; i++) {
2501 minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
2502 maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
2503 hipier->pwr[i][nicepts - 1]);
2504 }
2505
2506 /* Fill phase domain analog-to-digital converter (PDADC) table. */
2507 npdadcs = 0;
2508 for (i = 0; i < nxpdgains; i++) {
2509 if (i != nxpdgains - 1)
2510 boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
2511 else
2512 boundaries[i] = DB(maxpwr[i]);
2513 if (boundaries[i] > AR_MAX_RATE_POWER)
2514 boundaries[i] = AR_MAX_RATE_POWER;
2515
2516 if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
2517 /* Fix the gain delta (AR5416 1.0 only). */
2518 delta = boundaries[0] - 23;
2519 boundaries[0] = 23;
2520 }
2521 else
2522 delta = 0;
2523
2524 /* Find starting index for this pdGain. */
2525 if (i != 0) {
2526 ss = boundaries[i - 1] - DB(minpwr[i]) -
2527 overlap + 1 + delta;
2528 }
2529 else if (AR_SREV_9280_10_OR_LATER(sc))
2530 ss = -DB(minpwr[i]);
2531 else
2532 ss = 0;
2533
2534 /* Compute Vpd table for this pdGain. */
2535 nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
2536 memset(vpd, 0, sizeof(vpd));
2537 pwr = minpwr[i];
2538 for (j = 0; j < nvpds; j++) {
2539 /* Get lower and higher Vpd. */
2540 lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
2541 lopier->vpd[i], nicepts);
2542 hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
2543 hipier->vpd[i], nicepts);
2544
2545 /* Interpolate the final Vpd. */
2546 vpd[j] = athn_interpolate(fbin,
2547 lopier->fbin, lovpd, hipier->fbin, hivpd);
2548
2549 pwr += 2; /* In half dB. */
2550 }
2551
2552 /* Extrapolate data for ss < 0. */
2553 if (vpd[1] > vpd[0])
2554 vpdstep = vpd[1] - vpd[0];
2555 else
2556 vpdstep = 1;
2557 while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2558 val = vpd[0] + ss * vpdstep;
2559 pdadcs[npdadcs++] = MAX(val, 0);
2560 ss++;
2561 }
2562
2563 tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
2564 maxidx = MIN(tgtidx, nvpds);
2565 while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
2566 pdadcs[npdadcs++] = vpd[ss++];
2567
2568 if (tgtidx < maxidx)
2569 continue;
2570
2571 /* Extrapolate data for maxidx <= ss <= tgtidx. */
2572 if (vpd[nvpds - 1] > vpd[nvpds - 2])
2573 vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
2574 else
2575 vpdstep = 1;
2576 while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2577 val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
2578 pdadcs[npdadcs++] = MIN(val, 255);
2579 ss++;
2580 }
2581 }
2582
2583 /* Fill remaining PDADC and boundaries entries. */
2584 if (AR_SREV_9285(sc))
2585 boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
2586 else /* Fill with latest. */
2587 boundary = boundaries[nxpdgains - 1];
2588
2589 for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
2590 boundaries[nxpdgains] = boundary;
2591
2592 for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
2593 pdadcs[npdadcs] = pdadcs[npdadcs - 1];
2594 #undef DB
2595 }
2596
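/*
 * Annotation (not in the original source): ar5008_get_lg_tpow() and the HT
 * variant below select the two calibration channels bracketing the current
 * channel and linearly interpolate the per-rate target powers between them;
 * channels outside the calibrated range use the nearest edge entry.
 */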
2597 PUBLIC void
2598 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2599 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2600 uint8_t tpow[4])
2601 {
2602 uint8_t fbin;
2603 int i, lo, hi;
2604
2605 /* Find interval (lower and upper indices). */
2606 fbin = athn_chan2fbin(c);
2607 for (i = 0; i < nchans; i++) {
2608 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2609 tgt[i].bChannel > fbin)
2610 break;
2611 }
2612 hi = i;
2613 lo = hi - 1;
2614 if (lo == -1)
2615 lo = hi;
2616 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2617 hi = lo;
2618
2619 /* Interpolate values. */
2620 for (i = 0; i < 4; i++) {
2621 tpow[i] = athn_interpolate(fbin,
2622 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2623 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2624 }
2625 /* XXX Apply conformance testing limit. */
2626 }
2627
2628 #ifndef IEEE80211_NO_HT
2629 PUBLIC void
2630 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2631 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2632 uint8_t tpow[8])
2633 {
2634 uint8_t fbin;
2635 int i, lo, hi;
2636
2637 /* Find interval (lower and upper indices). */
2638 fbin = athn_chan2fbin(c);
2639 for (i = 0; i < nchans; i++) {
2640 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2641 tgt[i].bChannel > fbin)
2642 break;
2643 }
2644 hi = i;
2645 lo = hi - 1;
2646 if (lo == -1)
2647 lo = hi;
2648 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2649 hi = lo;
2650
2651 /* Interpolate values. */
2652 for (i = 0; i < 8; i++) {
2653 tpow[i] = athn_interpolate(fbin,
2654 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2655 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2656 }
2657 /* XXX Apply conformance testing limit. */
2658 }
2659 #endif
2660
2661 /*
2662 * Adaptive noise immunity.
2663 */
2664 Static void
2665 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2666 {
2667 int high = level == 4;
2668 uint32_t reg;
2669
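/*
 * Annotation (assumption): only the highest noise-immunity level (4)
 * selects the more aggressive thresholds; lower levels fall back to what
 * appear to be the default values.
 */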
2670 reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2671 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2672 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2673
2674 reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2675 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2676 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2677 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2678
2679 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2680 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2681 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2682
2683 AR_WRITE_BARRIER(sc);
2684 }
2685
2686 Static void
2687 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2688 {
2689 uint32_t reg;
2690
2691 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2692 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2693 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2694 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2695 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2696
2697 reg = AR_READ(sc, AR_PHY_SFCORR);
2698 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2699 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2700 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2701 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2702
2703 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2704 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2705 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2706 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2707 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2708 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2709
2710 AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2711 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2712 AR_WRITE_BARRIER(sc);
2713 }
2714
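/*
 * Annotation (assumption): writing the self-correlation thresholds below
 * with what look like their maximum values (127, 63, 31) and clearing
 * USE_SELF_CORR_LOW effectively disables OFDM weak-signal detection.
 */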
2715 Static void
2716 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2717 {
2718 uint32_t reg;
2719
2720 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2721 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2722 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2723 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2724 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2725
2726 reg = AR_READ(sc, AR_PHY_SFCORR);
2727 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2728 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2729 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2730 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2731
2732 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2733 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2734 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2735 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2736 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2737 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2738
2739 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2740 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2741 AR_WRITE_BARRIER(sc);
2742 }
2743
2744 Static void
2745 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2746 {
2747 uint32_t reg;
2748
2749 reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2750 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2751 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2752 AR_WRITE_BARRIER(sc);
2753 }
2754
2755 Static void
2756 ar5008_set_firstep_level(struct athn_softc *sc, int level)
2757 {
2758 uint32_t reg;
2759
2760 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2761 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2762 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2763 AR_WRITE_BARRIER(sc);
2764 }
2765
2766 Static void
2767 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2768 {
2769 uint32_t reg;
2770
2771 reg = AR_READ(sc, AR_PHY_TIMING5);
2772 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2773 AR_WRITE(sc, AR_PHY_TIMING5, reg);
2774 AR_WRITE_BARRIER(sc);
2775 }
2776