1	/*	$NetBSD: arn5008.c,v 1.3.6.1 2014/05/18 17:45:37 rmind Exp $	*/
2 /* $OpenBSD: ar5008.c,v 1.21 2012/08/25 12:14:31 kettenis Exp $ */
3
4 /*-
5 * Copyright (c) 2009 Damien Bergamini <damien.bergamini (at) free.fr>
6 * Copyright (c) 2008-2009 Atheros Communications Inc.
7 *
8 * Permission to use, copy, modify, and/or distribute this software for any
9 * purpose with or without fee is hereby granted, provided that the above
10 * copyright notice and this permission notice appear in all copies.
11 *
12 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
13 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
14 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
15 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
16 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
17 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
18 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19 */
20
21 /*
22 * Driver for Atheros 802.11a/g/n chipsets.
23 * Routines common to AR5008, AR9001 and AR9002 families.
24 */
25
26 #include <sys/cdefs.h>
27 __KERNEL_RCSID(0, "$NetBSD: arn5008.c,v 1.3.6.1 2014/05/18 17:45:37 rmind Exp $");
28
29 #include <sys/param.h>
30 #include <sys/sockio.h>
31 #include <sys/mbuf.h>
32 #include <sys/kernel.h>
33 #include <sys/socket.h>
34 #include <sys/systm.h>
35 #include <sys/malloc.h>
36 #include <sys/queue.h>
37 #include <sys/conf.h>
38 #include <sys/device.h>
39
40 #include <sys/bus.h>
41 #include <sys/endian.h>
42 #include <sys/intr.h>
43
44 #include <net/bpf.h>
45 #include <net/if.h>
46 #include <net/if_arp.h>
47 #include <net/if_dl.h>
48 #include <net/if_ether.h>
49 #include <net/if_media.h>
50 #include <net/if_types.h>
51
52 #include <netinet/in.h>
53 #include <netinet/in_systm.h>
54 #include <netinet/in_var.h>
55 #include <netinet/ip.h>
56
57 #include <net80211/ieee80211_var.h>
58 #include <net80211/ieee80211_amrr.h>
59 #include <net80211/ieee80211_radiotap.h>
60
61 #include <dev/ic/athnreg.h>
62 #include <dev/ic/athnvar.h>
63
64 #include <dev/ic/arn5008reg.h>
65 #include <dev/ic/arn5008.h>
66 #include <dev/ic/arn5416.h>
67 #include <dev/ic/arn9280.h>
68
69 #define Static static
70
71 Static void ar5008_calib_adc_dc_off(struct athn_softc *);
72 Static void ar5008_calib_adc_gain(struct athn_softc *);
73 Static void ar5008_calib_iq(struct athn_softc *);
74 Static void ar5008_disable_ofdm_weak_signal(struct athn_softc *);
75 Static void ar5008_disable_phy(struct athn_softc *);
76 Static int ar5008_dma_alloc(struct athn_softc *);
77 Static void ar5008_dma_free(struct athn_softc *);
78 Static void ar5008_do_calib(struct athn_softc *);
79 Static void ar5008_do_noisefloor_calib(struct athn_softc *);
80 Static void ar5008_enable_antenna_diversity(struct athn_softc *);
81 Static void ar5008_enable_ofdm_weak_signal(struct athn_softc *);
82 Static uint8_t ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
83 Static void ar5008_gpio_config_input(struct athn_softc *, int);
84 Static void ar5008_gpio_config_output(struct athn_softc *, int, int);
85 Static int ar5008_gpio_read(struct athn_softc *, int);
86 Static void ar5008_gpio_write(struct athn_softc *, int, int);
87 Static void ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
88 struct ieee80211_channel *);
89 Static void ar5008_init_baseband(struct athn_softc *);
90 Static void ar5008_init_chains(struct athn_softc *);
91 Static int ar5008_intr(struct athn_softc *);
92 Static void ar5008_next_calib(struct athn_softc *);
93 Static int ar5008_read_eep_word(struct athn_softc *, uint32_t,
94 uint16_t *);
95 Static int ar5008_read_rom(struct athn_softc *);
96 Static void ar5008_rf_bus_release(struct athn_softc *);
97 Static int ar5008_rf_bus_request(struct athn_softc *);
98 Static void ar5008_rfsilent_init(struct athn_softc *);
99 Static int ar5008_rx_alloc(struct athn_softc *);
100 Static void ar5008_rx_enable(struct athn_softc *);
101 Static void ar5008_rx_free(struct athn_softc *);
102 Static void ar5008_rx_intr(struct athn_softc *);
103 Static void ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
104 struct ar_rx_desc *);
105 Static void ar5008_set_cck_weak_signal(struct athn_softc *, int);
106 Static void ar5008_set_delta_slope(struct athn_softc *,
107 struct ieee80211_channel *, struct ieee80211_channel *);
108 Static void ar5008_set_firstep_level(struct athn_softc *, int);
109 Static void ar5008_set_noise_immunity_level(struct athn_softc *, int);
110 Static void ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
111 struct ieee80211_channel *);
112 Static void ar5008_set_rf_mode(struct athn_softc *,
113 struct ieee80211_channel *);
114 Static void ar5008_set_rxchains(struct athn_softc *);
115 Static void ar5008_set_spur_immunity_level(struct athn_softc *, int);
116 Static void ar5008_swap_rom(struct athn_softc *);
117 Static int ar5008_swba_intr(struct athn_softc *);
118 Static int ar5008_tx(struct athn_softc *, struct mbuf *,
119 struct ieee80211_node *, int);
120 Static int ar5008_tx_alloc(struct athn_softc *);
121 Static void ar5008_tx_free(struct athn_softc *);
122 Static void ar5008_tx_intr(struct athn_softc *);
123 Static int ar5008_tx_process(struct athn_softc *, int);
124
125 #ifdef notused
126 Static void ar5008_bb_load_noisefloor(struct athn_softc *);
127 Static void ar5008_get_noisefloor(struct athn_softc *,
128 struct ieee80211_channel *);
129 Static void ar5008_noisefloor_calib(struct athn_softc *);
130 Static void ar5008_read_noisefloor(struct athn_softc *, int16_t *,
131 int16_t *);
132 Static void ar5008_write_noisefloor(struct athn_softc *, int16_t *,
133 int16_t *);
134 #endif /* notused */
135
136 // bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
137
138 /*
139 * XXX: see if_iwn.c:MCLGETIalt() for a better solution.
140 */
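/*
 * Local stand-in for OpenBSD's MCLGETI(): allocate a packet header mbuf
 * and attach MEXTMALLOC() storage of the requested size.  Unlike the
 * real MCLGETI() there is no interface-based cluster accounting here,
 * and the storage is not necessarily cache-line aligned (see the XXX in
 * ar5008_rx_alloc()).
 */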
141 static struct mbuf *
142 MCLGETI(struct athn_softc *sc __unused, int how,
143 struct ifnet *ifp __unused, u_int size)
144 {
145 struct mbuf *m;
146
147 MGETHDR(m, how, MT_DATA);
148 if (m == NULL)
149 return NULL;
150
151 MEXTMALLOC(m, size, how);
152 if ((m->m_flags & M_EXT) == 0) {
153 m_freem(m);
154 return NULL;
155 }
156 return m;
157 }
158
159 PUBLIC int
160 ar5008_attach(struct athn_softc *sc)
161 {
162 struct athn_ops *ops = &sc->sc_ops;
163 struct ieee80211com *ic = &sc->sc_ic;
164 struct ar_base_eep_header *base;
165 uint8_t eep_ver, kc_entries_log;
166 int error;
167
168 /* Set callbacks for AR5008, AR9001 and AR9002 families. */
169 ops->gpio_read = ar5008_gpio_read;
170 ops->gpio_write = ar5008_gpio_write;
171 ops->gpio_config_input = ar5008_gpio_config_input;
172 ops->gpio_config_output = ar5008_gpio_config_output;
173 ops->rfsilent_init = ar5008_rfsilent_init;
174
175 ops->dma_alloc = ar5008_dma_alloc;
176 ops->dma_free = ar5008_dma_free;
177 ops->rx_enable = ar5008_rx_enable;
178 ops->intr = ar5008_intr;
179 ops->tx = ar5008_tx;
180
181 ops->set_rf_mode = ar5008_set_rf_mode;
182 ops->rf_bus_request = ar5008_rf_bus_request;
183 ops->rf_bus_release = ar5008_rf_bus_release;
184 ops->set_phy = ar5008_set_phy;
185 ops->set_delta_slope = ar5008_set_delta_slope;
186 ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
187 ops->init_baseband = ar5008_init_baseband;
188 ops->disable_phy = ar5008_disable_phy;
189 ops->set_rxchains = ar5008_set_rxchains;
190 ops->noisefloor_calib = ar5008_do_noisefloor_calib;
191 ops->do_calib = ar5008_do_calib;
192 ops->next_calib = ar5008_next_calib;
193 ops->hw_init = ar5008_hw_init;
194
195 ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
196 ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
197 ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
198 ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
199 ops->set_firstep_level = ar5008_set_firstep_level;
200 ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;
201
202 /* Set MAC registers offsets. */
203 sc->sc_obs_off = AR_OBS;
204 sc->sc_gpio_input_en_off = AR_GPIO_INPUT_EN_VAL;
205
206 if (!(sc->sc_flags & ATHN_FLAG_PCIE))
207 athn_config_nonpcie(sc);
208 else
209 athn_config_pcie(sc);
210
211 /* Read entire ROM content in memory. */
212 if ((error = ar5008_read_rom(sc)) != 0) {
213 aprint_error_dev(sc->sc_dev, "could not read ROM\n");
214 return error;
215 }
216
217 /* Get RF revision. */
218 sc->sc_rf_rev = ar5416_get_rf_rev(sc);
219
220 base = sc->sc_eep;
221 eep_ver = (base->version >> 12) & 0xf;
222 sc->sc_eep_rev = (base->version & 0xfff);
223 if (eep_ver != AR_EEP_VER || sc->sc_eep_rev == 0) {
224 aprint_error_dev(sc->sc_dev, "unsupported ROM version %d.%d\n",
225 eep_ver, sc->sc_eep_rev);
226 return EINVAL;
227 }
228
229 if (base->opCapFlags & AR_OPFLAGS_11A)
230 sc->sc_flags |= ATHN_FLAG_11A;
231 if (base->opCapFlags & AR_OPFLAGS_11G)
232 sc->sc_flags |= ATHN_FLAG_11G;
233 if (base->opCapFlags & AR_OPFLAGS_11N)
234 sc->sc_flags |= ATHN_FLAG_11N;
235
236 IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr);
237
238 /* Check if we have a hardware radio switch. */
239 if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) {
240 sc->sc_flags |= ATHN_FLAG_RFSILENT;
241 /* Get GPIO pin used by hardware radio switch. */
242 sc->sc_rfsilent_pin = MS(base->rfSilent,
243 AR_EEP_RFSILENT_GPIO_SEL);
244 /* Get polarity of hardware radio switch. */
245 if (base->rfSilent & AR_EEP_RFSILENT_POLARITY)
246 sc->sc_flags |= ATHN_FLAG_RFSILENT_REVERSED;
247 }
248
249 /* Get the number of HW key cache entries. */
250 kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES);
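	/*
	 * The ROM stores the key cache size as a power-of-two exponent;
	 * a value of zero means "use the chip default" of
	 * AR_KEYTABLE_SIZE entries.
	 */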
251 sc->sc_kc_entries = kc_entries_log != 0 ?
252 1 << kc_entries_log : AR_KEYTABLE_SIZE;
253
254 sc->sc_txchainmask = base->txMask;
255 if (sc->sc_mac_ver == AR_SREV_VERSION_5416_PCI &&
256 !(base->opCapFlags & AR_OPFLAGS_11A)) {
257 /* For single-band AR5416 PCI, use GPIO pin 0. */
258 sc->sc_rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
259 }
260 else
261 sc->sc_rxchainmask = base->rxMask;
262
263 ops->setup(sc);
264 return 0;
265 }
266
267 /*
268 * Read 16-bit word from ROM.
269 */
270 Static int
271 ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
272 {
273 uint32_t reg;
274 int ntries;
275
276 reg = AR_READ(sc, AR_EEPROM_OFFSET(addr));
277 for (ntries = 0; ntries < 1000; ntries++) {
278 reg = AR_READ(sc, AR_EEPROM_STATUS_DATA);
279 if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY |
280 AR_EEPROM_STATUS_DATA_PROT_ACCESS))) {
281 *val = MS(reg, AR_EEPROM_STATUS_DATA_VAL);
282 return 0;
283 }
284 DELAY(10);
285 }
286 *val = 0xffff;
287 return ETIMEDOUT;
288 }
289
290 Static int
291 ar5008_read_rom(struct athn_softc *sc)
292 {
293 uint32_t addr, end;
294 uint16_t magic, sum, *eep;
295 int need_swap = 0;
296 int error;
297
298 /* Determine ROM endianness. */
299 error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic);
300 if (error != 0)
301 return error;
302 if (magic != AR_EEPROM_MAGIC) {
303 if (magic != bswap16(AR_EEPROM_MAGIC)) {
304 DPRINTFN(DBG_INIT, sc,
305 "invalid ROM magic 0x%x != 0x%x\n",
306 magic, AR_EEPROM_MAGIC);
307 return EIO;
308 }
309 DPRINTFN(DBG_INIT, sc, "non-native ROM endianness\n");
310 need_swap = 1;
311 }
312
313 /* Allocate space to store ROM in host memory. */
314 sc->sc_eep = malloc(sc->sc_eep_size, M_DEVBUF, M_NOWAIT);
315 if (sc->sc_eep == NULL)
316 return ENOMEM;
317
318 /* Read entire ROM and compute checksum. */
319 sum = 0;
320 eep = sc->sc_eep;
321 end = sc->sc_eep_base + sc->sc_eep_size / sizeof(uint16_t);
322 for (addr = sc->sc_eep_base; addr < end; addr++, eep++) {
323 if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
324 DPRINTFN(DBG_INIT, sc,
325 "could not read ROM at 0x%x\n", addr);
326 return error;
327 }
328 if (need_swap)
329 *eep = bswap16(*eep);
330 sum ^= *eep;
331 }
332 if (sum != 0xffff) {
333 aprint_error_dev(sc->sc_dev, "bad ROM checksum 0x%04x\n", sum);
334 return EIO;
335 }
336 if (need_swap)
337 ar5008_swap_rom(sc);
338
339 return 0;
340 }
341
342 Static void
343 ar5008_swap_rom(struct athn_softc *sc)
344 {
345 struct ar_base_eep_header *base = sc->sc_eep;
346
347 /* Swap common fields first. */
348 base->length = bswap16(base->length);
349 base->version = bswap16(base->version);
350 base->regDmn[0] = bswap16(base->regDmn[0]);
351 base->regDmn[1] = bswap16(base->regDmn[1]);
352 base->rfSilent = bswap16(base->rfSilent);
353 base->blueToothOptions = bswap16(base->blueToothOptions);
354 base->deviceCap = bswap16(base->deviceCap);
355
356 /* Swap device-dependent fields. */
357 sc->sc_ops.swap_rom(sc);
358 }
359
360 /*
361 * Access to General Purpose Input/Output ports.
362 */
363 Static int
364 ar5008_gpio_read(struct athn_softc *sc, int pin)
365 {
366
367 KASSERT(pin < sc->sc_ngpiopins);
368 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc))
369 return !((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1);
370 return (AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->sc_ngpiopins + pin)) & 1;
371 }
372
373 Static void
374 ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
375 {
376 uint32_t reg;
377
378 KASSERT(pin < sc->sc_ngpiopins);
379
380 if (sc->sc_flags & ATHN_FLAG_USB)
381 set = !set; /* AR9271/AR7010 is reversed. */
382
383 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
384 /* Special case for AR7010. */
385 reg = AR_READ(sc, AR7010_GPIO_OUT);
386 if (set)
387 reg |= 1 << pin;
388 else
389 reg &= ~(1 << pin);
390 AR_WRITE(sc, AR7010_GPIO_OUT, reg);
391 }
392 else {
393 reg = AR_READ(sc, AR_GPIO_IN_OUT);
394 if (set)
395 reg |= 1 << pin;
396 else
397 reg &= ~(1 << pin);
398 AR_WRITE(sc, AR_GPIO_IN_OUT, reg);
399 }
400 AR_WRITE_BARRIER(sc);
401 }
402
403 Static void
404 ar5008_gpio_config_input(struct athn_softc *sc, int pin)
405 {
406 uint32_t reg;
407
408 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
409 /* Special case for AR7010. */
410 AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
411 }
412 else {
413 reg = AR_READ(sc, AR_GPIO_OE_OUT);
414 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
415 reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2);
416 AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
417 }
418 AR_WRITE_BARRIER(sc);
419 }
420
421 Static void
422 ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
423 {
424 uint32_t reg;
425 int mux, off;
426
427 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
428 /* Special case for AR7010. */
429 AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
430 AR_WRITE_BARRIER(sc);
431 return;
432 }
433 mux = pin / 6;
434 off = pin % 6;
435
436 reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux));
437 if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
438 reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
439 reg &= ~(0x1f << (off * 5));
440 reg |= (type & 0x1f) << (off * 5);
441 AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg);
442
443 reg = AR_READ(sc, AR_GPIO_OE_OUT);
444 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
445 reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2);
446 AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
447 AR_WRITE_BARRIER(sc);
448 }
449
450 Static void
451 ar5008_rfsilent_init(struct athn_softc *sc)
452 {
453 uint32_t reg;
454
455 /* Configure hardware radio switch. */
456 AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
457 reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
458 reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
459 AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
460 ar5008_gpio_config_input(sc, sc->sc_rfsilent_pin);
461 AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
462 if (!(sc->sc_flags & ATHN_FLAG_RFSILENT_REVERSED)) {
463 AR_SETBITS(sc, AR_GPIO_INTR_POL,
464 AR_GPIO_INTR_POL_PIN(sc->sc_rfsilent_pin));
465 }
466 AR_WRITE_BARRIER(sc);
467 }
468
469 Static int
470 ar5008_dma_alloc(struct athn_softc *sc)
471 {
472 int error;
473
474 error = ar5008_tx_alloc(sc);
475 if (error != 0)
476 return error;
477
478 error = ar5008_rx_alloc(sc);
479 if (error != 0)
480 return error;
481
482 return 0;
483 }
484
485 Static void
486 ar5008_dma_free(struct athn_softc *sc)
487 {
488
489 ar5008_tx_free(sc);
490 ar5008_rx_free(sc);
491 }
492
493 Static int
494 ar5008_tx_alloc(struct athn_softc *sc)
495 {
496 struct athn_tx_buf *bf;
497 bus_size_t size;
498 int error, nsegs, i;
499
500 /*
501 * Allocate a pool of Tx descriptors shared between all Tx queues.
502 */
503 size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);
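	/*
	 * The pool is carved up below so that Tx buffer i owns the run of
	 * AR5008_MAX_SCATTER descriptors starting at index
	 * i * AR5008_MAX_SCATTER, one descriptor per possible DMA segment.
	 */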
504
505 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
506 BUS_DMA_NOWAIT, &sc->sc_map);
507 if (error != 0)
508 goto fail;
509
510 error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->sc_seg, 1,
511 // XXX &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
512 &nsegs, BUS_DMA_NOWAIT);
513 if (error != 0)
514 goto fail;
515
516 error = bus_dmamem_map(sc->sc_dmat, &sc->sc_seg, 1, size,
517 (void **)&sc->sc_descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
518 if (error != 0)
519 goto fail;
520
521 error = bus_dmamap_load_raw(sc->sc_dmat, sc->sc_map, &sc->sc_seg, 1, size,
522 BUS_DMA_NOWAIT);
523 if (error != 0)
524 goto fail;
525
526 SIMPLEQ_INIT(&sc->sc_txbufs);
527 for (i = 0; i < ATHN_NTXBUFS; i++) {
528 bf = &sc->sc_txpool[i];
529
530 error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
531 AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
532 &bf->bf_map);
533 if (error != 0) {
534 aprint_error_dev(sc->sc_dev,
535 "could not create Tx buf DMA map\n");
536 goto fail;
537 }
538
539 bf->bf_descs =
540 &((struct ar_tx_desc *)sc->sc_descs)[i * AR5008_MAX_SCATTER];
541 bf->bf_daddr = sc->sc_map->dm_segs[0].ds_addr +
542 i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);
543
544 SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
545 }
546 return 0;
547 fail:
548 ar5008_tx_free(sc);
549 return error;
550 }
551
552 Static void
553 ar5008_tx_free(struct athn_softc *sc)
554 {
555 struct athn_tx_buf *bf;
556 int i;
557
558 for (i = 0; i < ATHN_NTXBUFS; i++) {
559 bf = &sc->sc_txpool[i];
560
561 if (bf->bf_map != NULL)
562 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
563 }
564 /* Free Tx descriptors. */
565 if (sc->sc_map != NULL) {
566 if (sc->sc_descs != NULL) {
567 bus_dmamap_unload(sc->sc_dmat, sc->sc_map);
568 bus_dmamem_unmap(sc->sc_dmat, (void *)sc->sc_descs,
569 ATHN_NTXBUFS * AR5008_MAX_SCATTER *
570 sizeof(struct ar_tx_desc));
571 bus_dmamem_free(sc->sc_dmat, &sc->sc_seg, 1);
572 }
573 bus_dmamap_destroy(sc->sc_dmat, sc->sc_map);
574 }
575 }
576
577 Static int
578 ar5008_rx_alloc(struct athn_softc *sc)
579 {
580 struct athn_rxq *rxq = &sc->sc_rxq[0];
581 struct athn_rx_buf *bf;
582 struct ar_rx_desc *ds;
583 bus_size_t size;
584 int error, nsegs, i;
585
586 rxq->bf = malloc(ATHN_NRXBUFS * sizeof(*bf), M_DEVBUF,
587 M_NOWAIT | M_ZERO);
588 if (rxq->bf == NULL)
589 return ENOMEM;
590
591 size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc);
592
593 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
594 BUS_DMA_NOWAIT, &rxq->map);
595 if (error != 0)
596 goto fail;
597
598 error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
599 // &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
600 &nsegs, BUS_DMA_NOWAIT);
601 if (error != 0)
602 goto fail;
603
604 error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
605 (void **)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
606 if (error != 0)
607 goto fail;
608
609 error = bus_dmamap_load_raw(sc->sc_dmat, rxq->map, &rxq->seg, 1,
610 size, BUS_DMA_NOWAIT);
611 if (error != 0)
612 goto fail;
613
614 for (i = 0; i < ATHN_NRXBUFS; i++) {
615 bf = &rxq->bf[i];
616 ds = &((struct ar_rx_desc *)rxq->descs)[i];
617
618 error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
619 ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
620 &bf->bf_map);
621 if (error != 0) {
622 aprint_error_dev(sc->sc_dev,
623 			    "could not create Rx buf DMA map\n");
624 goto fail;
625 }
626 /*
627 * Assumes MCLGETI returns cache-line-size aligned buffers.
628 * XXX: does ours?
629 */
630 bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
631 if (bf->bf_m == NULL) {
632 aprint_error_dev(sc->sc_dev,
633 "could not allocate Rx mbuf\n");
634 error = ENOBUFS;
635 goto fail;
636 }
637
638 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
639 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
640 BUS_DMA_NOWAIT | BUS_DMA_READ);
641 if (error != 0) {
642 aprint_error_dev(sc->sc_dev,
643 "could not DMA map Rx buffer\n");
644 goto fail;
645 }
646
647 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
648 BUS_DMASYNC_PREREAD);
649
650 bf->bf_desc = ds;
651 bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
652 i * sizeof(struct ar_rx_desc);
653 }
654 return 0;
655 fail:
656 ar5008_rx_free(sc);
657 return error;
658 }
659
660 Static void
661 ar5008_rx_free(struct athn_softc *sc)
662 {
663 struct athn_rxq *rxq = &sc->sc_rxq[0];
664 struct athn_rx_buf *bf;
665 int i;
666
667 if (rxq->bf == NULL)
668 return;
669 for (i = 0; i < ATHN_NRXBUFS; i++) {
670 bf = &rxq->bf[i];
671
672 if (bf->bf_map != NULL)
673 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
674 if (bf->bf_m != NULL)
675 m_freem(bf->bf_m);
676 }
677 free(rxq->bf, M_DEVBUF);
678
679 /* Free Rx descriptors. */
680 if (rxq->map != NULL) {
681 if (rxq->descs != NULL) {
682 bus_dmamap_unload(sc->sc_dmat, rxq->map);
683 bus_dmamem_unmap(sc->sc_dmat, (void *)rxq->descs,
684 ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
685 bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
686 }
687 bus_dmamap_destroy(sc->sc_dmat, rxq->map);
688 }
689 }
690
691 Static void
692 ar5008_rx_enable(struct athn_softc *sc)
693 {
694 struct athn_rxq *rxq = &sc->sc_rxq[0];
695 struct athn_rx_buf *bf;
696 struct ar_rx_desc *ds;
697 int i;
698
699 /* Setup and link Rx descriptors. */
700 SIMPLEQ_INIT(&rxq->head);
701 rxq->lastds = NULL;
702 for (i = 0; i < ATHN_NRXBUFS; i++) {
703 bf = &rxq->bf[i];
704 ds = bf->bf_desc;
705
706 memset(ds, 0, sizeof(*ds));
707 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
708 ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ);
709
710 if (rxq->lastds != NULL) {
711 ((struct ar_rx_desc *)rxq->lastds)->ds_link =
712 bf->bf_daddr;
713 }
714 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
715 rxq->lastds = ds;
716 }
717 bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
718 BUS_DMASYNC_PREREAD);
719
720 /* Enable Rx. */
721 AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
722 AR_WRITE(sc, AR_CR, AR_CR_RXE);
723 AR_WRITE_BARRIER(sc);
724 }
725
726 Static void
727 ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
728 struct ar_rx_desc *ds)
729 {
730 struct athn_rx_radiotap_header *tap = &sc->sc_rxtap;
731 struct ieee80211com *ic = &sc->sc_ic;
732 uint64_t tsf;
733 uint32_t tstamp;
734 uint8_t rate;
735
736 /* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
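	/*
	 * The descriptor only carries the low 15 bits of the TSF at the
	 * time of reception.  If those bits of the current TSF are already
	 * smaller than the stamp, the TSF has crossed a 0x8000 boundary
	 * since the frame arrived, so step back one period before splicing
	 * the stamp into the full 64-bit value.
	 */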
737 tstamp = ds->ds_status2;
738 tsf = AR_READ(sc, AR_TSF_U32);
739 tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32);
740 if ((tsf & 0x7fff) < tstamp)
741 tsf -= 0x8000;
742 tsf = (tsf & ~0x7fff) | tstamp;
743
744 tap->wr_flags = IEEE80211_RADIOTAP_F_FCS;
745 tap->wr_tsft = htole64(tsf);
746 tap->wr_chan_freq = htole16(ic->ic_curchan->ic_freq);
747 tap->wr_chan_flags = htole16(ic->ic_curchan->ic_flags);
748 tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
749 /* XXX noise. */
750 tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
751 tap->wr_rate = 0; /* In case it can't be found below. */
752 if (AR_SREV_5416_20_OR_LATER(sc))
753 rate = MS(ds->ds_status0, AR_RXS0_RATE);
754 else
755 rate = MS(ds->ds_status3, AR_RXS3_RATE);
756 if (rate & 0x80) { /* HT. */
757 /* Bit 7 set means HT MCS instead of rate. */
758 tap->wr_rate = rate;
759 if (!(ds->ds_status3 & AR_RXS3_GI))
760 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI;
761
762 }
763 else if (rate & 0x10) { /* CCK. */
764 if (rate & 0x04)
765 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
766 switch (rate & ~0x14) {
767 case 0xb: tap->wr_rate = 2; break;
768 case 0xa: tap->wr_rate = 4; break;
769 case 0x9: tap->wr_rate = 11; break;
770 case 0x8: tap->wr_rate = 22; break;
771 }
772 }
773 else { /* OFDM. */
774 switch (rate) {
775 case 0xb: tap->wr_rate = 12; break;
776 case 0xf: tap->wr_rate = 18; break;
777 case 0xa: tap->wr_rate = 24; break;
778 case 0xe: tap->wr_rate = 36; break;
779 case 0x9: tap->wr_rate = 48; break;
780 case 0xd: tap->wr_rate = 72; break;
781 case 0x8: tap->wr_rate = 96; break;
782 case 0xc: tap->wr_rate = 108; break;
783 }
784 }
785 bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m);
786 }
787
788 static __inline int
789 ar5008_rx_process(struct athn_softc *sc)
790 {
791 struct ieee80211com *ic = &sc->sc_ic;
792 struct ifnet *ifp = &sc->sc_if;
793 struct athn_rxq *rxq = &sc->sc_rxq[0];
794 struct athn_rx_buf *bf, *nbf;
795 struct ar_rx_desc *ds;
796 struct ieee80211_frame *wh;
797 struct ieee80211_node *ni;
798 struct mbuf *m, *m1;
799 u_int32_t rstamp;
800 int error, len, rssi;
801
802 bf = SIMPLEQ_FIRST(&rxq->head);
803 if (__predict_false(bf == NULL)) { /* Should not happen. */
804 aprint_error_dev(sc->sc_dev, "Rx queue is empty!\n");
805 return ENOENT;
806 }
807 ds = bf->bf_desc;
808
809 if (!(ds->ds_status8 & AR_RXS8_DONE)) {
810 /*
811 * On some parts, the status words can get corrupted
812 * (including the "done" bit), so we check the next
813 * descriptor "done" bit. If it is set, it is a good
814 * indication that the status words are corrupted, so
815 * we skip this descriptor and drop the frame.
816 */
817 nbf = SIMPLEQ_NEXT(bf, bf_list);
818 if (nbf != NULL &&
819 (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
820 AR_RXS8_DONE)) {
821 DPRINTFN(DBG_RX, sc,
822 "corrupted descriptor status=0x%x\n",
823 ds->ds_status8);
824 /* HW will not "move" RXDP in this case, so do it. */
825 AR_WRITE(sc, AR_RXDP, nbf->bf_daddr);
826 AR_WRITE_BARRIER(sc);
827 ifp->if_ierrors++;
828 goto skip;
829 }
830 return EBUSY;
831 }
832
833 if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
834 /* Drop frames that span multiple Rx descriptors. */
835 DPRINTFN(DBG_RX, sc, "dropping split frame\n");
836 ifp->if_ierrors++;
837 goto skip;
838 }
839 if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) {
840 if (ds->ds_status8 & AR_RXS8_CRC_ERR)
841 DPRINTFN(DBG_RX, sc, "CRC error\n");
842 else if (ds->ds_status8 & AR_RXS8_PHY_ERR)
843 DPRINTFN(DBG_RX, sc, "PHY error=0x%x\n",
844 MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE));
845 else if (ds->ds_status8 & AR_RXS8_DECRYPT_CRC_ERR)
846 DPRINTFN(DBG_RX, sc, "Decryption CRC error\n");
847 else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) {
848 DPRINTFN(DBG_RX, sc, "Michael MIC failure\n");
849
850 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
851 m = bf->bf_m;
852 m->m_pkthdr.rcvif = ifp;
853 m->m_pkthdr.len = m->m_len = len;
854 wh = mtod(m, struct ieee80211_frame *);
855
856 /* Report Michael MIC failures to net80211. */
857 ieee80211_notify_michael_failure(ic, wh, 0 /* XXX: keyix */);
858 }
859 ifp->if_ierrors++;
860 goto skip;
861 }
862
863 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
864 if (__predict_false(len < (int)IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
865 DPRINTFN(DBG_RX, sc, "corrupted descriptor length=%d\n", len);
866 ifp->if_ierrors++;
867 goto skip;
868 }
869
870 /* Allocate a new Rx buffer. */
871 m1 = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
872 if (__predict_false(m1 == NULL)) {
873 ic->ic_stats.is_rx_nobuf++;
874 ifp->if_ierrors++;
875 goto skip;
876 }
877
878 /* Sync and unmap the old Rx buffer. */
879 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
880 BUS_DMASYNC_POSTREAD);
881 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
882
883 /* Map the new Rx buffer. */
884 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
885 ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
886 if (__predict_false(error != 0)) {
887 m_freem(m1);
888
889 /* Remap the old Rx buffer or panic. */
890 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
891 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
892 BUS_DMA_NOWAIT | BUS_DMA_READ);
893 		KASSERT(error == 0);
894 ifp->if_ierrors++;
895 goto skip;
896 }
897
898 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
899 BUS_DMASYNC_PREREAD);
900
901 /* Write physical address of new Rx buffer. */
902 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
903
904 m = bf->bf_m;
905 bf->bf_m = m1;
906
907 /* Finalize mbuf. */
908 m->m_pkthdr.rcvif = ifp;
909 m->m_pkthdr.len = m->m_len = len;
910
911 /* Grab a reference to the source node. */
912 wh = mtod(m, struct ieee80211_frame *);
913 ni = ieee80211_find_rxnode(ic, (struct ieee80211_frame_min *)wh);
914
915 /* Remove any HW padding after the 802.11 header. */
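	/*
	 * The MAC aligns the frame body on a 32-bit boundary, so a header
	 * whose length is not a multiple of 4 (e.g. the 26-byte QoS data
	 * header) is followed by 2 bytes of padding.  Slide the header
	 * towards the body and trim the 2 leading bytes.
	 */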
916 if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
917 u_int hdrlen = ieee80211_anyhdrsize(wh);
918 if (hdrlen & 3) {
919 ovbcopy(wh, (uint8_t *)wh + 2, hdrlen);
920 m_adj(m, 2);
921 }
922 }
923 if (__predict_false(sc->sc_drvbpf != NULL))
924 ar5008_rx_radiotap(sc, m, ds);
925
926 /* Trim 802.11 FCS after radiotap. */
927 m_adj(m, -IEEE80211_CRC_LEN);
928
929 /* Send the frame to the 802.11 layer. */
930 rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
931 rstamp = ds->ds_status2;
932 ieee80211_input(ic, m, ni, rssi, rstamp);
933
934 /* Node is no longer needed. */
935 ieee80211_free_node(ni);
936
937 skip:
938 /* Unlink this descriptor from head. */
939 SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
940 memset(&ds->ds_status0, 0, 36); /* XXX Really needed? */
941 ds->ds_status8 &= ~AR_RXS8_DONE;
942 ds->ds_link = 0;
943
944 /* Re-use this descriptor and link it to tail. */
945 if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
946 ((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
947 else
948 AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
949 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
950 rxq->lastds = ds;
951
952 /* Re-enable Rx. */
953 AR_WRITE(sc, AR_CR, AR_CR_RXE);
954 AR_WRITE_BARRIER(sc);
955 return 0;
956 }
957
958 Static void
959 ar5008_rx_intr(struct athn_softc *sc)
960 {
961
962 while (ar5008_rx_process(sc) == 0)
963 continue;
964 }
965
966 Static int
967 ar5008_tx_process(struct athn_softc *sc, int qid)
968 {
969 struct ifnet *ifp = &sc->sc_if;
970 struct athn_txq *txq = &sc->sc_txq[qid];
971 struct athn_node *an;
972 struct athn_tx_buf *bf;
973 struct ar_tx_desc *ds;
974 uint8_t failcnt;
975
976 bf = SIMPLEQ_FIRST(&txq->head);
977 if (bf == NULL)
978 return ENOENT;
979 /* Get descriptor of last DMA segment. */
980 ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];
981
982 if (!(ds->ds_status9 & AR_TXS9_DONE))
983 return EBUSY;
984
985 SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
986 ifp->if_opackets++;
987
988 sc->sc_tx_timer = 0;
989
990 if (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES)
991 ifp->if_oerrors++;
992
993 if (ds->ds_status1 & AR_TXS1_UNDERRUN)
994 athn_inc_tx_trigger_level(sc);
995
996 an = (struct athn_node *)bf->bf_ni;
997 /*
998 * NB: the data fail count contains the number of un-acked tries
999 * for the final series used. We must add the number of tries for
1000 * each series that was fully processed.
1001 */
1002 failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT);
1003 /* NB: Assume two tries per series. */
1004 failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;
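	/*
	 * Example with the 2/2/2/4 try schedule programmed by ar5008_tx():
	 * a frame acknowledged on series 2 after one failed try there is
	 * charged 2 tries for each of series 0 and 1, so
	 * failcnt = 1 + 2 * 2 = 5.
	 */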
1005
1006 /* Update rate control statistics. */
1007 an->amn.amn_txcnt++;
1008 if (failcnt > 0)
1009 an->amn.amn_retrycnt++;
1010
1011 DPRINTFN(DBG_TX, sc, "Tx done qid=%d status1=%d fail count=%d\n",
1012 qid, ds->ds_status1, failcnt);
1013
1014 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1015 BUS_DMASYNC_POSTWRITE);
1016 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1017
1018 m_freem(bf->bf_m);
1019 bf->bf_m = NULL;
1020 ieee80211_free_node(bf->bf_ni);
1021 bf->bf_ni = NULL;
1022
1023 /* Link Tx buffer back to global free list. */
1024 SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
1025 return 0;
1026 }
1027
1028 Static void
1029 ar5008_tx_intr(struct athn_softc *sc)
1030 {
1031 struct ifnet *ifp = &sc->sc_if;
1032 uint16_t mask = 0;
1033 uint32_t reg;
1034 int qid;
1035
1036 reg = AR_READ(sc, AR_ISR_S0_S);
1037 mask |= MS(reg, AR_ISR_S0_QCU_TXOK);
1038 mask |= MS(reg, AR_ISR_S0_QCU_TXDESC);
1039
1040 reg = AR_READ(sc, AR_ISR_S1_S);
1041 mask |= MS(reg, AR_ISR_S1_QCU_TXERR);
1042 mask |= MS(reg, AR_ISR_S1_QCU_TXEOL);
1043
1044 DPRINTFN(DBG_TX, sc, "Tx interrupt mask=0x%x\n", mask);
1045 for (qid = 0; mask != 0; mask >>= 1, qid++) {
1046 if (mask & 1)
1047 while (ar5008_tx_process(sc, qid) == 0);
1048 }
1049 if (!SIMPLEQ_EMPTY(&sc->sc_txbufs)) {
1050 ifp->if_flags &= ~IFF_OACTIVE;
1051 ifp->if_start(ifp);
1052 }
1053 }
1054
1055 #ifndef IEEE80211_STA_ONLY
1056 /*
1057 * Process Software Beacon Alert interrupts.
1058 */
1059 Static int
1060 ar5008_swba_intr(struct athn_softc *sc)
1061 {
1062 struct ieee80211com *ic = &sc->sc_ic;
1063 struct ifnet *ifp = &sc->sc_if;
1064 struct ieee80211_node *ni = ic->ic_bss;
1065 struct athn_tx_buf *bf = sc->sc_bcnbuf;
1066 struct ieee80211_frame *wh;
1067 struct ieee80211_beacon_offsets bo;
1068 struct ar_tx_desc *ds;
1069 struct mbuf *m;
1070 uint8_t ridx, hwrate;
1071 int error, totlen;
1072
1073 #if notyet
1074 if (ic->ic_tim_mcast_pending &&
1075 IF_IS_EMPTY(&ni->ni_savedq) &&
1076 SIMPLEQ_EMPTY(&sc->sc_txq[ATHN_QID_CAB].head))
1077 ic->ic_tim_mcast_pending = 0;
1078 #endif
1079 if (ic->ic_dtim_count == 0)
1080 ic->ic_dtim_count = ic->ic_dtim_period - 1;
1081 else
1082 ic->ic_dtim_count--;
1083
1084 /* Make sure previous beacon has been sent. */
1085 if (athn_tx_pending(sc, ATHN_QID_BEACON)) {
1086 DPRINTFN(DBG_INTR, sc, "beacon stuck\n");
1087 return EBUSY;
1088 }
1089 /* Get new beacon. */
1090 m = ieee80211_beacon_alloc(ic, ic->ic_bss, &bo);
1091 if (__predict_false(m == NULL))
1092 return ENOBUFS;
1093 /* Assign sequence number. */
1094 /* XXX: use non-QoS tid? */
1095 wh = mtod(m, struct ieee80211_frame *);
1096 *(uint16_t *)&wh->i_seq[0] =
1097 htole16(ic->ic_bss->ni_txseqs[0] << IEEE80211_SEQ_SEQ_SHIFT);
1098 ic->ic_bss->ni_txseqs[0]++;
1099
1100 /* Unmap and free old beacon if any. */
1101 if (__predict_true(bf->bf_m != NULL)) {
1102 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,
1103 bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);
1104 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1105 m_freem(bf->bf_m);
1106 bf->bf_m = NULL;
1107 }
1108 /* DMA map new beacon. */
1109 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1110 BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1111 if (__predict_false(error != 0)) {
1112 m_freem(m);
1113 return error;
1114 }
1115 bf->bf_m = m;
1116
1117 /* Setup Tx descriptor (simplified ar5008_tx()). */
1118 ds = bf->bf_descs;
1119 memset(ds, 0, sizeof(*ds));
1120
1121 totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
1122 ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen);
1123 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER);
1124 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON);
1125 ds->ds_ctl1 |= AR_TXC1_NO_ACK;
1126 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR);
1127
1128 /* Write number of tries. */
1129 ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1);
1130
1131 /* Write Tx rate. */
1132 ridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
1133 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
1134 hwrate = athn_rates[ridx].hwrate;
1135 ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate);
1136
1137 /* Write Tx chains. */
1138 ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask);
1139
1140 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
1141 /* Segment length must be a multiple of 4. */
1142 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
1143 (bf->bf_map->dm_segs[0].ds_len + 3) & ~3);
1144
1145 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1146 BUS_DMASYNC_PREWRITE);
1147
1148 /* Stop Tx DMA before putting the new beacon on the queue. */
1149 athn_stop_tx_dma(sc, ATHN_QID_BEACON);
1150
1151 AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr);
1152
1153 	for (;;) {
1154 if (SIMPLEQ_EMPTY(&sc->sc_txbufs))
1155 break;
1156
1157 IF_DEQUEUE(&ni->ni_savedq, m);
1158 if (m == NULL)
1159 break;
1160 if (!IF_IS_EMPTY(&ni->ni_savedq)) {
1161 /* more queued frames, set the more data bit */
1162 wh = mtod(m, struct ieee80211_frame *);
1163 wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA;
1164 }
1165
1166 if (sc->sc_ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) {
1167 ieee80211_free_node(ni);
1168 ifp->if_oerrors++;
1169 break;
1170 }
1171 }
1172
1173 /* Kick Tx. */
1174 AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON);
1175 AR_WRITE_BARRIER(sc);
1176 return 0;
1177 }
1178 #endif
1179
1180 Static int
1181 ar5008_intr(struct athn_softc *sc)
1182 {
1183 uint32_t intr, intr5, sync;
1184
1185 /* Get pending interrupts. */
1186 intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE);
1187 if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) {
1188 intr = AR_READ(sc, AR_INTR_SYNC_CAUSE);
1189 if (intr == AR_INTR_SPURIOUS || (intr & sc->sc_isync) == 0)
1190 return 0; /* Not for us. */
1191 }
1192
1193 if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
1194 (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
1195 intr = AR_READ(sc, AR_ISR);
1196 else
1197 intr = 0;
1198 sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->sc_isync;
1199 if (intr == 0 && sync == 0)
1200 return 0; /* Not for us. */
1201
1202 if (intr != 0) {
1203 if (intr & AR_ISR_BCNMISC) {
1204 uint32_t intr2 = AR_READ(sc, AR_ISR_S2);
1205 #if notyet
1206 if (intr2 & AR_ISR_S2_TIM)
1207 /* TBD */;
1208 if (intr2 & AR_ISR_S2_TSFOOR)
1209 /* TBD */;
1210 #else
1211 __USE(intr2);
1212 #endif
1213 }
1214 intr = AR_READ(sc, AR_ISR_RAC);
1215 if (intr == AR_INTR_SPURIOUS)
1216 return 1;
1217
1218 #ifndef IEEE80211_STA_ONLY
1219 if (intr & AR_ISR_SWBA)
1220 ar5008_swba_intr(sc);
1221 #endif
1222 if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
1223 ar5008_rx_intr(sc);
1224 if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN))
1225 ar5008_rx_intr(sc);
1226
1227 if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC |
1228 AR_ISR_TXERR | AR_ISR_TXEOL))
1229 ar5008_tx_intr(sc);
1230
1231 intr5 = AR_READ(sc, AR_ISR_S5_S);
1232 if (intr & AR_ISR_GENTMR) {
1233 if (intr5 & AR_ISR_GENTMR) {
1234 DPRINTFN(DBG_INTR, sc,
1235 "GENTMR trigger=%d thresh=%d\n",
1236 MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
1237 MS(intr5, AR_ISR_S5_GENTIMER_THRESH));
1238 }
1239 }
1240 #if notyet
1241 if (intr5 & AR_ISR_S5_TIM_TIMER) {
1242 /* TBD */;
1243 }
1244 #endif
1245 }
1246 if (sync != 0) {
1247 #if notyet
1248 if (sync &
1249 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) {
1250 /* TBD */;
1251 }
1252 #endif
1253 if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
1254 AR_WRITE(sc, AR_RC, AR_RC_HOSTIF);
1255 AR_WRITE(sc, AR_RC, 0);
1256 }
1257
1258 if ((sc->sc_flags & ATHN_FLAG_RFSILENT) &&
1259 (sync & AR_INTR_SYNC_GPIO_PIN(sc->sc_rfsilent_pin))) {
1260 pmf_event_inject(sc->sc_dev, PMFE_RADIO_OFF);
1261 return 1;
1262 }
1263
1264 AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync);
1265 (void)AR_READ(sc, AR_INTR_SYNC_CAUSE);
1266 }
1267 return 1;
1268 }
1269
1270 Static int
1271 ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
1272 int txflags)
1273 {
1274 struct ieee80211com *ic = &sc->sc_ic;
1275 struct ieee80211_key *k = NULL;
1276 struct ieee80211_frame *wh;
1277 struct athn_series series[4];
1278 struct ar_tx_desc *ds, *lastds;
1279 struct athn_txq *txq;
1280 struct athn_tx_buf *bf;
1281 struct athn_node *an = (void *)ni;
1282 struct mbuf *m1;
1283 uint16_t qos;
1284 uint8_t txpower, type, encrtype, ridx[4];
1285 int i, error, totlen, hasqos, qid;
1286
1287 /* Grab a Tx buffer from our global free list. */
1288 bf = SIMPLEQ_FIRST(&sc->sc_txbufs);
1289 KASSERT(bf != NULL);
1290
1291 /* Map 802.11 frame type to hardware frame type. */
1292 wh = mtod(m, struct ieee80211_frame *);
1293 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
1294 IEEE80211_FC0_TYPE_MGT) {
1295 /* NB: Beacons do not use ar5008_tx(). */
1296 if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
1297 IEEE80211_FC0_SUBTYPE_PROBE_RESP)
1298 type = AR_FRAME_TYPE_PROBE_RESP;
1299 else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
1300 IEEE80211_FC0_SUBTYPE_ATIM)
1301 type = AR_FRAME_TYPE_ATIM;
1302 else
1303 type = AR_FRAME_TYPE_NORMAL;
1304 }
1305 else if ((wh->i_fc[0] &
1306 (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) ==
1307 (IEEE80211_FC0_TYPE_CTL | IEEE80211_FC0_SUBTYPE_PS_POLL)) {
1308 type = AR_FRAME_TYPE_PSPOLL;
1309 }
1310 else
1311 type = AR_FRAME_TYPE_NORMAL;
1312
1313 if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) {
1314 k = ieee80211_crypto_encap(ic, ni, m);
1315 if (k == NULL)
1316 return ENOBUFS;
1317
1318 /* packet header may have moved, reset our local pointer */
1319 wh = mtod(m, struct ieee80211_frame *);
1320 }
1321
1322 /* XXX 2-byte padding for QoS and 4-addr headers. */
1323
1324 /* Select the HW Tx queue to use for this frame. */
1325 if ((hasqos = ieee80211_has_qos(wh))) {
1326 #ifdef notyet_edca
1327 uint8_t tid;
1328
1329 qos = ieee80211_get_qos(wh);
1330 tid = qos & IEEE80211_QOS_TID;
1331 qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
1332 #else
1333 qos = ieee80211_get_qos(wh);
1334 qid = ATHN_QID_AC_BE;
1335 #endif /* notyet_edca */
1336 }
1337 else if (type == AR_FRAME_TYPE_PSPOLL) {
1338 qos = 0;
1339 qid = ATHN_QID_PSPOLL;
1340 }
1341 else if (txflags & ATHN_TXFLAG_CAB) {
1342 qos = 0;
1343 qid = ATHN_QID_CAB;
1344 }
1345 else {
1346 qos = 0;
1347 qid = ATHN_QID_AC_BE;
1348 }
1349 txq = &sc->sc_txq[qid];
1350
1351 /* Select the transmit rates to use for this frame. */
1352 if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
1353 (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) !=
1354 IEEE80211_FC0_TYPE_DATA) {
1355 /* Use lowest rate for all tries. */
1356 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1357 (ic->ic_curmode == IEEE80211_MODE_11A) ?
1358 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
1359 }
1360 else if (ic->ic_fixed_rate != -1) {
1361 /* Use same fixed rate for all tries. */
1362 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1363 sc->sc_fixed_ridx;
1364 }
1365 else {
1366 int txrate = ni->ni_txrate;
1367 /* Use fallback table of the node. */
1368 for (i = 0; i < 4; i++) {
1369 ridx[i] = an->ridx[txrate];
1370 txrate = an->fallback[txrate];
1371 }
1372 }
1373
1374 if (__predict_false(sc->sc_drvbpf != NULL)) {
1375 struct athn_tx_radiotap_header *tap = &sc->sc_txtap;
1376
1377 tap->wt_flags = 0;
1378 /* Use initial transmit rate. */
1379 tap->wt_rate = athn_rates[ridx[0]].rate;
1380 tap->wt_chan_freq = htole16(ic->ic_curchan->ic_freq);
1381 tap->wt_chan_flags = htole16(ic->ic_curchan->ic_flags);
1382 // XXX tap->wt_hwqueue = qid;
1383 if (ridx[0] != ATHN_RIDX_CCK1 &&
1384 (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1385 tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
1386
1387 bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_txtap_len, m);
1388 }
1389
1390 /* DMA map mbuf. */
1391 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1392 BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1393 if (__predict_false(error != 0)) {
1394 if (error != EFBIG) {
1395 aprint_error_dev(sc->sc_dev,
1396 "can't map mbuf (error %d)\n", error);
1397 m_freem(m);
1398 return error;
1399 }
1400 /*
1401 * DMA mapping requires too many DMA segments; linearize
1402 * mbuf in kernel virtual address space and retry.
1403 */
1404 MGETHDR(m1, M_DONTWAIT, MT_DATA);
1405 if (m1 == NULL) {
1406 m_freem(m);
1407 return ENOBUFS;
1408 }
1409 if (m->m_pkthdr.len > (int)MHLEN) {
1410 MCLGET(m1, M_DONTWAIT);
1411 if (!(m1->m_flags & M_EXT)) {
1412 m_freem(m);
1413 m_freem(m1);
1414 return ENOBUFS;
1415 }
1416 }
1417 m_copydata(m, 0, m->m_pkthdr.len, mtod(m1, void *));
1418 m1->m_pkthdr.len = m1->m_len = m->m_pkthdr.len;
1419 m_freem(m);
1420 m = m1;
1421
1422 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1423 BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1424 if (error != 0) {
1425 aprint_error_dev(sc->sc_dev,
1426 "can't map mbuf (error %d)\n", error);
1427 m_freem(m);
1428 return error;
1429 }
1430 }
1431 bf->bf_m = m;
1432 bf->bf_ni = ni;
1433 bf->bf_txflags = txflags;
1434
1435 wh = mtod(m, struct ieee80211_frame *);
1436
1437 totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
1438
1439 /* Clear all Tx descriptors that we will use. */
1440 memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds));
1441
1442 /* Setup first Tx descriptor. */
1443 ds = bf->bf_descs;
1444
1445 ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK;
1446 txpower = AR_MAX_RATE_POWER; /* Get from per-rate registers. */
1447 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower);
1448
1449 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type);
1450
1451 if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
1452 (hasqos && (qos & IEEE80211_QOS_ACKPOLICY_MASK) ==
1453 IEEE80211_QOS_ACKPOLICY_NOACK))
1454 ds->ds_ctl1 |= AR_TXC1_NO_ACK;
1455 #if notyet
1456 if (0 && k != NULL) {
1457 uintptr_t entry;
1458
1459 /*
1460 * Map 802.11 cipher to hardware encryption type and
1461 * compute MIC+ICV overhead.
1462 */
1463 totlen += k->wk_keylen;
1464 switch (k->wk_cipher->ic_cipher) {
1465 case IEEE80211_CIPHER_WEP:
1466 encrtype = AR_ENCR_TYPE_WEP;
1467 break;
1468 case IEEE80211_CIPHER_TKIP:
1469 encrtype = AR_ENCR_TYPE_TKIP;
1470 break;
1471 case IEEE80211_CIPHER_AES_OCB:
1472 case IEEE80211_CIPHER_AES_CCM:
1473 encrtype = AR_ENCR_TYPE_AES;
1474 break;
1475 default:
1476 panic("unsupported cipher");
1477 }
1478 /*
1479 * NB: The key cache entry index is stored in the key
1480 * private field when the key is installed.
1481 */
1482 entry = (uintptr_t)k->k_priv;
1483 ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry);
1484 ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID;
1485 }
1486 else
1487 #endif
1488 encrtype = AR_ENCR_TYPE_CLEAR;
1489 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype);
1490
1491 /* Check if frame must be protected using RTS/CTS or CTS-to-self. */
1492 if (!IEEE80211_IS_MULTICAST(wh->i_addr1)) {
1493 /* NB: Group frames are sent using CCK in 802.11b/g. */
1494 if (totlen > ic->ic_rtsthreshold) {
1495 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
1496 }
1497 else if ((ic->ic_flags & IEEE80211_F_USEPROT) &&
1498 athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) {
1499 if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
1500 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
1501 else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
1502 ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE;
1503 }
1504 }
1505 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
1506 /* Disable multi-rate retries when protection is used. */
1507 ridx[1] = ridx[2] = ridx[3] = ridx[0];
1508 }
1509 /* Setup multi-rate retries. */
1510 for (i = 0; i < 4; i++) {
1511 series[i].hwrate = athn_rates[ridx[i]].hwrate;
1512 if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
1513 ridx[i] != ATHN_RIDX_CCK1 &&
1514 (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1515 series[i].hwrate |= 0x04;
1516 series[i].dur = 0;
1517 }
1518 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
1519 /* Compute duration for each series. */
1520 for (i = 0; i < 4; i++) {
1521 series[i].dur = athn_txtime(sc, IEEE80211_ACK_LEN,
1522 athn_rates[ridx[i]].rspridx, ic->ic_flags);
1523 }
1524 }
1525
1526 /* Write number of tries for each series. */
1527 ds->ds_ctl2 =
1528 SM(AR_TXC2_XMIT_DATA_TRIES0, 2) |
1529 SM(AR_TXC2_XMIT_DATA_TRIES1, 2) |
1530 SM(AR_TXC2_XMIT_DATA_TRIES2, 2) |
1531 SM(AR_TXC2_XMIT_DATA_TRIES3, 4);
1532
1533 /* Tell HW to update duration field in 802.11 header. */
1534 if (type != AR_FRAME_TYPE_PSPOLL)
1535 ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA;
1536
1537 /* Write Tx rate for each series. */
1538 ds->ds_ctl3 =
1539 SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) |
1540 SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) |
1541 SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) |
1542 SM(AR_TXC3_XMIT_RATE3, series[3].hwrate);
1543
1544 /* Write duration for each series. */
1545 ds->ds_ctl4 =
1546 SM(AR_TXC4_PACKET_DUR0, series[0].dur) |
1547 SM(AR_TXC4_PACKET_DUR1, series[1].dur);
1548 ds->ds_ctl5 =
1549 SM(AR_TXC5_PACKET_DUR2, series[2].dur) |
1550 SM(AR_TXC5_PACKET_DUR3, series[3].dur);
1551
1552 /* Use the same Tx chains for all tries. */
1553 ds->ds_ctl7 =
1554 SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask) |
1555 SM(AR_TXC7_CHAIN_SEL1, sc->sc_txchainmask) |
1556 SM(AR_TXC7_CHAIN_SEL2, sc->sc_txchainmask) |
1557 SM(AR_TXC7_CHAIN_SEL3, sc->sc_txchainmask);
1558 #ifdef notyet
1559 #ifndef IEEE80211_NO_HT
1560 /* Use the same short GI setting for all tries. */
1561 if (ic->ic_flags & IEEE80211_F_SHGI)
1562 ds->ds_ctl7 |= AR_TXC7_GI0123;
1563 /* Use the same channel width for all tries. */
1564 if (ic->ic_flags & IEEE80211_F_CBW40)
1565 ds->ds_ctl7 |= AR_TXC7_2040_0123;
1566 #endif
1567 #endif
1568
1569 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
1570 uint8_t protridx, hwrate;
1571 uint16_t dur = 0;
1572
1573 /* Use the same protection mode for all tries. */
1574 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
1575 ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01;
1576 ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23;
1577 }
1578 /* Select protection rate (suboptimal but ok). */
1579 protridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
1580 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2;
1581 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
1582 /* Account for CTS duration. */
1583 dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1584 athn_rates[protridx].rspridx, ic->ic_flags);
1585 }
1586 dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
1587 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
1588 /* Account for ACK duration. */
1589 dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1590 athn_rates[ridx[0]].rspridx, ic->ic_flags);
1591 }
1592 /* Write protection frame duration and rate. */
1593 ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur);
1594 hwrate = athn_rates[protridx].hwrate;
1595 if (protridx == ATHN_RIDX_CCK2 &&
1596 (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1597 hwrate |= 0x04;
1598 ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate);
1599 }
1600
1601 /* Finalize first Tx descriptor and fill others (if any). */
1602 ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen);
1603
1604 lastds = NULL; /* XXX: gcc */
1605 for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
1606 ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
1607 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
1608 bf->bf_map->dm_segs[i].ds_len);
1609
1610 if (i != bf->bf_map->dm_nsegs - 1)
1611 ds->ds_ctl1 |= AR_TXC1_MORE;
1612 ds->ds_link = 0;
1613
1614 /* Chain Tx descriptor. */
1615 if (i != 0)
1616 lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
1617 lastds = ds;
1618 }
1619 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1620 BUS_DMASYNC_PREWRITE);
1621
1622 if (!SIMPLEQ_EMPTY(&txq->head))
1623 ((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
1624 else
1625 AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr);
1626 txq->lastds = lastds;
1627 SIMPLEQ_REMOVE_HEAD(&sc->sc_txbufs, bf_list);
1628 SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list);
1629
1630 ds = bf->bf_descs;
1631 DPRINTFN(DBG_TX, sc,
1632 "Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
1633 qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3);
1634
1635 /* Kick Tx. */
1636 AR_WRITE(sc, AR_Q_TXE, 1 << qid);
1637 AR_WRITE_BARRIER(sc);
1638 return 0;
1639 }
1640
1641 Static void
1642 ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
1643 {
1644 uint32_t reg;
1645
1646 reg = IEEE80211_IS_CHAN_2GHZ(c) ?
1647 AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1648 if (!AR_SREV_9280_10_OR_LATER(sc)) {
1649 reg |= IEEE80211_IS_CHAN_2GHZ(c) ?
1650 AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ;
1651 }
1652 else if (IEEE80211_IS_CHAN_5GHZ(c) &&
1653 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
1654 reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE;
1655 }
1656 AR_WRITE(sc, AR_PHY_MODE, reg);
1657 AR_WRITE_BARRIER(sc);
1658 }
1659
1660 static __inline uint32_t
1661 ar5008_synth_delay(struct athn_softc *sc)
1662 {
1663 uint32_t synth_delay;
1664
1665 synth_delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY);
1666 if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
1667 synth_delay = (synth_delay * 4) / 22;
1668 else
1669 synth_delay = synth_delay / 10; /* in 100ns steps */
1670 return synth_delay;
1671 }
1672
1673 Static int
1674 ar5008_rf_bus_request(struct athn_softc *sc)
1675 {
1676 int ntries;
1677
1678 /* Request RF Bus grant. */
1679 AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1680 for (ntries = 0; ntries < 10000; ntries++) {
1681 if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN)
1682 return 0;
1683 DELAY(10);
1684 }
1685 	DPRINTFN(DBG_RF, sc, "could not kill baseband Rx\n");
1686 return ETIMEDOUT;
1687 }
1688
1689 Static void
1690 ar5008_rf_bus_release(struct athn_softc *sc)
1691 {
1692
1693 /* Wait for the synthesizer to settle. */
1694 DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc));
1695
1696 /* Release the RF Bus grant. */
1697 AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0);
1698 AR_WRITE_BARRIER(sc);
1699 }
1700
1701 Static void
1702 ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
1703 struct ieee80211_channel *extc)
1704 {
1705 uint32_t phy;
1706
1707 if (AR_SREV_9285_10_OR_LATER(sc))
1708 phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO;
1709 else
1710 phy = 0;
1711 phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 |
1712 AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH;
1713 #ifndef IEEE80211_NO_HT
1714 if (extc != NULL) {
1715 phy |= AR_PHY_FC_DYN2040_EN;
1716 if (extc > c) /* XXX */
1717 phy |= AR_PHY_FC_DYN2040_PRI_CH;
1718 }
1719 #endif
1720 AR_WRITE(sc, AR_PHY_TURBO, phy);
1721
1722 AR_WRITE(sc, AR_2040_MODE,
1723 (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0);
1724
1725 /* Set global transmit timeout. */
1726 AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25));
1727 /* Set carrier sense timeout. */
1728 AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15));
1729 AR_WRITE_BARRIER(sc);
1730 }
1731
1732 Static void
1733 ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
1734 struct ieee80211_channel *extc)
1735 {
1736 uint32_t coeff, exp, man, reg;
1737
1738 /* Set Delta Slope (exponent and mantissa). */
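	/*
	 * coeff is 100 / ic_freq (MHz) in 24-bit fixed point;
	 * athn_get_delta_slope() splits it into the mantissa/exponent pair
	 * expected by the baseband timing registers.
	 */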
1739 coeff = (100 << 24) / c->ic_freq;
1740 athn_get_delta_slope(coeff, &exp, &man);
1741 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1742
1743 reg = AR_READ(sc, AR_PHY_TIMING3);
1744 reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp);
1745 reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man);
1746 AR_WRITE(sc, AR_PHY_TIMING3, reg);
1747
1748 /* For Short GI, coeff is 9/10 that of normal coeff. */
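	/*
	 * Short GI shrinks the OFDM symbol from 4.0us to 3.6us, hence the
	 * 9/10 scaling.
	 */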
1749 coeff = (9 * coeff) / 10;
1750 athn_get_delta_slope(coeff, &exp, &man);
1751 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1752
1753 reg = AR_READ(sc, AR_PHY_HALFGI);
1754 reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp);
1755 reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man);
1756 AR_WRITE(sc, AR_PHY_HALFGI, reg);
1757 AR_WRITE_BARRIER(sc);
1758 }
1759
1760 Static void
1761 ar5008_enable_antenna_diversity(struct athn_softc *sc)
1762 {
1763
1764 AR_SETBITS(sc, AR_PHY_CCK_DETECT,
1765 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV);
1766 AR_WRITE_BARRIER(sc);
1767 }
1768
1769 Static void
1770 ar5008_init_baseband(struct athn_softc *sc)
1771 {
1772 uint32_t synth_delay;
1773
1774 synth_delay = ar5008_synth_delay(sc);
1775 /* Activate the PHY (includes baseband activate and synthesizer on). */
1776 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1777 AR_WRITE_BARRIER(sc);
1778 DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay);
1779 }
1780
1781 Static void
1782 ar5008_disable_phy(struct athn_softc *sc)
1783 {
1784
1785 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1786 AR_WRITE_BARRIER(sc);
1787 }
1788
1789 Static void
1790 ar5008_init_chains(struct athn_softc *sc)
1791 {
1792
1793 if (sc->sc_rxchainmask == 0x5 || sc->sc_txchainmask == 0x5)
1794 AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN);
1795
1796 /* Setup chain masks. */
1797 if (sc->sc_mac_ver <= AR_SREV_VERSION_9160 &&
1798 (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5)) {
1799 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, 0x7);
1800 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7);
1801 }
1802 else {
1803 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
1804 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
1805 }
1806 AR_WRITE(sc, AR_SELFGEN_MASK, sc->sc_txchainmask);
1807 AR_WRITE_BARRIER(sc);
1808 }
1809
1810 Static void
1811 ar5008_set_rxchains(struct athn_softc *sc)
1812 {
1813
1814 if (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5) {
1815 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
1816 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
1817 AR_WRITE_BARRIER(sc);
1818 }
1819 }
1820
1821 #ifdef notused
1822 Static void
1823 ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
1824 {
1825 /* Sign-extends 9-bit value (assumes upper bits are zeroes). */
1826 #define SIGN_EXT(v) (((v) ^ 0x100) - 0x100)
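/* For example, SIGN_EXT(0x1ff) = (0x1ff ^ 0x100) - 0x100 = -1. */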
1827 uint32_t reg;
1828 int i;
1829
1830 for (i = 0; i < sc->sc_nrxchains; i++) {
1831 reg = AR_READ(sc, AR_PHY_CCA(i));
1832 if (AR_SREV_9280_10_OR_LATER(sc))
1833 nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR);
1834 else
1835 nf[i] = MS(reg, AR_PHY_MINCCA_PWR);
1836 nf[i] = SIGN_EXT(nf[i]);
1837
1838 reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
1839 if (AR_SREV_9280_10_OR_LATER(sc))
1840 nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR);
1841 else
1842 nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR);
1843 nf_ext[i] = SIGN_EXT(nf_ext[i]);
1844 }
1845 #undef SIGN_EXT
1846 }
1847 #endif /* notused */
1848
1849 #ifdef notused
1850 Static void
1851 ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
1852 {
1853 uint32_t reg;
1854 int i;
1855
1856 for (i = 0; i < sc->sc_nrxchains; i++) {
1857 reg = AR_READ(sc, AR_PHY_CCA(i));
1858 reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]);
1859 AR_WRITE(sc, AR_PHY_CCA(i), reg);
1860
1861 reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
1862 reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]);
1863 AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg);
1864 }
1865 AR_WRITE_BARRIER(sc);
1866 }
1867 #endif /* notused */
1868
1869 #ifdef notused
1870 Static void
1871 ar5008_get_noisefloor(struct athn_softc *sc, struct ieee80211_channel *c)
1872 {
1873 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1874 int i;
1875
1876 if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) {
1877 /* Noisefloor calibration not finished. */
1878 return;
1879 }
1880 /* Noisefloor calibration is finished. */
1881 ar5008_read_noisefloor(sc, nf, nf_ext);
1882
1883 /* Update noisefloor history. */
1884 for (i = 0; i < sc->sc_nrxchains; i++) {
1885 sc->sc_nf_hist[sc->sc_nf_hist_cur].nf[i] = nf[i];
1886 sc->sc_nf_hist[sc->sc_nf_hist_cur].nf_ext[i] = nf_ext[i];
1887 }
1888 if (++sc->sc_nf_hist_cur >= ATHN_NF_CAL_HIST_MAX)
1889 sc->sc_nf_hist_cur = 0;
1890 }
1891 #endif /* notused */
1892
1893 #ifdef notused
1894 Static void
1895 ar5008_bb_load_noisefloor(struct athn_softc *sc)
1896 {
1897 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1898 int i, ntries;
1899
1900 /* Write filtered noisefloor values. */
1901 for (i = 0; i < sc->sc_nrxchains; i++) {
1902 nf[i] = sc->sc_nf_priv[i] * 2;
1903 nf_ext[i] = sc->sc_nf_ext_priv[i] * 2;
1904 }
1905 ar5008_write_noisefloor(sc, nf, nf_ext);
1906
1907 /* Load filtered noisefloor values into baseband. */
1908 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1909 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1910 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1911 /* Wait for load to complete. */
1912 for (ntries = 0; ntries < 1000; ntries++) {
1913 if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF))
1914 break;
1915 DELAY(50);
1916 }
1917 if (ntries == 1000) {
1918 DPRINTFN(DBG_RF, sc, "failed to load noisefloor values\n");
1919 return;
1920 }
1921
1922 /* Restore noisefloor values to initial (max) values. */
1923 for (i = 0; i < AR_MAX_CHAINS; i++)
1924 nf[i] = nf_ext[i] = -50 * 2;
1925 ar5008_write_noisefloor(sc, nf, nf_ext);
1926 }
1927 #endif /* notused */
1928
1929 #ifdef notused
1930 Static void
1931 ar5008_noisefloor_calib(struct athn_softc *sc)
1932 {
1933
1934 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1935 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1936 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1937 AR_WRITE_BARRIER(sc);
1938 }
1939 #endif /* notused */
1940
1941 Static void
1942 ar5008_do_noisefloor_calib(struct athn_softc *sc)
1943 {
1944
1945 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1946 AR_WRITE_BARRIER(sc);
1947 }
1948
1949 Static void
1950 ar5008_do_calib(struct athn_softc *sc)
1951 {
1952 uint32_t mode, reg;
1953 int log;
1954
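	/*
	 * LOG_COUNT_MAX apparently sets the log2 of the number of samples
	 * accumulated per calibration cycle; compare the matching "count"
	 * computation in ar5008_calib_adc_dc_off().
	 */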
1955 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0);
1956 log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2;
1957 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log);
1958 AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg);
1959
1960 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
1961 mode = AR_PHY_CALMODE_ADC_GAIN;
1962 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
1963 mode = AR_PHY_CALMODE_ADC_DC_PER;
1964 else /* ATHN_CAL_IQ */
1965 mode = AR_PHY_CALMODE_IQ;
1966 AR_WRITE(sc, AR_PHY_CALMODE, mode);
1967
1968 DPRINTFN(DBG_RF, sc, "starting calibration mode=0x%x\n", mode);
1969 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL);
1970 AR_WRITE_BARRIER(sc);
1971 }
1972
1973 Static void
1974 ar5008_next_calib(struct athn_softc *sc)
1975 {
1976
1977 /* Check if we have any calibration in progress. */
1978 if (sc->sc_cur_calib_mask != 0) {
1979 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) &
1980 AR_PHY_TIMING_CTRL4_DO_CAL)) {
1981 /* Calibration completed for current sample. */
1982 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
1983 ar5008_calib_adc_gain(sc);
1984 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
1985 ar5008_calib_adc_dc_off(sc);
1986 else /* ATHN_CAL_IQ */
1987 ar5008_calib_iq(sc);
1988 }
1989 }
1990 }
1991
1992 Static void
1993 ar5008_calib_iq(struct athn_softc *sc)
1994 {
1995 struct athn_iq_cal *cal;
1996 uint32_t reg, i_coff_denom, q_coff_denom;
1997 int32_t i_coff, q_coff;
1998 int i, iq_corr_neg;
1999
2000 for (i = 0; i < AR_MAX_CHAINS; i++) {
2001 cal = &sc->sc_calib.iq[i];
2002
2003 /* Accumulate IQ calibration measures (clear on read). */
2004 cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2005 cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2006 cal->iq_corr_meas +=
2007 (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2008 }
2009 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2010 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2011 /* Not enough samples accumulated, continue. */
2012 ar5008_do_calib(sc);
2013 return;
2014 }
2015
2016 for (i = 0; i < sc->sc_nrxchains; i++) {
2017 cal = &sc->sc_calib.iq[i];
2018
2019 if (cal->pwr_meas_q == 0)
2020 continue;
2021
2022 if ((iq_corr_neg = cal->iq_corr_meas < 0))
2023 cal->iq_corr_meas = -cal->iq_corr_meas;
2024
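		/*
		 * i_coff is a 6-bit and q_coff a 5-bit signed correction
		 * written into AR_PHY_TIMING_CTRL4, hence the 0x3f masking
		 * and the [-16, 15] clamp below.
		 */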
2025 i_coff_denom =
2026 (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
2027 q_coff_denom = cal->pwr_meas_q / 64;
2028
2029 if (i_coff_denom == 0 || q_coff_denom == 0)
2030 continue; /* Prevents division by zero. */
2031
2032 i_coff = cal->iq_corr_meas / i_coff_denom;
2033 q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;
2034
2035 /* Negate i_coff if iq_corr_meas is positive. */
2036 if (!iq_corr_neg)
2037 i_coff = 0x40 - (i_coff & 0x3f);
2038 if (q_coff > 15)
2039 q_coff = 15;
2040 else if (q_coff <= -16)
2041 q_coff = -16; /* XXX Linux has a bug here? */
2042
2043 DPRINTFN(DBG_RF, sc, "IQ calibration for chain %d\n", i);
2044 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i));
2045 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff);
2046 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff);
2047 AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg);
2048 }
2049
2050 /* Apply new settings. */
2051 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,
2052 AR_PHY_TIMING_CTRL4_IQCORR_ENABLE);
2053 AR_WRITE_BARRIER(sc);
2054
2055 /* IQ calibration done. */
2056 sc->sc_cur_calib_mask &= ~ATHN_CAL_IQ;
2057 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2058 }
2059
2060 Static void
2061 ar5008_calib_adc_gain(struct athn_softc *sc)
2062 {
2063 struct athn_adc_cal *cal;
2064 uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2065 int i;
2066
2067 for (i = 0; i < AR_MAX_CHAINS; i++) {
2068 cal = &sc->sc_calib.adc_gain[i];
2069
2070 /* Accumulate ADC gain measures (clear on read). */
2071 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2072 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2073 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2074 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2075 }
2076 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2077 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2078 /* Not enough samples accumulated, continue. */
2079 ar5008_do_calib(sc);
2080 return;
2081 }
2082
2083 for (i = 0; i < sc->sc_nrxchains; i++) {
2084 cal = &sc->sc_calib.adc_gain[i];
2085
2086 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2087 continue; /* Prevents division by zero. */
2088
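		/*
		 * The gain mismatch is the even/odd (I) and odd/even (Q)
		 * power ratio expressed in units of 1/32, as written into
		 * the IGAIN/QGAIN fields below.
		 */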
2089 gain_mismatch_i =
2090 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2091 gain_mismatch_q =
2092 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
2093
2094 DPRINTFN(DBG_RF, sc, "ADC gain calibration for chain %d\n", i);
2095 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2096 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i);
2097 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q);
2098 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2099 }
2100
2101 /* Apply new settings. */
2102 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2103 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE);
2104 AR_WRITE_BARRIER(sc);
2105
2106 /* ADC gain calibration done. */
2107 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_GAIN;
2108 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2109 }
2110
2111 Static void
2112 ar5008_calib_adc_dc_off(struct athn_softc *sc)
2113 {
2114 struct athn_adc_cal *cal;
2115 int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
2116 uint32_t reg;
2117 int count, i;
2118
2119 for (i = 0; i < AR_MAX_CHAINS; i++) {
2120 cal = &sc->sc_calib.adc_dc_offset[i];
2121
2122 /* Accumulate ADC DC offset measures (clear on read). */
2123 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2124 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2125 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2126 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2127 }
2128 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2129 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2130 /* Not enough samples accumulated, continue. */
2131 ar5008_do_calib(sc);
2132 return;
2133 }
2134
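	/*
	 * Number of samples accumulated by the hardware per calibration
	 * cycle; the exponents (10 + 5 and 2 + 5) match the LOG_COUNT_MAX
	 * values programmed in ar5008_do_calib().
	 */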
2135 if (AR_SREV_9280_10_OR_LATER(sc))
2136 count = (1 << (10 + 5));
2137 else
2138 count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES;
2139 for (i = 0; i < sc->sc_nrxchains; i++) {
2140 cal = &sc->sc_calib.adc_dc_offset[i];
2141
2142 dc_offset_mismatch_i =
2143 (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
2144 dc_offset_mismatch_q =
2145 (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;
2146
2147 DPRINTFN(DBG_RF, sc, "ADC DC offset calibration for chain %d\n", i);
2148 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2149 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,
2150 dc_offset_mismatch_q);
2151 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,
2152 dc_offset_mismatch_i);
2153 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2154 }
2155
2156 /* Apply new settings. */
2157 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2158 AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE);
2159 AR_WRITE_BARRIER(sc);
2160
2161 /* ADC DC offset calibration done. */
2162 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_DC;
2163 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2164 }
2165
2166 PUBLIC void
2167 ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT])
2168 {
2169
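	/*
	 * Each AR_PHY_POWER_TX_RATE register packs four 6-bit per-rate
	 * transmit power values (presumably in half-dB steps, cf. the
	 * "tPow2x" calibration data).
	 */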
2170 AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,
2171 (power[ATHN_POWER_OFDM18 ] & 0x3f) << 24 |
2172 (power[ATHN_POWER_OFDM12 ] & 0x3f) << 16 |
2173 (power[ATHN_POWER_OFDM9 ] & 0x3f) << 8 |
2174 (power[ATHN_POWER_OFDM6 ] & 0x3f));
2175 AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,
2176 (power[ATHN_POWER_OFDM54 ] & 0x3f) << 24 |
2177 (power[ATHN_POWER_OFDM48 ] & 0x3f) << 16 |
2178 (power[ATHN_POWER_OFDM36 ] & 0x3f) << 8 |
2179 (power[ATHN_POWER_OFDM24 ] & 0x3f));
2180 AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,
2181 (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |
2182 (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |
2183 (power[ATHN_POWER_XR ] & 0x3f) << 8 |
2184 (power[ATHN_POWER_CCK1_LP ] & 0x3f));
2185 AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,
2186 (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |
2187 (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |
2188 (power[ATHN_POWER_CCK55_SP] & 0x3f) << 8 |
2189 (power[ATHN_POWER_CCK55_LP] & 0x3f));
2190 #ifndef IEEE80211_NO_HT
2191 AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,
2192 (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |
2193 (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |
2194 (power[ATHN_POWER_HT20(1) ] & 0x3f) << 8 |
2195 (power[ATHN_POWER_HT20(0) ] & 0x3f));
2196 AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,
2197 (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |
2198 (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 |
2199 (power[ATHN_POWER_HT20(5) ] & 0x3f) << 8 |
2200 (power[ATHN_POWER_HT20(4) ] & 0x3f));
2201 AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
2202 (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 |
2203 (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 |
2204 (power[ATHN_POWER_HT40(1) ] & 0x3f) << 8 |
2205 (power[ATHN_POWER_HT40(0) ] & 0x3f));
2206 AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
2207 (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 |
2208 (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 |
2209 (power[ATHN_POWER_HT40(5) ] & 0x3f) << 8 |
2210 (power[ATHN_POWER_HT40(4) ] & 0x3f));
2211 AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
2212 (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
2213 (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 |
2214 (power[ATHN_POWER_OFDM_DUP] & 0x3f) << 8 |
2215 (power[ATHN_POWER_CCK_DUP ] & 0x3f));
2216 #endif
2217 AR_WRITE_BARRIER(sc);
2218 }
2219
2220 PUBLIC void
2221 ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
2222 {
2223 uint32_t mask[4], reg;
2224 uint8_t m[62], p[62]; /* XXX use bit arrays? */
2225 int i, bit, cur;
2226
2227 /* Compute pilot mask. */
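	/*
	 * Entries cover bins -6000 to +6000 in steps of 100 (bin 0 is
	 * skipped); an entry is set when it lies within 100 of the
	 * requested spur bin.
	 */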
2228 cur = -6000;
2229 for (i = 0; i < 4; i++) {
2230 mask[i] = 0;
2231 for (bit = 0; bit < 30; bit++) {
2232 if (abs(cur - bin) < 100)
2233 mask[i] |= 1 << bit;
2234 cur += 100;
2235 }
2236 if (cur == 0) /* Skip entry "0". */
2237 cur = 100;
2238 }
2239 /* Write entries from -6000 to -3100. */
2240 AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
2241 AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
2242 /* Write entries from -3000 to -100. */
2243 AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
2244 AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
2245 /* Write entries from 100 to 3000. */
2246 AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
2247 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
2248 /* Write entries from 3100 to 6000. */
2249 AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
2250 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);
2251
2252 /* Compute viterbi mask. */
2253 for (cur = 6100; cur >= 0; cur -= 100)
2254 p[+cur / 100] = abs(cur - bin) < 75;
2255 for (cur = -100; cur >= -6100; cur -= 100)
2256 m[-cur / 100] = abs(cur - bin) < 75;
2257
2258 /* Write viterbi mask (XXX needs to be reworked). */
2259 reg =
2260 m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
2261 m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
2262 m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] << 8 |
2263 m[58] << 6 | m[59] << 4 | m[60] << 2 | m[61] << 0;
2264 AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
2265 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);
2266
2267 /* XXX m[48] should be m[38] ? */
2268 reg = m[31] << 28 | m[32] << 26 | m[33] << 24 |
2269 m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
2270 m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] << 8 |
2271 m[42] << 6 | m[43] << 4 | m[44] << 2 | m[45] << 0;
2272 AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
2273 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);
2274
2275 /* XXX This one is weird too. */
2276 reg =
2277 m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
2278 m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
2279 m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] << 8 |
2280 m[27] << 6 | m[28] << 4 | m[29] << 2 | m[30] << 0;
2281 AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
2282 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);
2283
2284 reg =
2285 m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
2286 m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
2287 m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] << 8 |
2288 m[12] << 6 | m[13] << 4 | m[14] << 2 | m[15] << 0;
2289 AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
2290 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);
2291
2292 reg = p[15] << 28 | p[14] << 26 | p[13] << 24 |
2293 p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
2294 p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] << 8 |
2295 p[ 4] << 6 | p[ 3] << 4 | p[ 2] << 2 | p[ 1] << 0;
2296 AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
2297 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);
2298
2299 reg = p[30] << 28 | p[29] << 26 | p[28] << 24 |
2300 p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
2301 p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] << 8 |
2302 p[19] << 6 | p[18] << 4 | p[17] << 2 | p[16] << 0;
2303 AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
2304 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);
2305
2306 reg = p[45] << 28 | p[44] << 26 | p[43] << 24 |
2307 p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
2308 p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] << 8 |
2309 p[34] << 6 | p[33] << 4 | p[32] << 2 | p[31] << 0;
2310 AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
2311 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);
2312
2313 reg =
2314 p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
2315 p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
2316 p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] << 8 |
2317 p[49] << 6 | p[48] << 4 | p[47] << 2 | p[46] << 0;
2318 AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
2319 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
2320 AR_WRITE_BARRIER(sc);
2321 }
2322
2323 Static void
2324 ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
2325 struct ieee80211_channel *extc)
2326 {
2327 struct athn_ops *ops = &sc->sc_ops;
2328 const struct athn_ini *ini = sc->sc_ini;
2329 const uint32_t *pvals;
2330 uint32_t reg;
2331 int i;
2332
2333 AR_WRITE(sc, AR_PHY(0), 0x00000007);
2334 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
2335
2336 if (!AR_SINGLE_CHIP(sc))
2337 ar5416_reset_addac(sc, c);
2338
2339 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
2340
2341 /* First initialization step (depends on channel band/bandwidth). */
2342 #ifndef IEEE80211_NO_HT
2343 if (extc != NULL) {
2344 if (IEEE80211_IS_CHAN_2GHZ(c))
2345 pvals = ini->vals_2g40;
2346 else
2347 pvals = ini->vals_5g40;
2348 }
2349 else
2350 #endif
2351 {
2352 if (IEEE80211_IS_CHAN_2GHZ(c))
2353 pvals = ini->vals_2g20;
2354 else
2355 pvals = ini->vals_5g20;
2356 }
2357 DPRINTFN(DBG_INIT, sc, "writing modal init vals\n");
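	/*
	 * Analog register writes are followed by a 100us settling delay;
	 * a short pause is also inserted after every 32 writes.  The same
	 * pattern is used for the common and fast PLL tables below.
	 */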
2358 for (i = 0; i < ini->nregs; i++) {
2359 uint32_t val = pvals[i];
2360
2361 /* Fix AR_AN_TOP2 initialization value if required. */
2362 if (ini->regs[i] == AR_AN_TOP2 &&
2363 (sc->sc_flags & ATHN_FLAG_AN_TOP2_FIXUP))
2364 val &= ~AR_AN_TOP2_PWDCLKIND;
2365 AR_WRITE(sc, ini->regs[i], val);
2366 if (AR_IS_ANALOG_REG(ini->regs[i])) {
2367 AR_WRITE_BARRIER(sc);
2368 DELAY(100);
2369 }
2370 if ((i & 0x1f) == 0)
2371 DELAY(1);
2372 }
2373 AR_WRITE_BARRIER(sc);
2374
2375 if (sc->sc_rx_gain != NULL)
2376 ar9280_reset_rx_gain(sc, c);
2377 if (sc->sc_tx_gain != NULL)
2378 ar9280_reset_tx_gain(sc, c);
2379
2380 if (AR_SREV_9271_10(sc)) {
2381 AR_WRITE(sc, AR_PHY(68), 0x30002311);
2382 AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
2383 }
2384 AR_WRITE_BARRIER(sc);
2385
2386 /* Second initialization step (common to all channels). */
2387 DPRINTFN(DBG_INIT, sc, "writing common init vals\n");
2388 for (i = 0; i < ini->ncmregs; i++) {
2389 AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
2390 if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
2391 AR_WRITE_BARRIER(sc);
2392 DELAY(100);
2393 }
2394 if ((i & 0x1f) == 0)
2395 DELAY(1);
2396 }
2397 AR_WRITE_BARRIER(sc);
2398
2399 if (!AR_SINGLE_CHIP(sc))
2400 ar5416_reset_bb_gain(sc, c);
2401
2402 if (IEEE80211_IS_CHAN_5GHZ(c) &&
2403 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
2404 /* Update modal values for fast PLL clock. */
2405 #ifndef IEEE80211_NO_HT
2406 if (extc != NULL)
2407 pvals = ini->fastvals_5g40;
2408 else
2409 #endif
2410 pvals = ini->fastvals_5g20;
2411 DPRINTFN(DBG_INIT, sc, "writing fast pll clock init vals\n");
2412 for (i = 0; i < ini->nfastregs; i++) {
2413 AR_WRITE(sc, ini->fastregs[i], pvals[i]);
2414 if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
2415 AR_WRITE_BARRIER(sc);
2416 DELAY(100);
2417 }
2418 if ((i & 0x1f) == 0)
2419 DELAY(1);
2420 }
2421 }
2422
2423 /*
2424 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted
2425 * descriptor status.
2426 */
2427 AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);
2428
2429 /* Hardware workarounds for occasional Rx data corruption. */
2430 if (AR_SREV_9280_10_OR_LATER(sc)) {
2431 reg = AR_READ(sc, AR_PCU_MISC_MODE2);
2432 if (!AR_SREV_9271(sc))
2433 reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
2434 if (AR_SREV_9287_10_OR_LATER(sc))
2435 reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
2436 AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);
2438 }
2439 else if (AR_SREV_5416_20_OR_LATER(sc)) {
2440 /* Disable baseband clock gating. */
2441 AR_WRITE(sc, AR_PHY(651), 0x11);
2442
2443 if (AR_SREV_9160(sc)) {
2444 /* Disable RIFS search to fix baseband hang. */
2445 AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
2446 AR_PHY_RIFS_INIT_DELAY_M);
2447 }
2448 }
2449 AR_WRITE_BARRIER(sc);
2450
2451 ar5008_set_phy(sc, c, extc);
2452 ar5008_init_chains(sc);
2453
2454 if (sc->sc_flags & ATHN_FLAG_OLPC) {
2455 extern int ticks;
2456 sc->sc_olpc_ticks = ticks;
2457 ops->olpc_init(sc);
2458 }
2459
2460 ops->set_txpower(sc, c, extc);
2461
2462 if (!AR_SINGLE_CHIP(sc))
2463 ar5416_rf_reset(sc, c);
2464 }
2465
2466 Static uint8_t
2467 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2468 int nicepts)
2469 {
2470 uint8_t vpd;
2471 int i, lo, hi;
2472
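	/*
	 * Find the two calibration intercepts bracketing "pwr" and
	 * linearly interpolate the Vpd value between them; when pwr falls
	 * outside the calibrated range, the first/last intercept is used
	 * on both sides.
	 */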
2473 for (i = 0; i < nicepts; i++)
2474 if (pwrPdg[i] > pwr)
2475 break;
2476 hi = i;
2477 lo = hi - 1;
2478 if (lo == -1)
2479 lo = hi;
2480 else if (hi == nicepts)
2481 hi = lo;
2482
2483 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2484 pwrPdg[hi], vpdPdg[hi]);
2485 return vpd;
2486 }
2487
2488 PUBLIC void
2489 ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
2490 struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
2491 int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
2492 {
2493 #define DB(x) ((x) / 2) /* Convert half dB to dB. */
2494 uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
2495 uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
2496 uint8_t lovpd, hivpd, boundary;
2497 int16_t ss, delta, vpdstep, val;
2498 int i, j, npdadcs, nvpds, maxidx, tgtidx;
2499
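	/*
	 * For each power detector gain, a Vpd curve is interpolated
	 * between the low and high calibration piers and appended to the
	 * PDADC table, with adjacent gain regions overlapping by "overlap"
	 * entries and missing values extrapolated at both ends.
	 */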
2500 /* Compute min and max power in half dB for each pdGain. */
2501 for (i = 0; i < nxpdgains; i++) {
2502 minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
2503 maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
2504 hipier->pwr[i][nicepts - 1]);
2505 }
2506
2507 	/* Fill power detector analog-to-digital converter (PDADC) table. */
2508 npdadcs = 0;
2509 for (i = 0; i < nxpdgains; i++) {
2510 if (i != nxpdgains - 1)
2511 boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
2512 else
2513 boundaries[i] = DB(maxpwr[i]);
2514 if (boundaries[i] > AR_MAX_RATE_POWER)
2515 boundaries[i] = AR_MAX_RATE_POWER;
2516
2517 if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
2518 /* Fix the gain delta (AR5416 1.0 only). */
2519 delta = boundaries[0] - 23;
2520 boundaries[0] = 23;
2521 }
2522 else
2523 delta = 0;
2524
2525 /* Find starting index for this pdGain. */
2526 if (i != 0) {
2527 ss = boundaries[i - 1] - DB(minpwr[i]) -
2528 overlap + 1 + delta;
2529 }
2530 else if (AR_SREV_9280_10_OR_LATER(sc))
2531 ss = -DB(minpwr[i]);
2532 else
2533 ss = 0;
2534
2535 /* Compute Vpd table for this pdGain. */
2536 nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
2537 memset(vpd, 0, sizeof(vpd));
2538 pwr = minpwr[i];
2539 for (j = 0; j < nvpds; j++) {
2540 /* Get lower and higher Vpd. */
2541 lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
2542 lopier->vpd[i], nicepts);
2543 hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
2544 hipier->vpd[i], nicepts);
2545
2546 /* Interpolate the final Vpd. */
2547 vpd[j] = athn_interpolate(fbin,
2548 lopier->fbin, lovpd, hipier->fbin, hivpd);
2549
2550 pwr += 2; /* In half dB. */
2551 }
2552
2553 /* Extrapolate data for ss < 0. */
2554 if (vpd[1] > vpd[0])
2555 vpdstep = vpd[1] - vpd[0];
2556 else
2557 vpdstep = 1;
2558 while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2559 val = vpd[0] + ss * vpdstep;
2560 pdadcs[npdadcs++] = MAX(val, 0);
2561 ss++;
2562 }
2563
2564 tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
2565 maxidx = MIN(tgtidx, nvpds);
2566 while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
2567 pdadcs[npdadcs++] = vpd[ss++];
2568
2569 if (tgtidx < maxidx)
2570 continue;
2571
2572 /* Extrapolate data for maxidx <= ss <= tgtidx. */
2573 if (vpd[nvpds - 1] > vpd[nvpds - 2])
2574 vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
2575 else
2576 vpdstep = 1;
2577 while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2578 val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
2579 pdadcs[npdadcs++] = MIN(val, 255);
2580 ss++;
2581 }
2582 }
2583
2584 /* Fill remaining PDADC and boundaries entries. */
2585 if (AR_SREV_9285(sc))
2586 boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
2587 else /* Fill with latest. */
2588 boundary = boundaries[nxpdgains - 1];
2589
2590 for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
2591 boundaries[nxpdgains] = boundary;
2592
2593 for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
2594 pdadcs[npdadcs] = pdadcs[npdadcs - 1];
2595 #undef DB
2596 }
2597
2598 PUBLIC void
2599 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2600 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2601 uint8_t tpow[4])
2602 {
2603 uint8_t fbin;
2604 int i, lo, hi;
2605
2606 /* Find interval (lower and upper indices). */
2607 fbin = athn_chan2fbin(c);
2608 for (i = 0; i < nchans; i++) {
2609 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2610 tgt[i].bChannel > fbin)
2611 break;
2612 }
2613 hi = i;
2614 lo = hi - 1;
2615 if (lo == -1)
2616 lo = hi;
2617 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2618 hi = lo;
2619
2620 /* Interpolate values. */
2621 for (i = 0; i < 4; i++) {
2622 tpow[i] = athn_interpolate(fbin,
2623 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2624 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2625 }
2626 /* XXX Apply conformance testing limit. */
2627 }
2628
2629 #ifndef IEEE80211_NO_HT
2630 PUBLIC void
2631 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2632 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2633 uint8_t tpow[8])
2634 {
2635 uint8_t fbin;
2636 int i, lo, hi;
2637
2638 /* Find interval (lower and upper indices). */
2639 fbin = athn_chan2fbin(c);
2640 for (i = 0; i < nchans; i++) {
2641 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2642 tgt[i].bChannel > fbin)
2643 break;
2644 }
2645 hi = i;
2646 lo = hi - 1;
2647 if (lo == -1)
2648 lo = hi;
2649 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2650 hi = lo;
2651
2652 /* Interpolate values. */
2653 for (i = 0; i < 8; i++) {
2654 tpow[i] = athn_interpolate(fbin,
2655 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2656 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2657 }
2658 /* XXX Apply conformance testing limit. */
2659 }
2660 #endif
2661
2662 /*
2663 * Adaptive noise immunity.
2664 */
2665 Static void
2666 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2667 {
2668 int high = level == 4;
2669 uint32_t reg;
2670
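	/* Only level 4 selects the first ("high") set of thresholds. */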
2671 reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2672 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2673 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2674
2675 reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2676 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2677 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2678 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2679
2680 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2681 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2682 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2683
2684 AR_WRITE_BARRIER(sc);
2685 }
2686
2687 Static void
2688 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2689 {
2690 uint32_t reg;
2691
2692 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2693 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2694 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2695 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2696 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2697
2698 reg = AR_READ(sc, AR_PHY_SFCORR);
2699 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2700 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2701 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2702 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2703
2704 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2705 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2706 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2707 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2708 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2709 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2710
2711 AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2712 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2713 AR_WRITE_BARRIER(sc);
2714 }
2715
2716 Static void
2717 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2718 {
2719 uint32_t reg;
2720
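	/*
	 * Raising the self-correlation thresholds to these high values
	 * effectively disables OFDM weak-signal detection (compare the
	 * lower thresholds used in ar5008_enable_ofdm_weak_signal()).
	 */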
2721 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2722 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2723 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2724 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2725 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2726
2727 reg = AR_READ(sc, AR_PHY_SFCORR);
2728 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2729 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2730 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2731 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2732
2733 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2734 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2735 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2736 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2737 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2738 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2739
2740 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2741 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2742 AR_WRITE_BARRIER(sc);
2743 }
2744
2745 Static void
2746 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2747 {
2748 uint32_t reg;
2749
2750 reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2751 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2752 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2753 AR_WRITE_BARRIER(sc);
2754 }
2755
2756 Static void
2757 ar5008_set_firstep_level(struct athn_softc *sc, int level)
2758 {
2759 uint32_t reg;
2760
2761 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2762 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2763 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2764 AR_WRITE_BARRIER(sc);
2765 }
2766
2767 Static void
2768 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2769 {
2770 uint32_t reg;
2771
2772 reg = AR_READ(sc, AR_PHY_TIMING5);
2773 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2774 AR_WRITE(sc, AR_PHY_TIMING5, reg);
2775 AR_WRITE_BARRIER(sc);
2776 }
2777