/* $NetBSD: arn5008.c,v 1.11.2.1 2017/03/20 06:57:28 pgoyette Exp $ */
/* $OpenBSD: ar5008.c,v 1.21 2012/08/25 12:14:31 kettenis Exp $ */

/*-
 * Copyright (c) 2009 Damien Bergamini <damien.bergamini (at) free.fr>
 * Copyright (c) 2008-2009 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

/*
 * Driver for Atheros 802.11a/g/n chipsets.
 * Routines common to AR5008, AR9001 and AR9002 families.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: arn5008.c,v 1.11.2.1 2017/03/20 06:57:28 pgoyette Exp $");

#include <sys/param.h>
#include <sys/sockio.h>
#include <sys/mbuf.h>
#include <sys/kernel.h>
#include <sys/socket.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/queue.h>
#include <sys/conf.h>
#include <sys/device.h>

#include <sys/bus.h>
#include <sys/endian.h>
#include <sys/intr.h>

#include <net/bpf.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <net/if_dl.h>
#include <net/if_ether.h>
#include <net/if_media.h>
#include <net/if_types.h>

#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/in_var.h>
#include <netinet/ip.h>

#include <net80211/ieee80211_var.h>
#include <net80211/ieee80211_amrr.h>
#include <net80211/ieee80211_radiotap.h>

#include <dev/ic/athnreg.h>
#include <dev/ic/athnvar.h>

#include <dev/ic/arn5008reg.h>
#include <dev/ic/arn5008.h>
#include <dev/ic/arn5416.h>
#include <dev/ic/arn9280.h>

#define Static static

Static void ar5008_calib_adc_dc_off(struct athn_softc *);
Static void ar5008_calib_adc_gain(struct athn_softc *);
Static void ar5008_calib_iq(struct athn_softc *);
Static void ar5008_disable_ofdm_weak_signal(struct athn_softc *);
Static void ar5008_disable_phy(struct athn_softc *);
Static int ar5008_dma_alloc(struct athn_softc *);
Static void ar5008_dma_free(struct athn_softc *);
Static void ar5008_do_calib(struct athn_softc *);
Static void ar5008_do_noisefloor_calib(struct athn_softc *);
Static void ar5008_enable_antenna_diversity(struct athn_softc *);
Static void ar5008_enable_ofdm_weak_signal(struct athn_softc *);
Static uint8_t ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
Static void ar5008_gpio_config_input(struct athn_softc *, int);
Static void ar5008_gpio_config_output(struct athn_softc *, int, int);
Static int ar5008_gpio_read(struct athn_softc *, int);
Static void ar5008_gpio_write(struct athn_softc *, int, int);
Static void ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
	    struct ieee80211_channel *);
Static void ar5008_init_baseband(struct athn_softc *);
Static void ar5008_init_chains(struct athn_softc *);
Static int ar5008_intr_status(struct athn_softc *);
Static int ar5008_intr(struct athn_softc *);
Static void ar5008_next_calib(struct athn_softc *);
Static int ar5008_read_eep_word(struct athn_softc *, uint32_t,
	    uint16_t *);
Static int ar5008_read_rom(struct athn_softc *);
Static void ar5008_rf_bus_release(struct athn_softc *);
Static int ar5008_rf_bus_request(struct athn_softc *);
Static void ar5008_rfsilent_init(struct athn_softc *);
Static int ar5008_rx_alloc(struct athn_softc *);
Static void ar5008_rx_enable(struct athn_softc *);
Static void ar5008_rx_free(struct athn_softc *);
Static void ar5008_rx_intr(struct athn_softc *);
Static void ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
	    struct ar_rx_desc *);
Static void ar5008_set_cck_weak_signal(struct athn_softc *, int);
Static void ar5008_set_delta_slope(struct athn_softc *,
	    struct ieee80211_channel *, struct ieee80211_channel *);
Static void ar5008_set_firstep_level(struct athn_softc *, int);
Static void ar5008_set_noise_immunity_level(struct athn_softc *, int);
Static void ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
	    struct ieee80211_channel *);
Static void ar5008_set_rf_mode(struct athn_softc *,
	    struct ieee80211_channel *);
Static void ar5008_set_rxchains(struct athn_softc *);
Static void ar5008_set_spur_immunity_level(struct athn_softc *, int);
Static void ar5008_swap_rom(struct athn_softc *);
Static int ar5008_swba_intr(struct athn_softc *);
Static int ar5008_tx(struct athn_softc *, struct mbuf *,
	    struct ieee80211_node *, int);
Static int ar5008_tx_alloc(struct athn_softc *);
Static void ar5008_tx_free(struct athn_softc *);
Static void ar5008_tx_intr(struct athn_softc *);
Static int ar5008_tx_process(struct athn_softc *, int);

#ifdef notused
Static void ar5008_bb_load_noisefloor(struct athn_softc *);
Static void ar5008_get_noisefloor(struct athn_softc *,
	    struct ieee80211_channel *);
Static void ar5008_noisefloor_calib(struct athn_softc *);
Static void ar5008_read_noisefloor(struct athn_softc *, int16_t *,
	    int16_t *);
Static void ar5008_write_noisefloor(struct athn_softc *, int16_t *,
	    int16_t *);
#endif /* notused */

// bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);

/*
 * XXX: see if_iwn.c:MCLGETIalt() for a better solution.
 */
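/*
 * Local stand-in for OpenBSD's MCLGETI(): allocate a packet header mbuf
 * backed by an external buffer of 'size' bytes, or return NULL.
 */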
static struct mbuf *
MCLGETI(struct athn_softc *sc __unused, int how,
    struct ifnet *ifp __unused, u_int size)
{
	struct mbuf *m;

	MGETHDR(m, how, MT_DATA);
	if (m == NULL)
		return NULL;

	MEXTMALLOC(m, size, how);
	if ((m->m_flags & M_EXT) == 0) {
		m_freem(m);
		return NULL;
	}
	return m;
}

PUBLIC int
ar5008_attach(struct athn_softc *sc)
{
	struct athn_ops *ops = &sc->sc_ops;
	struct ieee80211com *ic = &sc->sc_ic;
	struct ar_base_eep_header *base;
	uint8_t eep_ver, kc_entries_log;
	int error;

	/* Set callbacks for AR5008, AR9001 and AR9002 families. */
	ops->gpio_read = ar5008_gpio_read;
	ops->gpio_write = ar5008_gpio_write;
	ops->gpio_config_input = ar5008_gpio_config_input;
	ops->gpio_config_output = ar5008_gpio_config_output;
	ops->rfsilent_init = ar5008_rfsilent_init;

	ops->dma_alloc = ar5008_dma_alloc;
	ops->dma_free = ar5008_dma_free;
	ops->rx_enable = ar5008_rx_enable;
	ops->intr_status = ar5008_intr_status;
	ops->intr = ar5008_intr;
	ops->tx = ar5008_tx;

	ops->set_rf_mode = ar5008_set_rf_mode;
	ops->rf_bus_request = ar5008_rf_bus_request;
	ops->rf_bus_release = ar5008_rf_bus_release;
	ops->set_phy = ar5008_set_phy;
	ops->set_delta_slope = ar5008_set_delta_slope;
	ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
	ops->init_baseband = ar5008_init_baseband;
	ops->disable_phy = ar5008_disable_phy;
	ops->set_rxchains = ar5008_set_rxchains;
	ops->noisefloor_calib = ar5008_do_noisefloor_calib;
	ops->do_calib = ar5008_do_calib;
	ops->next_calib = ar5008_next_calib;
	ops->hw_init = ar5008_hw_init;

	ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
	ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
	ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
	ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
	ops->set_firstep_level = ar5008_set_firstep_level;
	ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;

	/* Set MAC registers offsets. */
	sc->sc_obs_off = AR_OBS;
	sc->sc_gpio_input_en_off = AR_GPIO_INPUT_EN_VAL;

	if (!(sc->sc_flags & ATHN_FLAG_PCIE))
		athn_config_nonpcie(sc);
	else
		athn_config_pcie(sc);

	/* Read entire ROM content in memory. */
	if ((error = ar5008_read_rom(sc)) != 0) {
		aprint_error_dev(sc->sc_dev, "could not read ROM\n");
		return error;
	}

	/* Get RF revision. */
	sc->sc_rf_rev = ar5416_get_rf_rev(sc);

	base = sc->sc_eep;
	eep_ver = (base->version >> 12) & 0xf;
	sc->sc_eep_rev = (base->version & 0xfff);
	if (eep_ver != AR_EEP_VER || sc->sc_eep_rev == 0) {
		aprint_error_dev(sc->sc_dev, "unsupported ROM version %d.%d\n",
		    eep_ver, sc->sc_eep_rev);
		return EINVAL;
	}

	if (base->opCapFlags & AR_OPFLAGS_11A)
		sc->sc_flags |= ATHN_FLAG_11A;
	if (base->opCapFlags & AR_OPFLAGS_11G)
		sc->sc_flags |= ATHN_FLAG_11G;
	if (base->opCapFlags & AR_OPFLAGS_11N)
		sc->sc_flags |= ATHN_FLAG_11N;

	IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr);

	/* Check if we have a hardware radio switch. */
	if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) {
		sc->sc_flags |= ATHN_FLAG_RFSILENT;
		/* Get GPIO pin used by hardware radio switch. */
		sc->sc_rfsilent_pin = MS(base->rfSilent,
		    AR_EEP_RFSILENT_GPIO_SEL);
		/* Get polarity of hardware radio switch. */
		if (base->rfSilent & AR_EEP_RFSILENT_POLARITY)
			sc->sc_flags |= ATHN_FLAG_RFSILENT_REVERSED;
	}

	/* Get the number of HW key cache entries. */
	kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES);
	sc->sc_kc_entries = kc_entries_log != 0 ?
	    1 << kc_entries_log : AR_KEYTABLE_SIZE;

	sc->sc_txchainmask = base->txMask;
	if (sc->sc_mac_ver == AR_SREV_VERSION_5416_PCI &&
	    !(base->opCapFlags & AR_OPFLAGS_11A)) {
		/* For single-band AR5416 PCI, use GPIO pin 0. */
		sc->sc_rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
	}
	else
		sc->sc_rxchainmask = base->rxMask;

	ops->setup(sc);
	return 0;
}

/*
 * Read 16-bit word from ROM.
 */
Static int
ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
{
	uint32_t reg;
	int ntries;

	reg = AR_READ(sc, AR_EEPROM_OFFSET(addr));
	for (ntries = 0; ntries < 1000; ntries++) {
		reg = AR_READ(sc, AR_EEPROM_STATUS_DATA);
		if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY |
		    AR_EEPROM_STATUS_DATA_PROT_ACCESS))) {
			*val = MS(reg, AR_EEPROM_STATUS_DATA_VAL);
			return 0;
		}
		DELAY(10);
	}
	*val = 0xffff;
	return ETIMEDOUT;
}

Static int
ar5008_read_rom(struct athn_softc *sc)
{
	uint32_t addr, end;
	uint16_t magic, sum, *eep;
	int need_swap = 0;
	int error;

	/* Determine ROM endianness. */
	error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic);
	if (error != 0)
		return error;
	if (magic != AR_EEPROM_MAGIC) {
		if (magic != bswap16(AR_EEPROM_MAGIC)) {
			DPRINTFN(DBG_INIT, sc,
			    "invalid ROM magic 0x%x != 0x%x\n",
			    magic, AR_EEPROM_MAGIC);
			return EIO;
		}
		DPRINTFN(DBG_INIT, sc, "non-native ROM endianness\n");
		need_swap = 1;
	}

	/* Allocate space to store ROM in host memory. */
	sc->sc_eep = malloc(sc->sc_eep_size, M_DEVBUF, M_NOWAIT);
	if (sc->sc_eep == NULL)
		return ENOMEM;

	/* Read entire ROM and compute checksum. */
	sum = 0;
	eep = sc->sc_eep;
	end = sc->sc_eep_base + sc->sc_eep_size / sizeof(uint16_t);
	for (addr = sc->sc_eep_base; addr < end; addr++, eep++) {
		if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
			DPRINTFN(DBG_INIT, sc,
			    "could not read ROM at 0x%x\n", addr);
			return error;
		}
		if (need_swap)
			*eep = bswap16(*eep);
		sum ^= *eep;
	}
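	/* All words, including the stored checksum, must XOR to 0xffff. */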
	if (sum != 0xffff) {
		aprint_error_dev(sc->sc_dev, "bad ROM checksum 0x%04x\n", sum);
		return EIO;
	}
	if (need_swap)
		ar5008_swap_rom(sc);

	return 0;
}

Static void
ar5008_swap_rom(struct athn_softc *sc)
{
	struct ar_base_eep_header *base = sc->sc_eep;

	/* Swap common fields first. */
	base->length = bswap16(base->length);
	base->version = bswap16(base->version);
	base->regDmn[0] = bswap16(base->regDmn[0]);
	base->regDmn[1] = bswap16(base->regDmn[1]);
	base->rfSilent = bswap16(base->rfSilent);
	base->blueToothOptions = bswap16(base->blueToothOptions);
	base->deviceCap = bswap16(base->deviceCap);

	/* Swap device-dependent fields. */
	sc->sc_ops.swap_rom(sc);
}

/*
 * Access to General Purpose Input/Output ports.
 */
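/*
 * NB: USB parts (AR7010/AR9271) use inverted GPIO logic.  On AR7010 the
 * input bits are read from a separate register (AR7010_GPIO_IN); on other
 * parts they live in the upper half of AR_GPIO_IN_OUT, above the outputs.
 */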
Static int
ar5008_gpio_read(struct athn_softc *sc, int pin)
{

	KASSERT(pin < sc->sc_ngpiopins);
	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc))
		return !((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1);
	return (AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->sc_ngpiopins + pin)) & 1;
}

Static void
ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
{
	uint32_t reg;

	KASSERT(pin < sc->sc_ngpiopins);

	if (sc->sc_flags & ATHN_FLAG_USB)
		set = !set; /* AR9271/AR7010 is reversed. */

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		reg = AR_READ(sc, AR7010_GPIO_OUT);
		if (set)
			reg |= 1 << pin;
		else
			reg &= ~(1 << pin);
		AR_WRITE(sc, AR7010_GPIO_OUT, reg);
	}
	else {
		reg = AR_READ(sc, AR_GPIO_IN_OUT);
		if (set)
			reg |= 1 << pin;
		else
			reg &= ~(1 << pin);
		AR_WRITE(sc, AR_GPIO_IN_OUT, reg);
	}
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_gpio_config_input(struct athn_softc *sc, int pin)
{
	uint32_t reg;

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
	}
	else {
		reg = AR_READ(sc, AR_GPIO_OE_OUT);
		reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
		reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2);
		AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
	}
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
{
	uint32_t reg;
	int mux, off;

	if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
		/* Special case for AR7010. */
		AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
		AR_WRITE_BARRIER(sc);
		return;
	}
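	/* Each output MUX register packs 5-bit MUX values for six pins. */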
	mux = pin / 6;
	off = pin % 6;

	reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux));
	if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
		reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
	reg &= ~(0x1f << (off * 5));
	reg |= (type & 0x1f) << (off * 5);
	AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg);

	reg = AR_READ(sc, AR_GPIO_OE_OUT);
	reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
	reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2);
	AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_rfsilent_init(struct athn_softc *sc)
{
	uint32_t reg;

	/* Configure hardware radio switch. */
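	/*
	 * The switch input is selected via AR_GPIO_INPUT_MUX2 and fed to
	 * the baseband (RFSILENT_BB); the interrupt polarity bit is set
	 * unless the ROM flags the switch as reversed.
	 */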
	AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
	reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
	reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
	AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
	ar5008_gpio_config_input(sc, sc->sc_rfsilent_pin);
	AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
	if (!(sc->sc_flags & ATHN_FLAG_RFSILENT_REVERSED)) {
		AR_SETBITS(sc, AR_GPIO_INTR_POL,
		    AR_GPIO_INTR_POL_PIN(sc->sc_rfsilent_pin));
	}
	AR_WRITE_BARRIER(sc);
}

Static int
ar5008_dma_alloc(struct athn_softc *sc)
{
	int error;

	error = ar5008_tx_alloc(sc);
	if (error != 0)
		return error;

	error = ar5008_rx_alloc(sc);
	if (error != 0)
		return error;

	return 0;
}

Static void
ar5008_dma_free(struct athn_softc *sc)
{

	ar5008_tx_free(sc);
	ar5008_rx_free(sc);
}

Static int
ar5008_tx_alloc(struct athn_softc *sc)
{
	struct athn_tx_buf *bf;
	bus_size_t size;
	int error, nsegs, i;

	/*
	 * Allocate a pool of Tx descriptors shared between all Tx queues.
	 */
	size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);

	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
	    BUS_DMA_NOWAIT, &sc->sc_map);
	if (error != 0)
		goto fail;

	error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->sc_seg, 1,
// XXX	    &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
	    &nsegs, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	error = bus_dmamem_map(sc->sc_dmat, &sc->sc_seg, 1, size,
	    (void **)&sc->sc_descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
	if (error != 0)
		goto fail;

	error = bus_dmamap_load(sc->sc_dmat, sc->sc_map, sc->sc_descs,
	    size, NULL, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

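	/*
	 * Each Tx buffer owns AR5008_MAX_SCATTER consecutive descriptors
	 * in the pool, one per possible DMA segment of a frame.
	 */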
	SIMPLEQ_INIT(&sc->sc_txbufs);
	for (i = 0; i < ATHN_NTXBUFS; i++) {
		bf = &sc->sc_txpool[i];

		error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
		    AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
		    &bf->bf_map);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "could not create Tx buf DMA map\n");
			goto fail;
		}

		bf->bf_descs =
		    &((struct ar_tx_desc *)sc->sc_descs)[i * AR5008_MAX_SCATTER];
		bf->bf_daddr = sc->sc_map->dm_segs[0].ds_addr +
		    i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);

		SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
	}
	return 0;
 fail:
	ar5008_tx_free(sc);
	return error;
}

Static void
ar5008_tx_free(struct athn_softc *sc)
{
	struct athn_tx_buf *bf;
	int i;

	for (i = 0; i < ATHN_NTXBUFS; i++) {
		bf = &sc->sc_txpool[i];

		if (bf->bf_map != NULL)
			bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
	}
	/* Free Tx descriptors. */
	if (sc->sc_map != NULL) {
		if (sc->sc_descs != NULL) {
			bus_dmamap_unload(sc->sc_dmat, sc->sc_map);
			bus_dmamem_unmap(sc->sc_dmat, (void *)sc->sc_descs,
			    ATHN_NTXBUFS * AR5008_MAX_SCATTER *
			    sizeof(struct ar_tx_desc));
			bus_dmamem_free(sc->sc_dmat, &sc->sc_seg, 1);
		}
		bus_dmamap_destroy(sc->sc_dmat, sc->sc_map);
	}
}

Static int
ar5008_rx_alloc(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	struct ar_rx_desc *ds;
	bus_size_t size;
	int error, nsegs, i;

	rxq->bf = malloc(ATHN_NRXBUFS * sizeof(*bf), M_DEVBUF,
	    M_NOWAIT | M_ZERO);
	if (rxq->bf == NULL)
		return ENOMEM;

	size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc);

	error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
	    BUS_DMA_NOWAIT, &rxq->map);
	if (error != 0)
		goto fail;

	error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
//	    &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
	    &nsegs, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
	    (void **)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
	if (error != 0)
		goto fail;

	error = bus_dmamap_load(sc->sc_dmat, rxq->map, rxq->descs,
	    size, NULL, BUS_DMA_NOWAIT);
	if (error != 0)
		goto fail;

	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];
		ds = &((struct ar_rx_desc *)rxq->descs)[i];

		error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
		    ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
		    &bf->bf_map);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "could not create Rx buf DMA map\n");
			goto fail;
		}
		/*
		 * Assumes MCLGETI returns cache-line-size aligned buffers.
		 * XXX: does ours?
		 */
		bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
		if (bf->bf_m == NULL) {
			aprint_error_dev(sc->sc_dev,
			    "could not allocate Rx mbuf\n");
			error = ENOBUFS;
			goto fail;
		}

		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
		    BUS_DMA_NOWAIT | BUS_DMA_READ);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "could not DMA map Rx buffer\n");
			goto fail;
		}

		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
		    BUS_DMASYNC_PREREAD);

		bf->bf_desc = ds;
		bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
		    i * sizeof(struct ar_rx_desc);
	}
	return 0;
 fail:
	ar5008_rx_free(sc);
	return error;
}

Static void
ar5008_rx_free(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	int i;

	if (rxq->bf == NULL)
		return;
	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];

		if (bf->bf_map != NULL)
			bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
		if (bf->bf_m != NULL)
			m_freem(bf->bf_m);
	}
	free(rxq->bf, M_DEVBUF);

	/* Free Rx descriptors. */
	if (rxq->map != NULL) {
		if (rxq->descs != NULL) {
			bus_dmamap_unload(sc->sc_dmat, rxq->map);
			bus_dmamem_unmap(sc->sc_dmat, (void *)rxq->descs,
			    ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
			bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
		}
		bus_dmamap_destroy(sc->sc_dmat, rxq->map);
	}
}

Static void
ar5008_rx_enable(struct athn_softc *sc)
{
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf;
	struct ar_rx_desc *ds;
	int i;

	/* Setup and link Rx descriptors. */
	SIMPLEQ_INIT(&rxq->head);
	rxq->lastds = NULL;
	for (i = 0; i < ATHN_NRXBUFS; i++) {
		bf = &rxq->bf[i];
		ds = bf->bf_desc;

		memset(ds, 0, sizeof(*ds));
		ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
		ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ);

		if (rxq->lastds != NULL) {
			((struct ar_rx_desc *)rxq->lastds)->ds_link =
			    bf->bf_daddr;
		}
		SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
		rxq->lastds = ds;
	}
	bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
	    BUS_DMASYNC_PREREAD);

	/* Enable Rx. */
	AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
	AR_WRITE(sc, AR_CR, AR_CR_RXE);
	AR_WRITE_BARRIER(sc);
}

Static void
ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
    struct ar_rx_desc *ds)
{
	struct athn_rx_radiotap_header *tap = &sc->sc_rxtap;
	struct ieee80211com *ic = &sc->sc_ic;
	uint64_t tsf;
	uint32_t tstamp;
	uint8_t rate;

	/* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
	tstamp = ds->ds_status2;
	tsf = AR_READ(sc, AR_TSF_U32);
	tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32);
	if ((tsf & 0x7fff) < tstamp)
		tsf -= 0x8000;
	tsf = (tsf & ~0x7fff) | tstamp;

	tap->wr_flags = IEEE80211_RADIOTAP_F_FCS;
	tap->wr_tsft = htole64(tsf);
	tap->wr_chan_freq = htole16(ic->ic_curchan->ic_freq);
	tap->wr_chan_flags = htole16(ic->ic_curchan->ic_flags);
	tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
	/* XXX noise. */
	tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
	tap->wr_rate = 0; /* In case it can't be found below. */
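	/* NB: radiotap 802.11 rates below are in units of 500 kb/s. */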
	if (AR_SREV_5416_20_OR_LATER(sc))
		rate = MS(ds->ds_status0, AR_RXS0_RATE);
	else
		rate = MS(ds->ds_status3, AR_RXS3_RATE);
	if (rate & 0x80) { /* HT. */
		/* Bit 7 set means HT MCS instead of rate. */
		tap->wr_rate = rate;
		if (!(ds->ds_status3 & AR_RXS3_GI))
			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI;

	}
	else if (rate & 0x10) { /* CCK. */
		if (rate & 0x04)
			tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
		switch (rate & ~0x14) {
		case 0xb: tap->wr_rate = 2; break;
		case 0xa: tap->wr_rate = 4; break;
		case 0x9: tap->wr_rate = 11; break;
		case 0x8: tap->wr_rate = 22; break;
		}
	}
	else { /* OFDM. */
		switch (rate) {
		case 0xb: tap->wr_rate = 12; break;
		case 0xf: tap->wr_rate = 18; break;
		case 0xa: tap->wr_rate = 24; break;
		case 0xe: tap->wr_rate = 36; break;
		case 0x9: tap->wr_rate = 48; break;
		case 0xd: tap->wr_rate = 72; break;
		case 0x8: tap->wr_rate = 96; break;
		case 0xc: tap->wr_rate = 108; break;
		}
	}
	bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m);
}

static __inline int
ar5008_rx_process(struct athn_softc *sc)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ifnet *ifp = &sc->sc_if;
	struct athn_rxq *rxq = &sc->sc_rxq[0];
	struct athn_rx_buf *bf, *nbf;
	struct ar_rx_desc *ds;
	struct ieee80211_frame *wh;
	struct ieee80211_node *ni;
	struct mbuf *m, *m1;
	u_int32_t rstamp;
	int error, len, rssi, s;

	bf = SIMPLEQ_FIRST(&rxq->head);
	if (__predict_false(bf == NULL)) { /* Should not happen. */
		aprint_error_dev(sc->sc_dev, "Rx queue is empty!\n");
		return ENOENT;
	}
	ds = bf->bf_desc;

	if (!(ds->ds_status8 & AR_RXS8_DONE)) {
		/*
		 * On some parts, the status words can get corrupted
		 * (including the "done" bit), so we check the next
		 * descriptor "done" bit.  If it is set, it is a good
		 * indication that the status words are corrupted, so
		 * we skip this descriptor and drop the frame.
		 */
		nbf = SIMPLEQ_NEXT(bf, bf_list);
		if (nbf != NULL &&
		    (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
		     AR_RXS8_DONE)) {
			DPRINTFN(DBG_RX, sc,
			    "corrupted descriptor status=0x%x\n",
			    ds->ds_status8);
			/* HW will not "move" RXDP in this case, so do it. */
			AR_WRITE(sc, AR_RXDP, nbf->bf_daddr);
			AR_WRITE_BARRIER(sc);
			ifp->if_ierrors++;
			goto skip;
		}
		return EBUSY;
	}

	if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
		/* Drop frames that span multiple Rx descriptors. */
		DPRINTFN(DBG_RX, sc, "dropping split frame\n");
		ifp->if_ierrors++;
		goto skip;
	}
	if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) {
		if (ds->ds_status8 & AR_RXS8_CRC_ERR)
			DPRINTFN(DBG_RX, sc, "CRC error\n");
		else if (ds->ds_status8 & AR_RXS8_PHY_ERR)
			DPRINTFN(DBG_RX, sc, "PHY error=0x%x\n",
			    MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE));
		else if (ds->ds_status8 & AR_RXS8_DECRYPT_CRC_ERR)
			DPRINTFN(DBG_RX, sc, "Decryption CRC error\n");
		else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) {
			DPRINTFN(DBG_RX, sc, "Michael MIC failure\n");

			len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
			m = bf->bf_m;
			m_set_rcvif(m, ifp);
			m->m_pkthdr.len = m->m_len = len;
			wh = mtod(m, struct ieee80211_frame *);

			/* Report Michael MIC failures to net80211. */
			ieee80211_notify_michael_failure(ic, wh, 0 /* XXX: keyix */);
		}
		ifp->if_ierrors++;
		goto skip;
	}

	len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
	if (__predict_false(len < (int)IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
		DPRINTFN(DBG_RX, sc, "corrupted descriptor length=%d\n", len);
		ifp->if_ierrors++;
		goto skip;
	}

	/* Allocate a new Rx buffer. */
	m1 = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
	if (__predict_false(m1 == NULL)) {
		ic->ic_stats.is_rx_nobuf++;
		ifp->if_ierrors++;
		goto skip;
	}

	/* Sync and unmap the old Rx buffer. */
	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
	    BUS_DMASYNC_POSTREAD);
	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);

	/* Map the new Rx buffer. */
	error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
	    ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
	if (__predict_false(error != 0)) {
		m_freem(m1);

		/* Remap the old Rx buffer or panic. */
		error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
		    mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
		    BUS_DMA_NOWAIT | BUS_DMA_READ);
		KASSERT(error == 0);
		ifp->if_ierrors++;
		goto skip;
	}

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
	    BUS_DMASYNC_PREREAD);

	/* Write physical address of new Rx buffer. */
	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;

	m = bf->bf_m;
	bf->bf_m = m1;

	/* Finalize mbuf. */
	m_set_rcvif(m, ifp);
	m->m_pkthdr.len = m->m_len = len;

	s = splnet();

	/* Grab a reference to the source node. */
	wh = mtod(m, struct ieee80211_frame *);
	ni = ieee80211_find_rxnode(ic, (struct ieee80211_frame_min *)wh);

	/* Remove any HW padding after the 802.11 header. */
	if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
		u_int hdrlen = ieee80211_anyhdrsize(wh);
		if (hdrlen & 3) {
			ovbcopy(wh, (uint8_t *)wh + 2, hdrlen);
			m_adj(m, 2);
		}
	}
	if (__predict_false(sc->sc_drvbpf != NULL))
		ar5008_rx_radiotap(sc, m, ds);

	/* Trim 802.11 FCS after radiotap. */
	m_adj(m, -IEEE80211_CRC_LEN);

	/* Send the frame to the 802.11 layer. */
	rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
	rstamp = ds->ds_status2;
	ieee80211_input(ic, m, ni, rssi, rstamp);

	/* Node is no longer needed. */
	ieee80211_free_node(ni);

	splx(s);

 skip:
	/* Unlink this descriptor from head. */
	SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
	memset(&ds->ds_status0, 0, 36); /* XXX Really needed? */
	ds->ds_status8 &= ~AR_RXS8_DONE;
	ds->ds_link = 0;

	/* Re-use this descriptor and link it to tail. */
	if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
		((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
	else
		AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
	SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
	rxq->lastds = ds;

	/* Re-enable Rx. */
	AR_WRITE(sc, AR_CR, AR_CR_RXE);
	AR_WRITE_BARRIER(sc);
	return 0;
}

Static void
ar5008_rx_intr(struct athn_softc *sc)
{

	while (ar5008_rx_process(sc) == 0)
		continue;
}

Static int
ar5008_tx_process(struct athn_softc *sc, int qid)
{
	struct ifnet *ifp = &sc->sc_if;
	struct athn_txq *txq = &sc->sc_txq[qid];
	struct athn_node *an;
	struct athn_tx_buf *bf;
	struct ar_tx_desc *ds;
	uint8_t failcnt;

	bf = SIMPLEQ_FIRST(&txq->head);
	if (bf == NULL)
		return ENOENT;
	/* Get descriptor of last DMA segment. */
	ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];

	if (!(ds->ds_status9 & AR_TXS9_DONE))
		return EBUSY;

	SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
	ifp->if_opackets++;

	sc->sc_tx_timer = 0;

	if (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES)
		ifp->if_oerrors++;

	if (ds->ds_status1 & AR_TXS1_UNDERRUN)
		athn_inc_tx_trigger_level(sc);

	an = (struct athn_node *)bf->bf_ni;
	/*
	 * NB: the data fail count contains the number of un-acked tries
	 * for the final series used.  We must add the number of tries for
	 * each series that was fully processed.
	 */
	failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT);
	/* NB: Assume two tries per series. */
	failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;

	/* Update rate control statistics. */
	an->amn.amn_txcnt++;
	if (failcnt > 0)
		an->amn.amn_retrycnt++;

	DPRINTFN(DBG_TX, sc, "Tx done qid=%d status1=%d fail count=%d\n",
	    qid, ds->ds_status1, failcnt);

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_POSTWRITE);
	bus_dmamap_unload(sc->sc_dmat, bf->bf_map);

	m_freem(bf->bf_m);
	bf->bf_m = NULL;
	ieee80211_free_node(bf->bf_ni);
	bf->bf_ni = NULL;

	/* Link Tx buffer back to global free list. */
	SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
	return 0;
}

Static void
ar5008_tx_intr(struct athn_softc *sc)
{
	struct ifnet *ifp = &sc->sc_if;
	uint16_t mask = 0;
	uint32_t reg;
	int qid, s;

	s = splnet();

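	/*
	 * Build a per-QCU bitmask of queues with completed or failed
	 * frames from the secondary ISR registers.
	 */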
	reg = AR_READ(sc, AR_ISR_S0_S);
	mask |= MS(reg, AR_ISR_S0_QCU_TXOK);
	mask |= MS(reg, AR_ISR_S0_QCU_TXDESC);

	reg = AR_READ(sc, AR_ISR_S1_S);
	mask |= MS(reg, AR_ISR_S1_QCU_TXERR);
	mask |= MS(reg, AR_ISR_S1_QCU_TXEOL);

	DPRINTFN(DBG_TX, sc, "Tx interrupt mask=0x%x\n", mask);
	for (qid = 0; mask != 0; mask >>= 1, qid++) {
		if (mask & 1)
			while (ar5008_tx_process(sc, qid) == 0);
	}
	if (!SIMPLEQ_EMPTY(&sc->sc_txbufs)) {
		ifp->if_flags &= ~IFF_OACTIVE;
		ifp->if_start(ifp);
	}

	splx(s);
}

#ifndef IEEE80211_STA_ONLY
/*
 * Process Software Beacon Alert interrupts.
 */
Static int
ar5008_swba_intr(struct athn_softc *sc)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ifnet *ifp = &sc->sc_if;
	struct ieee80211_node *ni = ic->ic_bss;
	struct athn_tx_buf *bf = sc->sc_bcnbuf;
	struct ieee80211_frame *wh;
	struct ieee80211_beacon_offsets bo;
	struct ar_tx_desc *ds;
	struct mbuf *m;
	uint8_t ridx, hwrate;
	int error, totlen;

#if notyet
	if (ic->ic_tim_mcast_pending &&
	    IF_IS_EMPTY(&ni->ni_savedq) &&
	    SIMPLEQ_EMPTY(&sc->sc_txq[ATHN_QID_CAB].head))
		ic->ic_tim_mcast_pending = 0;
#endif
	if (ic->ic_dtim_count == 0)
		ic->ic_dtim_count = ic->ic_dtim_period - 1;
	else
		ic->ic_dtim_count--;

	/* Make sure previous beacon has been sent. */
	if (athn_tx_pending(sc, ATHN_QID_BEACON)) {
		DPRINTFN(DBG_INTR, sc, "beacon stuck\n");
		return EBUSY;
	}
	/* Get new beacon. */
	m = ieee80211_beacon_alloc(ic, ic->ic_bss, &bo);
	if (__predict_false(m == NULL))
		return ENOBUFS;
	/* Assign sequence number. */
	/* XXX: use non-QoS tid? */
	wh = mtod(m, struct ieee80211_frame *);
	*(uint16_t *)&wh->i_seq[0] =
	    htole16(ic->ic_bss->ni_txseqs[0] << IEEE80211_SEQ_SEQ_SHIFT);
	ic->ic_bss->ni_txseqs[0]++;

	/* Unmap and free old beacon if any. */
	if (__predict_true(bf->bf_m != NULL)) {
		bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,
		    bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);
		bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
		m_freem(bf->bf_m);
		bf->bf_m = NULL;
	}
	/* DMA map new beacon. */
	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
	if (__predict_false(error != 0)) {
		m_freem(m);
		return error;
	}
	bf->bf_m = m;

	/* Setup Tx descriptor (simplified ar5008_tx()). */
	ds = bf->bf_descs;
	memset(ds, 0, sizeof(*ds));

	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
	ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen);
	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER);
	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON);
	ds->ds_ctl1 |= AR_TXC1_NO_ACK;
	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR);

	/* Write number of tries. */
	ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1);

	/* Write Tx rate. */
	ridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
	    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
	hwrate = athn_rates[ridx].hwrate;
	ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate);

	/* Write Tx chains. */
	ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask);

	ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
	/* Segment length must be a multiple of 4. */
	ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
	    (bf->bf_map->dm_segs[0].ds_len + 3) & ~3);

	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_PREWRITE);

	/* Stop Tx DMA before putting the new beacon on the queue. */
	athn_stop_tx_dma(sc, ATHN_QID_BEACON);

	AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr);

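	/*
	 * Push frames buffered for power-save stations onto the CAB
	 * (content-after-beacon) queue so they follow the beacon.
	 */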
	for (;;) {
		if (SIMPLEQ_EMPTY(&sc->sc_txbufs))
			break;

		IF_DEQUEUE(&ni->ni_savedq, m);
		if (m == NULL)
			break;
		if (!IF_IS_EMPTY(&ni->ni_savedq)) {
			/* more queued frames, set the more data bit */
			wh = mtod(m, struct ieee80211_frame *);
			wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA;
		}

		if (sc->sc_ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) {
			ieee80211_free_node(ni);
			ifp->if_oerrors++;
			break;
		}
	}

	/* Kick Tx. */
	AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON);
	AR_WRITE_BARRIER(sc);
	return 0;
}
#endif

static int
ar5008_get_intr_status(struct athn_softc *sc, uint32_t *intrp, uint32_t *syncp)
{
	uint32_t intr, sync;

	/* Get pending interrupts. */
	intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE);
	if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) {
		intr = AR_READ(sc, AR_INTR_SYNC_CAUSE);
		if (intr == AR_INTR_SPURIOUS || (intr & sc->sc_isync) == 0)
			return 0; /* Not for us. */
	}

	if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
	    (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
		intr = AR_READ(sc, AR_ISR);
	else
		intr = 0;
	sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->sc_isync;
	if (intr == 0 && sync == 0)
		return 0; /* Not for us. */

	*intrp = intr;
	*syncp = sync;
	return 1;
}


Static int
ar5008_intr_status(struct athn_softc *sc)
{
	uint32_t intr, sync;

	return ar5008_get_intr_status(sc, &intr, &sync);
}

Static int
ar5008_intr(struct athn_softc *sc)
{
	uint32_t intr, intr5, sync;
#ifndef IEEE80211_STA_ONLY
	int s;
#endif

	if (!ar5008_get_intr_status(sc, &intr, &sync))
		return 0;

	if (intr != 0) {
		if (intr & AR_ISR_BCNMISC) {
			uint32_t intr2 = AR_READ(sc, AR_ISR_S2);
#if notyet
			if (intr2 & AR_ISR_S2_TIM)
				/* TBD */;
			if (intr2 & AR_ISR_S2_TSFOOR)
				/* TBD */;
#else
			__USE(intr2);
#endif
		}
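		/* AR_ISR_RAC is a read-and-clear mirror of AR_ISR. */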
		intr = AR_READ(sc, AR_ISR_RAC);
		if (intr == AR_INTR_SPURIOUS)
			return 1;

#ifndef IEEE80211_STA_ONLY
		if (intr & AR_ISR_SWBA) {
			s = splnet();
			ar5008_swba_intr(sc);
			splx(s);
		}
#endif
		if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
			ar5008_rx_intr(sc);
		if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN))
			ar5008_rx_intr(sc);

		if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC |
		    AR_ISR_TXERR | AR_ISR_TXEOL))
			ar5008_tx_intr(sc);

		intr5 = AR_READ(sc, AR_ISR_S5_S);
		if (intr & AR_ISR_GENTMR) {
			if (intr5 & AR_ISR_GENTMR) {
				DPRINTFN(DBG_INTR, sc,
				    "GENTMR trigger=%d thresh=%d\n",
				    MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
				    MS(intr5, AR_ISR_S5_GENTIMER_THRESH));
			}
		}
#if notyet
		if (intr5 & AR_ISR_S5_TIM_TIMER) {
			/* TBD */;
		}
#endif
	}
	if (sync != 0) {
#if notyet
		if (sync &
		    (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) {
			/* TBD */;
		}
#endif
		if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
			AR_WRITE(sc, AR_RC, AR_RC_HOSTIF);
			AR_WRITE(sc, AR_RC, 0);
		}

		if ((sc->sc_flags & ATHN_FLAG_RFSILENT) &&
		    (sync & AR_INTR_SYNC_GPIO_PIN(sc->sc_rfsilent_pin))) {
			AR_WRITE(sc, AR_INTR_SYNC_ENABLE, 0);
			(void)AR_READ(sc, AR_INTR_SYNC_ENABLE);
			pmf_event_inject(sc->sc_dev, PMFE_RADIO_OFF);
		}

		AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync);
		(void)AR_READ(sc, AR_INTR_SYNC_CAUSE);
	}
	return 1;
}

Static int
ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
    int txflags)
{
	struct ieee80211com *ic = &sc->sc_ic;
	struct ieee80211_key *k = NULL;
	struct ieee80211_frame *wh;
	struct athn_series series[4];
	struct ar_tx_desc *ds, *lastds;
	struct athn_txq *txq;
	struct athn_tx_buf *bf;
	struct athn_node *an = (void *)ni;
	struct mbuf *m1;
	uint16_t qos;
	uint8_t txpower, type, encrtype, ridx[4];
	int i, error, totlen, hasqos, qid;

	/* Grab a Tx buffer from our global free list. */
	bf = SIMPLEQ_FIRST(&sc->sc_txbufs);
	KASSERT(bf != NULL);

	/* Map 802.11 frame type to hardware frame type. */
	wh = mtod(m, struct ieee80211_frame *);
	if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
	    IEEE80211_FC0_TYPE_MGT) {
		/* NB: Beacons do not use ar5008_tx(). */
		if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
		    IEEE80211_FC0_SUBTYPE_PROBE_RESP)
			type = AR_FRAME_TYPE_PROBE_RESP;
		else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
		    IEEE80211_FC0_SUBTYPE_ATIM)
			type = AR_FRAME_TYPE_ATIM;
		else
			type = AR_FRAME_TYPE_NORMAL;
	}
	else if ((wh->i_fc[0] &
	    (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) ==
	    (IEEE80211_FC0_TYPE_CTL | IEEE80211_FC0_SUBTYPE_PS_POLL)) {
		type = AR_FRAME_TYPE_PSPOLL;
	}
	else
		type = AR_FRAME_TYPE_NORMAL;

	if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) {
		k = ieee80211_crypto_encap(ic, ni, m);
		if (k == NULL)
			return ENOBUFS;

		/* packet header may have moved, reset our local pointer */
		wh = mtod(m, struct ieee80211_frame *);
	}

	/* XXX 2-byte padding for QoS and 4-addr headers. */

	/* Select the HW Tx queue to use for this frame. */
	if ((hasqos = ieee80211_has_qos(wh))) {
#ifdef notyet_edca
		uint8_t tid;

		qos = ieee80211_get_qos(wh);
		tid = qos & IEEE80211_QOS_TID;
		qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
#else
		qos = ieee80211_get_qos(wh);
		qid = ATHN_QID_AC_BE;
#endif /* notyet_edca */
	}
	else if (type == AR_FRAME_TYPE_PSPOLL) {
		qos = 0;
		qid = ATHN_QID_PSPOLL;
	}
	else if (txflags & ATHN_TXFLAG_CAB) {
		qos = 0;
		qid = ATHN_QID_CAB;
	}
	else {
		qos = 0;
		qid = ATHN_QID_AC_BE;
	}
	txq = &sc->sc_txq[qid];

	/* Select the transmit rates to use for this frame. */
	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
	    (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) !=
	    IEEE80211_FC0_TYPE_DATA) {
		/* Use lowest rate for all tries. */
		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
		    (ic->ic_curmode == IEEE80211_MODE_11A) ?
		    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
	}
	else if (ic->ic_fixed_rate != -1) {
		/* Use same fixed rate for all tries. */
		ridx[0] = ridx[1] = ridx[2] = ridx[3] =
		    sc->sc_fixed_ridx;
	}
	else {
		int txrate = ni->ni_txrate;
		/* Use fallback table of the node. */
		for (i = 0; i < 4; i++) {
			ridx[i] = an->ridx[txrate];
			txrate = an->fallback[txrate];
		}
	}

	if (__predict_false(sc->sc_drvbpf != NULL)) {
		struct athn_tx_radiotap_header *tap = &sc->sc_txtap;

		tap->wt_flags = 0;
		/* Use initial transmit rate. */
		tap->wt_rate = athn_rates[ridx[0]].rate;
		tap->wt_chan_freq = htole16(ic->ic_curchan->ic_freq);
		tap->wt_chan_flags = htole16(ic->ic_curchan->ic_flags);
// XXX		tap->wt_hwqueue = qid;
		if (ridx[0] != ATHN_RIDX_CCK1 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;

		bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_txtap_len, m);
	}

	/* DMA map mbuf. */
	error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
	    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
	if (__predict_false(error != 0)) {
		if (error != EFBIG) {
			aprint_error_dev(sc->sc_dev,
			    "can't map mbuf (error %d)\n", error);
			m_freem(m);
			return error;
		}
		/*
		 * DMA mapping requires too many DMA segments; linearize
		 * mbuf in kernel virtual address space and retry.
		 */
		MGETHDR(m1, M_DONTWAIT, MT_DATA);
		if (m1 == NULL) {
			m_freem(m);
			return ENOBUFS;
		}
		if (m->m_pkthdr.len > (int)MHLEN) {
			MCLGET(m1, M_DONTWAIT);
			if (!(m1->m_flags & M_EXT)) {
				m_freem(m);
				m_freem(m1);
				return ENOBUFS;
			}
		}
		m_copydata(m, 0, m->m_pkthdr.len, mtod(m1, void *));
		m1->m_pkthdr.len = m1->m_len = m->m_pkthdr.len;
		m_freem(m);
		m = m1;

		error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
		    BUS_DMA_NOWAIT | BUS_DMA_WRITE);
		if (error != 0) {
			aprint_error_dev(sc->sc_dev,
			    "can't map mbuf (error %d)\n", error);
			m_freem(m);
			return error;
		}
	}
	bf->bf_m = m;
	bf->bf_ni = ni;
	bf->bf_txflags = txflags;

	wh = mtod(m, struct ieee80211_frame *);

	totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;

	/* Clear all Tx descriptors that we will use. */
	memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds));

	/* Setup first Tx descriptor. */
	ds = bf->bf_descs;

	ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK;
	txpower = AR_MAX_RATE_POWER; /* Get from per-rate registers. */
	ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower);

	ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type);

	if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
	    (hasqos && (qos & IEEE80211_QOS_ACKPOLICY_MASK) ==
	     IEEE80211_QOS_ACKPOLICY_NOACK))
		ds->ds_ctl1 |= AR_TXC1_NO_ACK;
#if notyet
	if (0 && k != NULL) {
		uintptr_t entry;

		/*
		 * Map 802.11 cipher to hardware encryption type and
		 * compute MIC+ICV overhead.
		 */
		totlen += k->wk_keylen;
		switch (k->wk_cipher->ic_cipher) {
		case IEEE80211_CIPHER_WEP:
			encrtype = AR_ENCR_TYPE_WEP;
			break;
		case IEEE80211_CIPHER_TKIP:
			encrtype = AR_ENCR_TYPE_TKIP;
			break;
		case IEEE80211_CIPHER_AES_OCB:
		case IEEE80211_CIPHER_AES_CCM:
			encrtype = AR_ENCR_TYPE_AES;
			break;
		default:
			panic("unsupported cipher");
		}
		/*
		 * NB: The key cache entry index is stored in the key
		 * private field when the key is installed.
		 */
		entry = (uintptr_t)k->k_priv;
		ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry);
		ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID;
	}
	else
#endif
		encrtype = AR_ENCR_TYPE_CLEAR;
	ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype);

	/* Check if frame must be protected using RTS/CTS or CTS-to-self. */
	if (!IEEE80211_IS_MULTICAST(wh->i_addr1)) {
		/* NB: Group frames are sent using CCK in 802.11b/g. */
		if (totlen > ic->ic_rtsthreshold) {
			ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
		}
		else if ((ic->ic_flags & IEEE80211_F_USEPROT) &&
		    athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) {
			if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
				ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
			else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
				ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE;
		}
	}
	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
		/* Disable multi-rate retries when protection is used. */
		ridx[1] = ridx[2] = ridx[3] = ridx[0];
	}
	/* Setup multi-rate retries. */
	for (i = 0; i < 4; i++) {
		series[i].hwrate = athn_rates[ridx[i]].hwrate;
		if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
		    ridx[i] != ATHN_RIDX_CCK1 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			series[i].hwrate |= 0x04;
		series[i].dur = 0;
	}
	if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
		/* Compute duration for each series. */
		for (i = 0; i < 4; i++) {
			series[i].dur = athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[ridx[i]].rspridx, ic->ic_flags);
		}
	}

	/* Write number of tries for each series. */
	ds->ds_ctl2 =
	    SM(AR_TXC2_XMIT_DATA_TRIES0, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES1, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES2, 2) |
	    SM(AR_TXC2_XMIT_DATA_TRIES3, 4);

	/* Tell HW to update duration field in 802.11 header. */
	if (type != AR_FRAME_TYPE_PSPOLL)
		ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA;

	/* Write Tx rate for each series. */
	ds->ds_ctl3 =
	    SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) |
	    SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) |
	    SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) |
	    SM(AR_TXC3_XMIT_RATE3, series[3].hwrate);

	/* Write duration for each series. */
	ds->ds_ctl4 =
	    SM(AR_TXC4_PACKET_DUR0, series[0].dur) |
	    SM(AR_TXC4_PACKET_DUR1, series[1].dur);
	ds->ds_ctl5 =
	    SM(AR_TXC5_PACKET_DUR2, series[2].dur) |
	    SM(AR_TXC5_PACKET_DUR3, series[3].dur);

	/* Use the same Tx chains for all tries. */
	ds->ds_ctl7 =
	    SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL1, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL2, sc->sc_txchainmask) |
	    SM(AR_TXC7_CHAIN_SEL3, sc->sc_txchainmask);
#ifdef notyet
#ifndef IEEE80211_NO_HT
	/* Use the same short GI setting for all tries. */
	if (ic->ic_flags & IEEE80211_F_SHGI)
		ds->ds_ctl7 |= AR_TXC7_GI0123;
	/* Use the same channel width for all tries. */
	if (ic->ic_flags & IEEE80211_F_CBW40)
		ds->ds_ctl7 |= AR_TXC7_2040_0123;
#endif
#endif

	if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
		uint8_t protridx, hwrate;
		uint16_t dur = 0;

		/* Use the same protection mode for all tries. */
		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
			ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01;
			ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23;
		}
		/* Select protection rate (suboptimal but ok). */
		protridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
		    ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2;
		if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
			/* Account for CTS duration. */
			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[protridx].rspridx, ic->ic_flags);
		}
		dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
		if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
			/* Account for ACK duration. */
			dur += athn_txtime(sc, IEEE80211_ACK_LEN,
			    athn_rates[ridx[0]].rspridx, ic->ic_flags);
		}
		/* Write protection frame duration and rate. */
		ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur);
		hwrate = athn_rates[protridx].hwrate;
		if (protridx == ATHN_RIDX_CCK2 &&
		    (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
			hwrate |= 0x04;
		ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate);
	}

	/* Finalize first Tx descriptor and fill others (if any). */
	ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen);

	lastds = NULL; /* XXX: gcc */
	for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
		ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
		ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
		    bf->bf_map->dm_segs[i].ds_len);

		if (i != bf->bf_map->dm_nsegs - 1)
			ds->ds_ctl1 |= AR_TXC1_MORE;
		ds->ds_link = 0;

		/* Chain Tx descriptor. */
		if (i != 0)
			lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
		lastds = ds;
	}
	bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
	    BUS_DMASYNC_PREWRITE);

	if (!SIMPLEQ_EMPTY(&txq->head))
		((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
	else
		AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr);
	txq->lastds = lastds;
	SIMPLEQ_REMOVE_HEAD(&sc->sc_txbufs, bf_list);
	SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list);

	ds = bf->bf_descs;
	DPRINTFN(DBG_TX, sc,
	    "Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
	    qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3);

	/* Kick Tx. */
	AR_WRITE(sc, AR_Q_TXE, 1 << qid);
	AR_WRITE_BARRIER(sc);
	return 0;
}

Static void
ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
{
	uint32_t reg;

	reg = IEEE80211_IS_CHAN_2GHZ(c) ?
	    AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
	if (!AR_SREV_9280_10_OR_LATER(sc)) {
		reg |= IEEE80211_IS_CHAN_2GHZ(c) ?
		    AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ;
	}
	else if (IEEE80211_IS_CHAN_5GHZ(c) &&
	    (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
		reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE;
	}
	AR_WRITE(sc, AR_PHY_MODE, reg);
	AR_WRITE_BARRIER(sc);
}

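/*
 * Synthesizer settling time; callers pass the result to DELAY(), so the
 * value is in microseconds.
 */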
1699 static __inline uint32_t
1700 ar5008_synth_delay(struct athn_softc *sc)
1701 {
1702 uint32_t synth_delay;
1703
1704 synth_delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY);
1705 if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
1706 synth_delay = (synth_delay * 4) / 22;
1707 else
1708 synth_delay = synth_delay / 10; /* in 100ns steps */
1709 return synth_delay;
1710 }
1711
1712 Static int
1713 ar5008_rf_bus_request(struct athn_softc *sc)
1714 {
1715 int ntries;
1716
1717 /* Request RF Bus grant. */
1718 AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1719 for (ntries = 0; ntries < 10000; ntries++) {
1720 if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN)
1721 return 0;
1722 DELAY(10);
1723 }
1724 DPRINTFN(DBG_RF, sc, "could not kill baseband Rx");
1725 return ETIMEDOUT;
1726 }
1727
1728 Static void
1729 ar5008_rf_bus_release(struct athn_softc *sc)
1730 {
1731
1732 /* Wait for the synthesizer to settle. */
1733 DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc));
1734
1735 /* Release the RF Bus grant. */
1736 AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0);
1737 AR_WRITE_BARRIER(sc);
1738 }
1739
1740 Static void
1741 ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
1742 struct ieee80211_channel *extc)
1743 {
1744 uint32_t phy;
1745
1746 if (AR_SREV_9285_10_OR_LATER(sc))
1747 phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO;
1748 else
1749 phy = 0;
1750 phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 |
1751 AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH;
1752 #ifndef IEEE80211_NO_HT
1753 if (extc != NULL) {
1754 phy |= AR_PHY_FC_DYN2040_EN;
1755 if (extc > c) /* XXX */
1756 phy |= AR_PHY_FC_DYN2040_PRI_CH;
1757 }
1758 #endif
1759 AR_WRITE(sc, AR_PHY_TURBO, phy);
1760
1761 AR_WRITE(sc, AR_2040_MODE,
1762 (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0);
1763
1764 /* Set global transmit timeout. */
1765 AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25));
1766 /* Set carrier sense timeout. */
1767 AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15));
1768 AR_WRITE_BARRIER(sc);
1769 }
1770
1771 Static void
1772 ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
1773 struct ieee80211_channel *extc)
1774 {
1775 uint32_t coeff, exp, man, reg;
1776
1777 /* Set Delta Slope (exponent and mantissa). */
1778 coeff = (100 << 24) / c->ic_freq;
1779 athn_get_delta_slope(coeff, &exp, &man);
1780 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1781
1782 reg = AR_READ(sc, AR_PHY_TIMING3);
1783 reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp);
1784 reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man);
1785 AR_WRITE(sc, AR_PHY_TIMING3, reg);
1786
1787 /* For Short GI, coeff is 9/10 that of normal coeff. */
1788 coeff = (9 * coeff) / 10;
1789 athn_get_delta_slope(coeff, &exp, &man);
1790 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1791
1792 reg = AR_READ(sc, AR_PHY_HALFGI);
1793 reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp);
1794 reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man);
1795 AR_WRITE(sc, AR_PHY_HALFGI, reg);
1796 AR_WRITE_BARRIER(sc);
1797 }
1798
1799 Static void
1800 ar5008_enable_antenna_diversity(struct athn_softc *sc)
1801 {
1802
1803 AR_SETBITS(sc, AR_PHY_CCK_DETECT,
1804 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV);
1805 AR_WRITE_BARRIER(sc);
1806 }
1807
1808 Static void
1809 ar5008_init_baseband(struct athn_softc *sc)
1810 {
1811 uint32_t synth_delay;
1812
1813 synth_delay = ar5008_synth_delay(sc);
1814 /* Activate the PHY (includes baseband activate and synthesizer on). */
1815 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1816 AR_WRITE_BARRIER(sc);
1817 DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay);
1818 }
1819
1820 Static void
1821 ar5008_disable_phy(struct athn_softc *sc)
1822 {
1823
1824 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1825 AR_WRITE_BARRIER(sc);
1826 }
1827
1828 Static void
1829 ar5008_init_chains(struct athn_softc *sc)
1830 {
1831
1832 if (sc->sc_rxchainmask == 0x5 || sc->sc_txchainmask == 0x5)
1833 AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN);
1834
1835 /* Setup chain masks. */
1836 if (sc->sc_mac_ver <= AR_SREV_VERSION_9160 &&
1837 (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5)) {
1838 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, 0x7);
1839 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7);
1840 }
1841 else {
1842 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
1843 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
1844 }
1845 AR_WRITE(sc, AR_SELFGEN_MASK, sc->sc_txchainmask);
1846 AR_WRITE_BARRIER(sc);
1847 }
1848
1849 Static void
1850 ar5008_set_rxchains(struct athn_softc *sc)
1851 {
1852
1853 if (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5) {
1854 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
1855 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
1856 AR_WRITE_BARRIER(sc);
1857 }
1858 }
1859
1860 #ifdef notused
1861 Static void
1862 ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
1863 {
1864 /* Sign-extends 9-bit value (assumes upper bits are zeroes). */
1865 #define SIGN_EXT(v) (((v) ^ 0x100) - 0x100)
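/*
 * e.g. SIGN_EXT(0x1f0) = (0x1f0 ^ 0x100) - 0x100 = 0xf0 - 0x100 = -16,
 * i.e. the two's-complement value of the 9-bit pattern 0x1f0.
 */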
1866 uint32_t reg;
1867 int i;
1868
1869 for (i = 0; i < sc->sc_nrxchains; i++) {
1870 reg = AR_READ(sc, AR_PHY_CCA(i));
1871 if (AR_SREV_9280_10_OR_LATER(sc))
1872 nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR);
1873 else
1874 nf[i] = MS(reg, AR_PHY_MINCCA_PWR);
1875 nf[i] = SIGN_EXT(nf[i]);
1876
1877 reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
1878 if (AR_SREV_9280_10_OR_LATER(sc))
1879 nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR);
1880 else
1881 nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR);
1882 nf_ext[i] = SIGN_EXT(nf_ext[i]);
1883 }
1884 #undef SIGN_EXT
1885 }
1886 #endif /* notused */
1887
1888 #ifdef notused
1889 Static void
1890 ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
1891 {
1892 uint32_t reg;
1893 int i;
1894
1895 for (i = 0; i < sc->sc_nrxchains; i++) {
1896 reg = AR_READ(sc, AR_PHY_CCA(i));
1897 reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]);
1898 AR_WRITE(sc, AR_PHY_CCA(i), reg);
1899
1900 reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
1901 reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]);
1902 AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg);
1903 }
1904 AR_WRITE_BARRIER(sc);
1905 }
1906 #endif /* notused */
1907
1908 #ifdef notused
1909 Static void
1910 ar5008_get_noisefloor(struct athn_softc *sc, struct ieee80211_channel *c)
1911 {
1912 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1913 int i;
1914
1915 if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) {
1916 /* Noisefloor calibration not finished. */
1917 return;
1918 }
1919 /* Noisefloor calibration is finished. */
1920 ar5008_read_noisefloor(sc, nf, nf_ext);
1921
1922 /* Update noisefloor history. */
1923 for (i = 0; i < sc->sc_nrxchains; i++) {
1924 sc->sc_nf_hist[sc->sc_nf_hist_cur].nf[i] = nf[i];
1925 sc->sc_nf_hist[sc->sc_nf_hist_cur].nf_ext[i] = nf_ext[i];
1926 }
1927 if (++sc->sc_nf_hist_cur >= ATHN_NF_CAL_HIST_MAX)
1928 sc->sc_nf_hist_cur = 0;
1929 }
1930 #endif /* notused */
1931
1932 #ifdef notused
1933 Static void
1934 ar5008_bb_load_noisefloor(struct athn_softc *sc)
1935 {
1936 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1937 int i, ntries;
1938
1939 /* Write filtered noisefloor values. */
1940 for (i = 0; i < sc->sc_nrxchains; i++) {
1941 nf[i] = sc->sc_nf_priv[i] * 2;
1942 nf_ext[i] = sc->sc_nf_ext_priv[i] * 2;
1943 }
1944 ar5008_write_noisefloor(sc, nf, nf_ext);
1945
1946 /* Load filtered noisefloor values into baseband. */
1947 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1948 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1949 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1950 	/* Wait for the load to complete (at most 1000 * 50us = 50ms). */
1951 for (ntries = 0; ntries < 1000; ntries++) {
1952 if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF))
1953 break;
1954 DELAY(50);
1955 }
1956 if (ntries == 1000) {
1957 DPRINTFN(DBG_RF, sc, "failed to load noisefloor values\n");
1958 return;
1959 }
1960
1961 /* Restore noisefloor values to initial (max) values. */
1962 for (i = 0; i < AR_MAX_CHAINS; i++)
1963 nf[i] = nf_ext[i] = -50 * 2;
1964 ar5008_write_noisefloor(sc, nf, nf_ext);
1965 }
1966 #endif /* notused */
1967
1968 #ifdef notused
1969 Static void
1970 ar5008_noisefloor_calib(struct athn_softc *sc)
1971 {
1972
1973 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1974 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1975 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1976 AR_WRITE_BARRIER(sc);
1977 }
1978 #endif /* notused */
1979
1980 Static void
1981 ar5008_do_noisefloor_calib(struct athn_softc *sc)
1982 {
1983
1984 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1985 AR_WRITE_BARRIER(sc);
1986 }
1987
1988 Static void
1989 ar5008_do_calib(struct athn_softc *sc)
1990 {
1991 uint32_t mode, reg;
1992 int log;
1993
1994 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0);
1995 log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2;
1996 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log);
1997 AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg);
1998
1999 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
2000 mode = AR_PHY_CALMODE_ADC_GAIN;
2001 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
2002 mode = AR_PHY_CALMODE_ADC_DC_PER;
2003 else /* ATHN_CAL_IQ */
2004 mode = AR_PHY_CALMODE_IQ;
2005 AR_WRITE(sc, AR_PHY_CALMODE, mode);
2006
2007 DPRINTFN(DBG_RF, sc, "starting calibration mode=0x%x\n", mode);
2008 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL);
2009 AR_WRITE_BARRIER(sc);
2010 }
2011
2012 Static void
2013 ar5008_next_calib(struct athn_softc *sc)
2014 {
2015
2016 /* Check if we have any calibration in progress. */
2017 if (sc->sc_cur_calib_mask != 0) {
2018 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) &
2019 AR_PHY_TIMING_CTRL4_DO_CAL)) {
2020 /* Calibration completed for current sample. */
2021 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
2022 ar5008_calib_adc_gain(sc);
2023 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
2024 ar5008_calib_adc_dc_off(sc);
2025 else /* ATHN_CAL_IQ */
2026 ar5008_calib_iq(sc);
2027 }
2028 }
2029 }
2030
2031 Static void
2032 ar5008_calib_iq(struct athn_softc *sc)
2033 {
2034 struct athn_iq_cal *cal;
2035 uint32_t reg, i_coff_denom, q_coff_denom;
2036 int32_t i_coff, q_coff;
2037 int i, iq_corr_neg;
2038
2039 for (i = 0; i < AR_MAX_CHAINS; i++) {
2040 cal = &sc->sc_calib.iq[i];
2041
2042 /* Accumulate IQ calibration measures (clear on read). */
2043 cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2044 cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2045 cal->iq_corr_meas +=
2046 (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2047 }
2048 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2049 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2050 /* Not enough samples accumulated, continue. */
2051 ar5008_do_calib(sc);
2052 return;
2053 }
2054
2055 for (i = 0; i < sc->sc_nrxchains; i++) {
2056 cal = &sc->sc_calib.iq[i];
2057
2058 if (cal->pwr_meas_q == 0)
2059 continue;
2060
2061 		if ((iq_corr_neg = (cal->iq_corr_meas < 0)) != 0)
2062 cal->iq_corr_meas = -cal->iq_corr_meas;
2063
2064 i_coff_denom =
2065 (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
2066 q_coff_denom = cal->pwr_meas_q / 64;
2067
2068 if (i_coff_denom == 0 || q_coff_denom == 0)
2069 continue; /* Prevents division by zero. */
2070
2071 i_coff = cal->iq_corr_meas / i_coff_denom;
2072 q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;
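		/*
		 * Illustrative numbers: pwr_meas_i = 100000, pwr_meas_q =
		 * 98000 and |iq_corr_meas| = 4000 give i_coff_denom =
		 * (50000 + 49000) / 128 = 773 and q_coff_denom =
		 * 98000 / 64 = 1531, hence i_coff = 4000 / 773 = 5 and
		 * q_coff = 100000 / 1531 - 64 = 1 before the adjustments
		 * below.
		 */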
2073
2074 /* Negate i_coff if iq_corr_meas is positive. */
2075 if (!iq_corr_neg)
2076 i_coff = 0x40 - (i_coff & 0x3f);
2077 if (q_coff > 15)
2078 q_coff = 15;
2079 else if (q_coff <= -16)
2080 q_coff = -16; /* XXX Linux has a bug here? */
2081
2082 DPRINTFN(DBG_RF, sc, "IQ calibration for chain %d\n", i);
2083 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i));
2084 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff);
2085 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff);
2086 AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg);
2087 }
2088
2089 /* Apply new settings. */
2090 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,
2091 AR_PHY_TIMING_CTRL4_IQCORR_ENABLE);
2092 AR_WRITE_BARRIER(sc);
2093
2094 /* IQ calibration done. */
2095 sc->sc_cur_calib_mask &= ~ATHN_CAL_IQ;
2096 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2097 }
2098
2099 Static void
2100 ar5008_calib_adc_gain(struct athn_softc *sc)
2101 {
2102 struct athn_adc_cal *cal;
2103 uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2104 int i;
2105
2106 for (i = 0; i < AR_MAX_CHAINS; i++) {
2107 cal = &sc->sc_calib.adc_gain[i];
2108
2109 /* Accumulate ADC gain measures (clear on read). */
2110 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2111 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2112 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2113 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2114 }
2115 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2116 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2117 /* Not enough samples accumulated, continue. */
2118 ar5008_do_calib(sc);
2119 return;
2120 }
2121
2122 for (i = 0; i < sc->sc_nrxchains; i++) {
2123 cal = &sc->sc_calib.adc_gain[i];
2124
2125 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2126 continue; /* Prevents division by zero. */
2127
2128 gain_mismatch_i =
2129 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2130 gain_mismatch_q =
2131 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
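		/*
		 * E.g. pwr_meas_even_i = 32000 and pwr_meas_odd_i = 31000
		 * give gain_mismatch_i = (32000 * 32) / 31000 = 33, i.e. a
		 * gain ratio of roughly 33/32 (illustrative numbers only).
		 */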
2132
2133 DPRINTFN(DBG_RF, sc, "ADC gain calibration for chain %d\n", i);
2134 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2135 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i);
2136 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q);
2137 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2138 }
2139
2140 /* Apply new settings. */
2141 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2142 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE);
2143 AR_WRITE_BARRIER(sc);
2144
2145 /* ADC gain calibration done. */
2146 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_GAIN;
2147 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2148 }
2149
2150 Static void
2151 ar5008_calib_adc_dc_off(struct athn_softc *sc)
2152 {
2153 struct athn_adc_cal *cal;
2154 int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
2155 uint32_t reg;
2156 int count, i;
2157
2158 for (i = 0; i < AR_MAX_CHAINS; i++) {
2159 cal = &sc->sc_calib.adc_dc_offset[i];
2160
2161 /* Accumulate ADC DC offset measures (clear on read). */
2162 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2163 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2164 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2165 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2166 }
2167 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2168 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2169 /* Not enough samples accumulated, continue. */
2170 ar5008_do_calib(sc);
2171 return;
2172 }
2173
2174 if (AR_SREV_9280_10_OR_LATER(sc))
2175 count = (1 << (10 + 5));
2176 else
2177 count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES;
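	/*
	 * count is the total number of samples accumulated: a single run of
	 * 1 << (10 + 5) samples on AR9280 and later, or AR_CAL_SAMPLES runs
	 * of 1 << (2 + 5) samples each on older parts; the exponents match
	 * the IQCAL_LOG_COUNT_MAX values programmed in ar5008_do_calib().
	 */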
2178 for (i = 0; i < sc->sc_nrxchains; i++) {
2179 cal = &sc->sc_calib.adc_dc_offset[i];
2180
2181 dc_offset_mismatch_i =
2182 (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
2183 dc_offset_mismatch_q =
2184 (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;
2185
2186 DPRINTFN(DBG_RF, sc, "ADC DC offset calibration for chain %d\n", i);
2187 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2188 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,
2189 dc_offset_mismatch_q);
2190 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,
2191 dc_offset_mismatch_i);
2192 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2193 }
2194
2195 /* Apply new settings. */
2196 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2197 AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE);
2198 AR_WRITE_BARRIER(sc);
2199
2200 /* ADC DC offset calibration done. */
2201 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_DC;
2202 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2203 }
2204
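/*
 * Each AR_PHY_POWER_TX_RATE register below packs four per-rate transmit
 * power values, one 6-bit field per byte; the tPow2x naming of the
 * calibration data suggests these are in half-dB units.
 */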
2205 PUBLIC void
2206 ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT])
2207 {
2208
2209 AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,
2210 (power[ATHN_POWER_OFDM18 ] & 0x3f) << 24 |
2211 (power[ATHN_POWER_OFDM12 ] & 0x3f) << 16 |
2212 (power[ATHN_POWER_OFDM9 ] & 0x3f) << 8 |
2213 (power[ATHN_POWER_OFDM6 ] & 0x3f));
2214 AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,
2215 (power[ATHN_POWER_OFDM54 ] & 0x3f) << 24 |
2216 (power[ATHN_POWER_OFDM48 ] & 0x3f) << 16 |
2217 (power[ATHN_POWER_OFDM36 ] & 0x3f) << 8 |
2218 (power[ATHN_POWER_OFDM24 ] & 0x3f));
2219 AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,
2220 (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |
2221 (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |
2222 (power[ATHN_POWER_XR ] & 0x3f) << 8 |
2223 (power[ATHN_POWER_CCK1_LP ] & 0x3f));
2224 AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,
2225 (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |
2226 (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |
2227 (power[ATHN_POWER_CCK55_SP] & 0x3f) << 8 |
2228 (power[ATHN_POWER_CCK55_LP] & 0x3f));
2229 #ifndef IEEE80211_NO_HT
2230 AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,
2231 (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |
2232 (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |
2233 (power[ATHN_POWER_HT20(1) ] & 0x3f) << 8 |
2234 (power[ATHN_POWER_HT20(0) ] & 0x3f));
2235 AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,
2236 (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |
2237 (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 |
2238 (power[ATHN_POWER_HT20(5) ] & 0x3f) << 8 |
2239 (power[ATHN_POWER_HT20(4) ] & 0x3f));
2240 AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
2241 (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 |
2242 (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 |
2243 (power[ATHN_POWER_HT40(1) ] & 0x3f) << 8 |
2244 (power[ATHN_POWER_HT40(0) ] & 0x3f));
2245 AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
2246 (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 |
2247 (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 |
2248 (power[ATHN_POWER_HT40(5) ] & 0x3f) << 8 |
2249 (power[ATHN_POWER_HT40(4) ] & 0x3f));
2250 AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
2251 (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
2252 (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 |
2253 (power[ATHN_POWER_OFDM_DUP] & 0x3f) << 8 |
2254 (power[ATHN_POWER_CCK_DUP ] & 0x3f));
2255 #endif
2256 AR_WRITE_BARRIER(sc);
2257 }
2258
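/*
 * Build the spur mitigation masks around the given bin: pilot/channel
 * mask bits are set for entries within 100 of the spur over the
 * -6000..6000 range (in steps of 100), while the Viterbi masks use a
 * tighter +/-75 window, with m[] holding the negative entries and p[]
 * the positive ones.
 */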
2259 PUBLIC void
2260 ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
2261 {
2262 uint32_t mask[4], reg;
2263 uint8_t m[62], p[62]; /* XXX use bit arrays? */
2264 int i, bit, cur;
2265
2266 /* Compute pilot mask. */
2267 cur = -6000;
2268 for (i = 0; i < 4; i++) {
2269 mask[i] = 0;
2270 for (bit = 0; bit < 30; bit++) {
2271 if (abs(cur - bin) < 100)
2272 mask[i] |= 1 << bit;
2273 cur += 100;
2274 }
2275 if (cur == 0) /* Skip entry "0". */
2276 cur = 100;
2277 }
2278 /* Write entries from -6000 to -3100. */
2279 AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
2280 AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
2281 /* Write entries from -3000 to -100. */
2282 AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
2283 AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
2284 /* Write entries from 100 to 3000. */
2285 AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
2286 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
2287 /* Write entries from 3100 to 6000. */
2288 AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
2289 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);
2290
2291 /* Compute viterbi mask. */
2292 for (cur = 6100; cur >= 0; cur -= 100)
2293 p[+cur / 100] = abs(cur - bin) < 75;
2294 for (cur = -100; cur >= -6100; cur -= 100)
2295 m[-cur / 100] = abs(cur - bin) < 75;
2296
2297 /* Write viterbi mask (XXX needs to be reworked). */
2298 reg =
2299 m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
2300 m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
2301 m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] << 8 |
2302 m[58] << 6 | m[59] << 4 | m[60] << 2 | m[61] << 0;
2303 AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
2304 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);
2305
2306 /* XXX m[48] should be m[38] ? */
2307 reg = m[31] << 28 | m[32] << 26 | m[33] << 24 |
2308 m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
2309 m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] << 8 |
2310 m[42] << 6 | m[43] << 4 | m[44] << 2 | m[45] << 0;
2311 AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
2312 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);
2313
2314 	/* XXX Even entries duplicated; m[17]/m[19]/m[21]/m[23] never used. */
2315 reg =
2316 m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
2317 m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
2318 m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] << 8 |
2319 m[27] << 6 | m[28] << 4 | m[29] << 2 | m[30] << 0;
2320 AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
2321 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);
2322
2323 reg =
2324 m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
2325 m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
2326 m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] << 8 |
2327 m[12] << 6 | m[13] << 4 | m[14] << 2 | m[15] << 0;
2328 AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
2329 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);
2330
2331 reg = p[15] << 28 | p[14] << 26 | p[13] << 24 |
2332 p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
2333 p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] << 8 |
2334 p[ 4] << 6 | p[ 3] << 4 | p[ 2] << 2 | p[ 1] << 0;
2335 AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
2336 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);
2337
2338 reg = p[30] << 28 | p[29] << 26 | p[28] << 24 |
2339 p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
2340 p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] << 8 |
2341 p[19] << 6 | p[18] << 4 | p[17] << 2 | p[16] << 0;
2342 AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
2343 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);
2344
2345 reg = p[45] << 28 | p[44] << 26 | p[43] << 24 |
2346 p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
2347 p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] << 8 |
2348 p[34] << 6 | p[33] << 4 | p[32] << 2 | p[31] << 0;
2349 AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
2350 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);
2351
2352 reg =
2353 p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
2354 p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
2355 p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] << 8 |
2356 p[49] << 6 | p[48] << 4 | p[47] << 2 | p[46] << 0;
2357 AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
2358 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
2359 AR_WRITE_BARRIER(sc);
2360 }
2361
2362 Static void
2363 ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
2364 struct ieee80211_channel *extc)
2365 {
2366 struct athn_ops *ops = &sc->sc_ops;
2367 const struct athn_ini *ini = sc->sc_ini;
2368 const uint32_t *pvals;
2369 uint32_t reg;
2370 int i;
2371
2372 AR_WRITE(sc, AR_PHY(0), 0x00000007);
2373 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
2374
2375 if (!AR_SINGLE_CHIP(sc))
2376 ar5416_reset_addac(sc, c);
2377
2378 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
2379
2380 /* First initialization step (depends on channel band/bandwidth). */
2381 #ifndef IEEE80211_NO_HT
2382 if (extc != NULL) {
2383 if (IEEE80211_IS_CHAN_2GHZ(c))
2384 pvals = ini->vals_2g40;
2385 else
2386 pvals = ini->vals_5g40;
2387 }
2388 else
2389 #endif
2390 {
2391 if (IEEE80211_IS_CHAN_2GHZ(c))
2392 pvals = ini->vals_2g20;
2393 else
2394 pvals = ini->vals_5g20;
2395 }
2396 DPRINTFN(DBG_INIT, sc, "writing modal init vals\n");
2397 for (i = 0; i < ini->nregs; i++) {
2398 uint32_t val = pvals[i];
2399
2400 /* Fix AR_AN_TOP2 initialization value if required. */
2401 if (ini->regs[i] == AR_AN_TOP2 &&
2402 (sc->sc_flags & ATHN_FLAG_AN_TOP2_FIXUP))
2403 val &= ~AR_AN_TOP2_PWDCLKIND;
2404 AR_WRITE(sc, ini->regs[i], val);
2405 if (AR_IS_ANALOG_REG(ini->regs[i])) {
2406 AR_WRITE_BARRIER(sc);
2407 DELAY(100);
2408 }
2409 if ((i & 0x1f) == 0)
2410 DELAY(1);
2411 }
2412 AR_WRITE_BARRIER(sc);
2413
2414 if (sc->sc_rx_gain != NULL)
2415 ar9280_reset_rx_gain(sc, c);
2416 if (sc->sc_tx_gain != NULL)
2417 ar9280_reset_tx_gain(sc, c);
2418
2419 if (AR_SREV_9271_10(sc)) {
2420 AR_WRITE(sc, AR_PHY(68), 0x30002311);
2421 AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
2422 }
2423 AR_WRITE_BARRIER(sc);
2424
2425 /* Second initialization step (common to all channels). */
2426 DPRINTFN(DBG_INIT, sc, "writing common init vals\n");
2427 for (i = 0; i < ini->ncmregs; i++) {
2428 AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
2429 if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
2430 AR_WRITE_BARRIER(sc);
2431 DELAY(100);
2432 }
2433 if ((i & 0x1f) == 0)
2434 DELAY(1);
2435 }
2436 AR_WRITE_BARRIER(sc);
2437
2438 if (!AR_SINGLE_CHIP(sc))
2439 ar5416_reset_bb_gain(sc, c);
2440
2441 if (IEEE80211_IS_CHAN_5GHZ(c) &&
2442 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
2443 /* Update modal values for fast PLL clock. */
2444 #ifndef IEEE80211_NO_HT
2445 if (extc != NULL)
2446 pvals = ini->fastvals_5g40;
2447 else
2448 #endif
2449 pvals = ini->fastvals_5g20;
2450 DPRINTFN(DBG_INIT, sc, "writing fast pll clock init vals\n");
2451 for (i = 0; i < ini->nfastregs; i++) {
2452 AR_WRITE(sc, ini->fastregs[i], pvals[i]);
2453 if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
2454 AR_WRITE_BARRIER(sc);
2455 DELAY(100);
2456 }
2457 if ((i & 0x1f) == 0)
2458 DELAY(1);
2459 }
2460 }
2461
2462 /*
2463 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted
2464 * descriptor status.
2465 */
2466 AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);
2467
2468 /* Hardware workarounds for occasional Rx data corruption. */
2469 if (AR_SREV_9280_10_OR_LATER(sc)) {
2470 reg = AR_READ(sc, AR_PCU_MISC_MODE2);
2471 if (!AR_SREV_9271(sc))
2472 reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
2473 if (AR_SREV_9287_10_OR_LATER(sc))
2474 reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
2475 AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);
2477 }
2478 else if (AR_SREV_5416_20_OR_LATER(sc)) {
2479 /* Disable baseband clock gating. */
2480 AR_WRITE(sc, AR_PHY(651), 0x11);
2481
2482 if (AR_SREV_9160(sc)) {
2483 /* Disable RIFS search to fix baseband hang. */
2484 AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
2485 AR_PHY_RIFS_INIT_DELAY_M);
2486 }
2487 }
2488 AR_WRITE_BARRIER(sc);
2489
2490 ar5008_set_phy(sc, c, extc);
2491 ar5008_init_chains(sc);
2492
2493 if (sc->sc_flags & ATHN_FLAG_OLPC) {
2494 extern int ticks;
2495 sc->sc_olpc_ticks = ticks;
2496 ops->olpc_init(sc);
2497 }
2498
2499 ops->set_txpower(sc, c, extc);
2500
2501 if (!AR_SINGLE_CHIP(sc))
2502 ar5416_rf_reset(sc, c);
2503 }
2504
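/*
 * Interpolate the power detector reading (Vpd) for a given target power
 * from the calibration intercept points of one pier; the indices are
 * clamped so that powers outside the measured range reuse the nearest
 * endpoint.
 */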
2505 Static uint8_t
2506 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2507 int nicepts)
2508 {
2509 uint8_t vpd;
2510 int i, lo, hi;
2511
2512 for (i = 0; i < nicepts; i++)
2513 if (pwrPdg[i] > pwr)
2514 break;
2515 hi = i;
2516 lo = hi - 1;
2517 if (lo == -1)
2518 lo = hi;
2519 else if (hi == nicepts)
2520 hi = lo;
2521
2522 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2523 pwrPdg[hi], vpdPdg[hi]);
2524 return vpd;
2525 }
2526
2527 PUBLIC void
2528 ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
2529 struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
2530 int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
2531 {
2532 #define DB(x) ((x) / 2) /* Convert half dB to dB. */
2533 uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
2534 uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
2535 uint8_t lovpd, hivpd, boundary;
2536 int16_t ss, delta, vpdstep, val;
2537 int i, j, npdadcs, nvpds, maxidx, tgtidx;
2538
2539 /* Compute min and max power in half dB for each pdGain. */
2540 for (i = 0; i < nxpdgains; i++) {
2541 minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
2542 maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
2543 hipier->pwr[i][nicepts - 1]);
2544 }
2545
2546 	/* Fill the power detector ADC (PDADC) table. */
2547 npdadcs = 0;
2548 for (i = 0; i < nxpdgains; i++) {
2549 if (i != nxpdgains - 1)
2550 boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
2551 else
2552 boundaries[i] = DB(maxpwr[i]);
2553 if (boundaries[i] > AR_MAX_RATE_POWER)
2554 boundaries[i] = AR_MAX_RATE_POWER;
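		/*
		 * Illustrative numbers: maxpwr[i] = 40 and minpwr[i + 1] =
		 * 44 (both in half dB) give a boundary of DB(84) / 2 = 21,
		 * the midpoint between the two pdGain power ranges.
		 */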
2555
2556 if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
2557 /* Fix the gain delta (AR5416 1.0 only). */
2558 delta = boundaries[0] - 23;
2559 boundaries[0] = 23;
2560 }
2561 else
2562 delta = 0;
2563
2564 /* Find starting index for this pdGain. */
2565 if (i != 0) {
2566 ss = boundaries[i - 1] - DB(minpwr[i]) -
2567 overlap + 1 + delta;
2568 }
2569 else if (AR_SREV_9280_10_OR_LATER(sc))
2570 ss = -DB(minpwr[i]);
2571 else
2572 ss = 0;
2573
2574 /* Compute Vpd table for this pdGain. */
2575 nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
2576 memset(vpd, 0, sizeof(vpd));
2577 pwr = minpwr[i];
2578 for (j = 0; j < nvpds; j++) {
2579 /* Get lower and higher Vpd. */
2580 lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
2581 lopier->vpd[i], nicepts);
2582 hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
2583 hipier->vpd[i], nicepts);
2584
2585 /* Interpolate the final Vpd. */
2586 vpd[j] = athn_interpolate(fbin,
2587 lopier->fbin, lovpd, hipier->fbin, hivpd);
2588
2589 			pwr += 2;	/* pwr is in half dB; advance by 1 dB. */
2590 }
2591
2592 /* Extrapolate data for ss < 0. */
2593 if (vpd[1] > vpd[0])
2594 vpdstep = vpd[1] - vpd[0];
2595 else
2596 vpdstep = 1;
2597 while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2598 val = vpd[0] + ss * vpdstep;
2599 pdadcs[npdadcs++] = MAX(val, 0);
2600 ss++;
2601 }
2602
2603 tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
2604 maxidx = MIN(tgtidx, nvpds);
2605 while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
2606 pdadcs[npdadcs++] = vpd[ss++];
2607
2608 if (tgtidx < maxidx)
2609 continue;
2610
2611 /* Extrapolate data for maxidx <= ss <= tgtidx. */
2612 if (vpd[nvpds - 1] > vpd[nvpds - 2])
2613 vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
2614 else
2615 vpdstep = 1;
2616 while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2617 val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
2618 pdadcs[npdadcs++] = MIN(val, 255);
2619 ss++;
2620 }
2621 }
2622
2623 /* Fill remaining PDADC and boundaries entries. */
2624 if (AR_SREV_9285(sc))
2625 boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
2626 else /* Fill with latest. */
2627 boundary = boundaries[nxpdgains - 1];
2628
2629 for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
2630 boundaries[nxpdgains] = boundary;
2631
2632 for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
2633 pdadcs[npdadcs] = pdadcs[npdadcs - 1];
2634 #undef DB
2635 }
2636
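/*
 * Look up the per-rate target powers for a channel by interpolating
 * between the two closest calibration channels; channels below or above
 * the calibrated range reuse the nearest entry.  The same scheme is used
 * for the HT targets in ar5008_get_ht_tpow() below.
 */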
2637 PUBLIC void
2638 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2639 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2640 uint8_t tpow[4])
2641 {
2642 uint8_t fbin;
2643 int i, lo, hi;
2644
2645 /* Find interval (lower and upper indices). */
2646 fbin = athn_chan2fbin(c);
2647 for (i = 0; i < nchans; i++) {
2648 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2649 tgt[i].bChannel > fbin)
2650 break;
2651 }
2652 hi = i;
2653 lo = hi - 1;
2654 if (lo == -1)
2655 lo = hi;
2656 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2657 hi = lo;
2658
2659 /* Interpolate values. */
2660 for (i = 0; i < 4; i++) {
2661 tpow[i] = athn_interpolate(fbin,
2662 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2663 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2664 }
2665 /* XXX Apply conformance testing limit. */
2666 }
2667
2668 #ifndef IEEE80211_NO_HT
2669 PUBLIC void
2670 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2671 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2672 uint8_t tpow[8])
2673 {
2674 uint8_t fbin;
2675 int i, lo, hi;
2676
2677 /* Find interval (lower and upper indices). */
2678 fbin = athn_chan2fbin(c);
2679 for (i = 0; i < nchans; i++) {
2680 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2681 tgt[i].bChannel > fbin)
2682 break;
2683 }
2684 hi = i;
2685 lo = hi - 1;
2686 if (lo == -1)
2687 lo = hi;
2688 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2689 hi = lo;
2690
2691 /* Interpolate values. */
2692 for (i = 0; i < 8; i++) {
2693 tpow[i] = athn_interpolate(fbin,
2694 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2695 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2696 }
2697 /* XXX Apply conformance testing limit. */
2698 }
2699 #endif
2700
2701 /*
2702 * Adaptive noise immunity.
2703 */
2704 Static void
2705 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2706 {
2707 int high = level == 4;
2708 uint32_t reg;
2709
2710 reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2711 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2712 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2713
2714 reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2715 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2716 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2717 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2718
2719 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2720 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2721 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2722
2723 AR_WRITE_BARRIER(sc);
2724 }
2725
2726 Static void
2727 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2728 {
2729 uint32_t reg;
2730
2731 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2732 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2733 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2734 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2735 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2736
2737 reg = AR_READ(sc, AR_PHY_SFCORR);
2738 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2739 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2740 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2741 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2742
2743 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2744 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2745 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2746 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2747 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2748 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2749
2750 AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2751 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2752 AR_WRITE_BARRIER(sc);
2753 }
2754
2755 Static void
2756 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2757 {
2758 uint32_t reg;
2759
2760 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2761 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2762 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2763 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2764 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2765
2766 reg = AR_READ(sc, AR_PHY_SFCORR);
2767 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2768 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2769 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2770 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2771
2772 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2773 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2774 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2775 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2776 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2777 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2778
2779 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2780 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2781 AR_WRITE_BARRIER(sc);
2782 }
2783
2784 Static void
2785 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2786 {
2787 uint32_t reg;
2788
2789 reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2790 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2791 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2792 AR_WRITE_BARRIER(sc);
2793 }
2794
2795 Static void
2796 ar5008_set_firstep_level(struct athn_softc *sc, int level)
2797 {
2798 uint32_t reg;
2799
2800 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2801 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2802 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2803 AR_WRITE_BARRIER(sc);
2804 }
2805
2806 Static void
2807 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2808 {
2809 uint32_t reg;
2810
2811 reg = AR_READ(sc, AR_PHY_TIMING5);
2812 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2813 AR_WRITE(sc, AR_PHY_TIMING5, reg);
2814 AR_WRITE_BARRIER(sc);
2815 }
2816