v3.1
   1/*
   2 * Copyright (c) 2008-2011 Atheros Communications Inc.
   3 *
   4 * Permission to use, copy, modify, and/or distribute this software for any
   5 * purpose with or without fee is hereby granted, provided that the above
   6 * copyright notice and this permission notice appear in all copies.
   7 *
   8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15 */
  16
  17#include <linux/io.h>
  18#include <linux/slab.h>
  19#include <asm/unaligned.h>
  20
  21#include "hw.h"
  22#include "hw-ops.h"
  23#include "rc.h"
  24#include "ar9003_mac.h"
  25
  26static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
  27
  28MODULE_AUTHOR("Atheros Communications");
  29MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
  30MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
  31MODULE_LICENSE("Dual BSD/GPL");
  32
  33static int __init ath9k_init(void)
  34{
  35	return 0;
  36}
  37module_init(ath9k_init);
  38
  39static void __exit ath9k_exit(void)
  40{
  41	return;
  42}
  43module_exit(ath9k_exit);
  44
  45/* Private hardware callbacks */
  46
  47static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
  48{
  49	ath9k_hw_private_ops(ah)->init_cal_settings(ah);
  50}
  51
  52static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
  53{
  54	ath9k_hw_private_ops(ah)->init_mode_regs(ah);
  55}
  56
  57static u32 ath9k_hw_compute_pll_control(struct ath_hw *ah,
  58					struct ath9k_channel *chan)
  59{
  60	return ath9k_hw_private_ops(ah)->compute_pll_control(ah, chan);
  61}
  62
  63static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
  64{
  65	if (!ath9k_hw_private_ops(ah)->init_mode_gain_regs)
  66		return;
  67
  68	ath9k_hw_private_ops(ah)->init_mode_gain_regs(ah);
  69}
  70
  71static void ath9k_hw_ani_cache_ini_regs(struct ath_hw *ah)
  72{
  73	/* You will not have this callback if using the old ANI */
  74	if (!ath9k_hw_private_ops(ah)->ani_cache_ini_regs)
  75		return;
  76
  77	ath9k_hw_private_ops(ah)->ani_cache_ini_regs(ah);
  78}
  79
  80/********************/
  81/* Helper Functions */
  82/********************/
  83
  84static void ath9k_hw_set_clockrate(struct ath_hw *ah)
  85{
  86	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
  87	struct ath_common *common = ath9k_hw_common(ah);
  88	unsigned int clockrate;
  89
  90	/* AR9287 v1.3+ uses async FIFO and runs the MAC at 117 MHz */
  91	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah))
  92		clockrate = 117;
  93	else if (!ah->curchan) /* should really check for CCK instead */
  94		clockrate = ATH9K_CLOCK_RATE_CCK;
  95	else if (conf->channel->band == IEEE80211_BAND_2GHZ)
  96		clockrate = ATH9K_CLOCK_RATE_2GHZ_OFDM;
  97	else if (ah->caps.hw_caps & ATH9K_HW_CAP_FASTCLOCK)
  98		clockrate = ATH9K_CLOCK_FAST_RATE_5GHZ_OFDM;
  99	else
 100		clockrate = ATH9K_CLOCK_RATE_5GHZ_OFDM;
 101
 102	if (conf_is_ht40(conf))
 103		clockrate *= 2;
 104
 105	if (ah->curchan) {
 106		if (IS_CHAN_HALF_RATE(ah->curchan))
 107			clockrate /= 2;
 108		if (IS_CHAN_QUARTER_RATE(ah->curchan))
 109			clockrate /= 4;
 110	}
 111
 112	common->clockrate = clockrate;
 113}
 114
 115static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
 116{
 117	struct ath_common *common = ath9k_hw_common(ah);
 118
 119	return usecs * common->clockrate;
 120}
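    /*
     * A rough worked example, assuming the usual ath9k clock-rate
     * constants (44 MHz for 2 GHz OFDM, 40/44 MHz for 5 GHz, 22 MHz for
     * CCK): on an HT20 2 GHz channel common->clockrate is 44, so
     * ath9k_hw_mac_to_clks(ah, 10) returns 440 clocks; on HT40 the rate
     * is doubled and the same call returns 880.
     */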
 121
 122bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
 123{
 124	int i;
 125
 126	BUG_ON(timeout < AH_TIME_QUANTUM);
 127
 128	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
 129		if ((REG_READ(ah, reg) & mask) == val)
 130			return true;
 131
 132		udelay(AH_TIME_QUANTUM);
 133	}
 134
 135	ath_dbg(ath9k_hw_common(ah), ATH_DBG_ANY,
 136		"timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
 137		timeout, reg, REG_READ(ah, reg), mask, val);
 138
 139	return false;
 140}
 141EXPORT_SYMBOL(ath9k_hw_wait);
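    /*
     * Typical use, as later in this file: poll a register until a masked
     * field reaches the expected value, e.g.
     *
     *	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT))
     *		return false;
     *
     * The call busy-waits in AH_TIME_QUANTUM steps and returns false if
     * the condition is not met within 'timeout' microseconds.
     */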
 142
 143void ath9k_hw_write_array(struct ath_hw *ah, struct ar5416IniArray *array,
 144			  int column, unsigned int *writecnt)
 145{
 146	int r;
 147
 148	ENABLE_REGWRITE_BUFFER(ah);
 149	for (r = 0; r < array->ia_rows; r++) {
 150		REG_WRITE(ah, INI_RA(array, r, 0),
 151			  INI_RA(array, r, column));
 152		DO_DELAY(*writecnt);
 153	}
 154	REGWRITE_BUFFER_FLUSH(ah);
 155}
 156
 157u32 ath9k_hw_reverse_bits(u32 val, u32 n)
 158{
 159	u32 retval;
 160	int i;
 161
 162	for (i = 0, retval = 0; i < n; i++) {
 163		retval = (retval << 1) | (val & 1);
 164		val >>= 1;
 165	}
 166	return retval;
 167}
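    /*
     * Example: ath9k_hw_reverse_bits(0x5, 4) walks the four low bits of
     * 0b0101 LSB-first and returns 0b1010 (0xA).
     */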
 168
 169u16 ath9k_hw_computetxtime(struct ath_hw *ah,
 170			   u8 phy, int kbps,
 171			   u32 frameLen, u16 rateix,
 172			   bool shortPreamble)
 173{
 174	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
 175
 176	if (kbps == 0)
 177		return 0;
 178
 179	switch (phy) {
 180	case WLAN_RC_PHY_CCK:
 181		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
 182		if (shortPreamble)
 183			phyTime >>= 1;
 184		numBits = frameLen << 3;
 185		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
 186		break;
 187	case WLAN_RC_PHY_OFDM:
 188		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
 189			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
 190			numBits = OFDM_PLCP_BITS + (frameLen << 3);
 191			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
 192			txTime = OFDM_SIFS_TIME_QUARTER
 193				+ OFDM_PREAMBLE_TIME_QUARTER
 194				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
 195		} else if (ah->curchan &&
 196			   IS_CHAN_HALF_RATE(ah->curchan)) {
 197			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
 198			numBits = OFDM_PLCP_BITS + (frameLen << 3);
 199			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
 200			txTime = OFDM_SIFS_TIME_HALF +
 201				OFDM_PREAMBLE_TIME_HALF
 202				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
 203		} else {
 204			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
 205			numBits = OFDM_PLCP_BITS + (frameLen << 3);
 206			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
 207			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
 208				+ (numSymbols * OFDM_SYMBOL_TIME);
 209		}
 210		break;
 211	default:
 212		ath_err(ath9k_hw_common(ah),
 213			"Unknown phy %u (rate ix %u)\n", phy, rateix);
 214		txTime = 0;
 215		break;
 216	}
 217
 218	return txTime;
 219}
 220EXPORT_SYMBOL(ath9k_hw_computetxtime);
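    /*
     * Rough worked example for the full-rate OFDM branch, assuming the
     * usual 802.11a constants (SIFS 16us, preamble 20us, 4us symbols,
     * 22 PLCP bits): a 1500-byte frame at 54000 kbps gives
     * bitsPerSymbol = 216, numBits = 12022, numSymbols = 56, so
     * txTime = 16 + 20 + 56 * 4 = 260us.
     */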
 221
 222void ath9k_hw_get_channel_centers(struct ath_hw *ah,
 223				  struct ath9k_channel *chan,
 224				  struct chan_centers *centers)
 225{
 226	int8_t extoff;
 227
 228	if (!IS_CHAN_HT40(chan)) {
 229		centers->ctl_center = centers->ext_center =
 230			centers->synth_center = chan->channel;
 231		return;
 232	}
 233
 234	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
 235	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
 236		centers->synth_center =
 237			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
 238		extoff = 1;
 239	} else {
 240		centers->synth_center =
 241			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
 242		extoff = -1;
 243	}
 244
 245	centers->ctl_center =
 246		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
 247	/* 25 MHz spacing is supported by hw but not on upper layers */
 248	centers->ext_center =
 249		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
 250}
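    /*
     * Example, assuming HT40_CHANNEL_CENTER_SHIFT is 10 (MHz): for an
     * HT40+ channel with a 2412 MHz primary, synth_center becomes 2422,
     * ctl_center stays at 2412 and ext_center is 2432.
     */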
 251
 252/******************/
 253/* Chip Revisions */
 254/******************/
 255
 256static void ath9k_hw_read_revisions(struct ath_hw *ah)
 257{
 258	u32 val;
 259
 260	switch (ah->hw_version.devid) {
 261	case AR5416_AR9100_DEVID:
 262		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
 263		break;
 264	case AR9300_DEVID_AR9330:
 265		ah->hw_version.macVersion = AR_SREV_VERSION_9330;
 266		if (ah->get_mac_revision) {
 267			ah->hw_version.macRev = ah->get_mac_revision();
 268		} else {
 269			val = REG_READ(ah, AR_SREV);
 270			ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
 271		}
 272		return;
 273	case AR9300_DEVID_AR9340:
 274		ah->hw_version.macVersion = AR_SREV_VERSION_9340;
 275		val = REG_READ(ah, AR_SREV);
 276		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
 277		return;
 278	}
 279
 280	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
 281
 282	if (val == 0xFF) {
 283		val = REG_READ(ah, AR_SREV);
 284		ah->hw_version.macVersion =
 285			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
 286		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
 287		ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
 288	} else {
 289		if (!AR_SREV_9100(ah))
 290			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
 291
 292		ah->hw_version.macRev = val & AR_SREV_REVISION;
 293
 294		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
 295			ah->is_pciexpress = true;
 296	}
 297}
 298
 299/************************************/
 300/* HW Attach, Detach, Init Routines */
 301/************************************/
 302
 303static void ath9k_hw_disablepcie(struct ath_hw *ah)
 304{
 305	if (!AR_SREV_5416(ah))
 306		return;
 307
 308	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
 309	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
 310	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
 311	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
 312	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
 313	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
 314	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
 315	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
 316	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
 317
 318	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
 319}
 320
 321static void ath9k_hw_aspm_init(struct ath_hw *ah)
 322{
 323	struct ath_common *common = ath9k_hw_common(ah);
 324
 325	if (common->bus_ops->aspm_init)
 326		common->bus_ops->aspm_init(common);
 327}
 328
 329/* This should work for all families including legacy */
 330static bool ath9k_hw_chip_test(struct ath_hw *ah)
 331{
 332	struct ath_common *common = ath9k_hw_common(ah);
 333	u32 regAddr[2] = { AR_STA_ID0 };
 334	u32 regHold[2];
 335	static const u32 patternData[4] = {
 336		0x55555555, 0xaaaaaaaa, 0x66666666, 0x99999999
 337	};
 338	int i, j, loop_max;
 339
 340	if (!AR_SREV_9300_20_OR_LATER(ah)) {
 341		loop_max = 2;
 342		regAddr[1] = AR_PHY_BASE + (8 << 2);
 343	} else
 344		loop_max = 1;
 345
 346	for (i = 0; i < loop_max; i++) {
 347		u32 addr = regAddr[i];
 348		u32 wrData, rdData;
 349
 350		regHold[i] = REG_READ(ah, addr);
 351		for (j = 0; j < 0x100; j++) {
 352			wrData = (j << 16) | j;
 353			REG_WRITE(ah, addr, wrData);
 354			rdData = REG_READ(ah, addr);
 355			if (rdData != wrData) {
 356				ath_err(common,
 357					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
 358					addr, wrData, rdData);
 359				return false;
 360			}
 361		}
 362		for (j = 0; j < 4; j++) {
 363			wrData = patternData[j];
 364			REG_WRITE(ah, addr, wrData);
 365			rdData = REG_READ(ah, addr);
 366			if (wrData != rdData) {
 367				ath_err(common,
 368					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
 369					addr, wrData, rdData);
 370				return false;
 371			}
 372		}
 373		REG_WRITE(ah, regAddr[i], regHold[i]);
 374	}
 375	udelay(100);
 376
 377	return true;
 378}
 379
 380static void ath9k_hw_init_config(struct ath_hw *ah)
 381{
 382	int i;
 383
 384	ah->config.dma_beacon_response_time = 2;
 385	ah->config.sw_beacon_response_time = 10;
 386	ah->config.additional_swba_backoff = 0;
 387	ah->config.ack_6mb = 0x0;
 388	ah->config.cwm_ignore_extcca = 0;
 389	ah->config.pcie_clock_req = 0;
 390	ah->config.pcie_waen = 0;
 391	ah->config.analog_shiftreg = 1;
 392	ah->config.enable_ani = true;
 393
 394	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
 395		ah->config.spurchans[i][0] = AR_NO_SPUR;
 396		ah->config.spurchans[i][1] = AR_NO_SPUR;
 397	}
 398
 399	/* PAPRD needs some more work to be enabled */
 400	ah->config.paprd_disable = 1;
 401
 402	ah->config.rx_intr_mitigation = true;
 403	ah->config.pcieSerDesWrite = true;
 404
 405	/*
 406	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
 407	 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
 408	 * This means we use it for all AR5416 devices, and the few
 409	 * minor PCI AR9280 devices out there.
 410	 *
 411	 * Serialization is required because these devices do not handle
 412	 * well the case of two concurrent reads/writes due to the latency
 413	 * involved. During one read/write another read/write can be issued
 414	 * on another CPU while the previous read/write may still be working
 415	 * on our hardware; if we hit this case the hardware hangs in a loop.
 416	 * We prevent this by serializing reads and writes.
 417	 *
 418	 * This issue is not present on PCI-Express devices or pre-AR5416
 419	 * devices (legacy, 802.11abg).
 420	 */
 421	if (num_possible_cpus() > 1)
 422		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
 423}
 424
 425static void ath9k_hw_init_defaults(struct ath_hw *ah)
 426{
 427	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
 428
 429	regulatory->country_code = CTRY_DEFAULT;
 430	regulatory->power_limit = MAX_RATE_POWER;
 431	regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
 432
 433	ah->hw_version.magic = AR5416_MAGIC;
 434	ah->hw_version.subvendorid = 0;
 435
 436	ah->atim_window = 0;
 437	ah->sta_id1_defaults =
 438		AR_STA_ID1_CRPT_MIC_ENABLE |
 439		AR_STA_ID1_MCAST_KSRCH;
 440	if (AR_SREV_9100(ah))
 441		ah->sta_id1_defaults |= AR_STA_ID1_AR9100_BA_FIX;
 442	ah->enable_32kHz_clock = DONT_USE_32KHZ;
 443	ah->slottime = 20;
 444	ah->globaltxtimeout = (u32) -1;
 445	ah->power_mode = ATH9K_PM_UNDEFINED;
 446}
 447
 448static int ath9k_hw_init_macaddr(struct ath_hw *ah)
 449{
 450	struct ath_common *common = ath9k_hw_common(ah);
 451	u32 sum;
 452	int i;
 453	u16 eeval;
 454	static const u32 EEP_MAC[] = { EEP_MAC_LSW, EEP_MAC_MID, EEP_MAC_MSW };
 455
 456	sum = 0;
 457	for (i = 0; i < 3; i++) {
 458		eeval = ah->eep_ops->get_eeprom(ah, EEP_MAC[i]);
 459		sum += eeval;
 460		common->macaddr[2 * i] = eeval >> 8;
 461		common->macaddr[2 * i + 1] = eeval & 0xff;
 462	}
 463	if (sum == 0 || sum == 0xffff * 3)
 464		return -EADDRNOTAVAIL;
 465
 466	return 0;
 467}
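    /*
     * Each EEPROM word holds two address bytes with the high byte first,
     * so EEP_MAC_LSW/MID/MSW map to macaddr[0..1], [2..3] and [4..5].
     * The sum check rejects an unprogrammed EEPROM: all-zero words sum
     * to 0 and all-ones words sum to 3 * 0xffff, and both cases return
     * -EADDRNOTAVAIL.
     */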
 468
 469static int ath9k_hw_post_init(struct ath_hw *ah)
 470{
 471	struct ath_common *common = ath9k_hw_common(ah);
 472	int ecode;
 473
 474	if (common->bus_ops->ath_bus_type != ATH_USB) {
 475		if (!ath9k_hw_chip_test(ah))
 476			return -ENODEV;
 477	}
 478
 479	if (!AR_SREV_9300_20_OR_LATER(ah)) {
 480		ecode = ar9002_hw_rf_claim(ah);
 481		if (ecode != 0)
 482			return ecode;
 483	}
 484
 485	ecode = ath9k_hw_eeprom_init(ah);
 486	if (ecode != 0)
 487		return ecode;
 488
 489	ath_dbg(ath9k_hw_common(ah), ATH_DBG_CONFIG,
 490		"Eeprom VER: %d, REV: %d\n",
 491		ah->eep_ops->get_eeprom_ver(ah),
 492		ah->eep_ops->get_eeprom_rev(ah));
 493
 494	ecode = ath9k_hw_rf_alloc_ext_banks(ah);
 495	if (ecode) {
 496		ath_err(ath9k_hw_common(ah),
 497			"Failed allocating banks for external radio\n");
 498		ath9k_hw_rf_free_ext_banks(ah);
 499		return ecode;
 500	}
 501
 502	if (!AR_SREV_9100(ah) && !AR_SREV_9340(ah)) {
 503		ath9k_hw_ani_setup(ah);
 504		ath9k_hw_ani_init(ah);
 505	}
 506
 507	return 0;
 508}
 509
 510static void ath9k_hw_attach_ops(struct ath_hw *ah)
 511{
 512	if (AR_SREV_9300_20_OR_LATER(ah))
 513		ar9003_hw_attach_ops(ah);
 514	else
 515		ar9002_hw_attach_ops(ah);
 516}
 517
 518/* Called for all hardware families */
 519static int __ath9k_hw_init(struct ath_hw *ah)
 520{
 521	struct ath_common *common = ath9k_hw_common(ah);
 522	int r = 0;
 523
 524	ath9k_hw_read_revisions(ah);
 525
 526	/*
 527	 * Read back AR_WA into a permanent copy and set bits 14 and 17.
 528	 * We need to do this to avoid RMW of this register. We cannot
 529	 * read the reg when chip is asleep.
 530	 */
 531	ah->WARegVal = REG_READ(ah, AR_WA);
 532	ah->WARegVal |= (AR_WA_D3_L1_DISABLE |
 533			 AR_WA_ASPM_TIMER_BASED_DISABLE);
 534
 535	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
 536		ath_err(common, "Couldn't reset chip\n");
 537		return -EIO;
 538	}
 539
 540	ath9k_hw_init_defaults(ah);
 541	ath9k_hw_init_config(ah);
 542
 543	ath9k_hw_attach_ops(ah);
 544
 545	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
 546		ath_err(common, "Couldn't wakeup chip\n");
 547		return -EIO;
 548	}
 549
 550	if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
 551		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
 552		    ((AR_SREV_9160(ah) || AR_SREV_9280(ah)) &&
 553		     !ah->is_pciexpress)) {
 554			ah->config.serialize_regmode =
 555				SER_REG_MODE_ON;
 556		} else {
 557			ah->config.serialize_regmode =
 558				SER_REG_MODE_OFF;
 559		}
 560	}
 561
 562	ath_dbg(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
 563		ah->config.serialize_regmode);
 564
 565	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
 566		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD >> 1;
 567	else
 568		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD;
 569
 570	switch (ah->hw_version.macVersion) {
 571	case AR_SREV_VERSION_5416_PCI:
 572	case AR_SREV_VERSION_5416_PCIE:
 573	case AR_SREV_VERSION_9160:
 574	case AR_SREV_VERSION_9100:
 575	case AR_SREV_VERSION_9280:
 576	case AR_SREV_VERSION_9285:
 577	case AR_SREV_VERSION_9287:
 578	case AR_SREV_VERSION_9271:
 579	case AR_SREV_VERSION_9300:
 580	case AR_SREV_VERSION_9330:
 581	case AR_SREV_VERSION_9485:
 582	case AR_SREV_VERSION_9340:
 583		break;
 584	default:
 585		ath_err(common,
 586			"Mac Chip Rev 0x%02x.%x is not supported by this driver\n",
 587			ah->hw_version.macVersion, ah->hw_version.macRev);
 588		return -EOPNOTSUPP;
 589	}
 590
 591	if (AR_SREV_9271(ah) || AR_SREV_9100(ah) || AR_SREV_9340(ah) ||
 592	    AR_SREV_9330(ah))
 593		ah->is_pciexpress = false;
 594
 595	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
 596	ath9k_hw_init_cal_settings(ah);
 597
 598	ah->ani_function = ATH9K_ANI_ALL;
 599	if (AR_SREV_9280_20_OR_LATER(ah) && !AR_SREV_9300_20_OR_LATER(ah))
 600		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
 601	if (!AR_SREV_9300_20_OR_LATER(ah))
 602		ah->ani_function &= ~ATH9K_ANI_MRC_CCK;
 603
 604	ath9k_hw_init_mode_regs(ah);
 605
 606
 607	if (ah->is_pciexpress)
 608		ath9k_hw_aspm_init(ah);
 609	else
 610		ath9k_hw_disablepcie(ah);
 611
 612	if (!AR_SREV_9300_20_OR_LATER(ah))
 613		ar9002_hw_cck_chan14_spread(ah);
 614
 615	r = ath9k_hw_post_init(ah);
 616	if (r)
 617		return r;
 618
 619	ath9k_hw_init_mode_gain_regs(ah);
 620	r = ath9k_hw_fill_cap_info(ah);
 621	if (r)
 622		return r;
 623
 624	r = ath9k_hw_init_macaddr(ah);
 625	if (r) {
 626		ath_err(common, "Failed to initialize MAC address\n");
 627		return r;
 628	}
 629
 630	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
 631		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
 632	else
 633		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
 634
 635	if (AR_SREV_9330(ah))
 636		ah->bb_watchdog_timeout_ms = 85;
 637	else
 638		ah->bb_watchdog_timeout_ms = 25;
 639
 640	common->state = ATH_HW_INITIALIZED;
 641
 642	return 0;
 643}
 644
 645int ath9k_hw_init(struct ath_hw *ah)
 646{
 647	int ret;
 648	struct ath_common *common = ath9k_hw_common(ah);
 649
 650	/* Device IDs for the AR5008/AR9001/AR9002 and AR9003 hardware families */
 651	switch (ah->hw_version.devid) {
 652	case AR5416_DEVID_PCI:
 653	case AR5416_DEVID_PCIE:
 654	case AR5416_AR9100_DEVID:
 655	case AR9160_DEVID_PCI:
 656	case AR9280_DEVID_PCI:
 657	case AR9280_DEVID_PCIE:
 658	case AR9285_DEVID_PCIE:
 659	case AR9287_DEVID_PCI:
 660	case AR9287_DEVID_PCIE:
 661	case AR2427_DEVID_PCIE:
 662	case AR9300_DEVID_PCIE:
 663	case AR9300_DEVID_AR9485_PCIE:
 664	case AR9300_DEVID_AR9330:
 665	case AR9300_DEVID_AR9340:
 666		break;
 667	default:
 668		if (common->bus_ops->ath_bus_type == ATH_USB)
 669			break;
 670		ath_err(common, "Hardware device ID 0x%04x not supported\n",
 671			ah->hw_version.devid);
 672		return -EOPNOTSUPP;
 673	}
 674
 675	ret = __ath9k_hw_init(ah);
 676	if (ret) {
 677		ath_err(common,
 678			"Unable to initialize hardware; initialization status: %d\n",
 679			ret);
 680		return ret;
 681	}
 682
 683	return 0;
 684}
 685EXPORT_SYMBOL(ath9k_hw_init);
 686
 687static void ath9k_hw_init_qos(struct ath_hw *ah)
 688{
 689	ENABLE_REGWRITE_BUFFER(ah);
 690
 691	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
 692	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
 693
 694	REG_WRITE(ah, AR_QOS_NO_ACK,
 695		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
 696		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
 697		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));
 698
 699	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
 700	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
 701	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
 702	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
 703	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
 704
 705	REGWRITE_BUFFER_FLUSH(ah);
 706}
 707
 708u32 ar9003_get_pll_sqsum_dvc(struct ath_hw *ah)
 709{
 710	REG_CLR_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
 711	udelay(100);
 712	REG_SET_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
 713
 714	while ((REG_READ(ah, PLL4) & PLL4_MEAS_DONE) == 0)
 715		udelay(100);
 716
 717	return (REG_READ(ah, PLL3) & SQSUM_DVC_MASK) >> 3;
 718}
 719EXPORT_SYMBOL(ar9003_get_pll_sqsum_dvc);
 720
 721static void ath9k_hw_init_pll(struct ath_hw *ah,
 722			      struct ath9k_channel *chan)
 723{
 724	u32 pll;
 725
 726	if (AR_SREV_9485(ah)) {
 727
 728		/* program BB PLL ki and kd value, ki=0x4, kd=0x40 */
 729		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 730			      AR_CH0_BB_DPLL2_PLL_PWD, 0x1);
 731		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 732			      AR_CH0_DPLL2_KD, 0x40);
 733		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 734			      AR_CH0_DPLL2_KI, 0x4);
 735
 736		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
 737			      AR_CH0_BB_DPLL1_REFDIV, 0x5);
 738		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
 739			      AR_CH0_BB_DPLL1_NINI, 0x58);
 740		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
 741			      AR_CH0_BB_DPLL1_NFRAC, 0x0);
 742
 743		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 744			      AR_CH0_BB_DPLL2_OUTDIV, 0x1);
 745		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 746			      AR_CH0_BB_DPLL2_LOCAL_PLL, 0x1);
 747		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 748			      AR_CH0_BB_DPLL2_EN_NEGTRIG, 0x1);
 749
 750		/* program BB PLL phase_shift to 0x6 */
 751		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
 752			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x6);
 753
 754		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 755			      AR_CH0_BB_DPLL2_PLL_PWD, 0x0);
 756		udelay(1000);
 757	} else if (AR_SREV_9330(ah)) {
 758		u32 ddr_dpll2, pll_control2, kd;
 759
 760		if (ah->is_clk_25mhz) {
 761			ddr_dpll2 = 0x18e82f01;
 762			pll_control2 = 0xe04a3d;
 763			kd = 0x1d;
 764		} else {
 765			ddr_dpll2 = 0x19e82f01;
 766			pll_control2 = 0x886666;
 767			kd = 0x3d;
 768		}
 769
 770		/* program DDR PLL ki and kd value */
 771		REG_WRITE(ah, AR_CH0_DDR_DPLL2, ddr_dpll2);
 772
 773		/* program DDR PLL phase_shift */
 774		REG_RMW_FIELD(ah, AR_CH0_DDR_DPLL3,
 775			      AR_CH0_DPLL3_PHASE_SHIFT, 0x1);
 776
 777		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
 778		udelay(1000);
 779
 780		/* program refdiv, nint, frac to RTC register */
 781		REG_WRITE(ah, AR_RTC_PLL_CONTROL2, pll_control2);
 782
 783		/* program BB PLL kd and ki value */
 784		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KD, kd);
 785		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KI, 0x06);
 786
 787		/* program BB PLL phase_shift */
 788		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
 789			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x1);
 790	} else if (AR_SREV_9340(ah)) {
 791		u32 regval, pll2_divint, pll2_divfrac, refdiv;
 792
 793		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
 794		udelay(1000);
 795
 796		REG_SET_BIT(ah, AR_PHY_PLL_MODE, 0x1 << 16);
 797		udelay(100);
 798
 799		if (ah->is_clk_25mhz) {
 800			pll2_divint = 0x54;
 801			pll2_divfrac = 0x1eb85;
 802			refdiv = 3;
 803		} else {
 804			pll2_divint = 88;
 805			pll2_divfrac = 0;
 806			refdiv = 5;
 807		}
 808
 809		regval = REG_READ(ah, AR_PHY_PLL_MODE);
 810		regval |= (0x1 << 16);
 811		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
 812		udelay(100);
 813
 814		REG_WRITE(ah, AR_PHY_PLL_CONTROL, (refdiv << 27) |
 815			  (pll2_divint << 18) | pll2_divfrac);
 816		udelay(100);
 817
 818		regval = REG_READ(ah, AR_PHY_PLL_MODE);
 819		regval = (regval & 0x80071fff) | (0x1 << 30) | (0x1 << 13) |
 820			 (0x4 << 26) | (0x18 << 19);
 821		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
 822		REG_WRITE(ah, AR_PHY_PLL_MODE,
 823			  REG_READ(ah, AR_PHY_PLL_MODE) & 0xfffeffff);
 824		udelay(1000);
 825	}
 826
 827	pll = ath9k_hw_compute_pll_control(ah, chan);
 828
 829	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
 830
 831	if (AR_SREV_9485(ah) || AR_SREV_9340(ah) || AR_SREV_9330(ah))
 832		udelay(1000);
 833
 834	/* Switch the core clock for AR9271 to 117 MHz */
 835	if (AR_SREV_9271(ah)) {
 836		udelay(500);
 837		REG_WRITE(ah, 0x50040, 0x304);
 838	}
 839
 840	udelay(RTC_PLL_SETTLE_DELAY);
 841
 842	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
 843
 844	if (AR_SREV_9340(ah)) {
 845		if (ah->is_clk_25mhz) {
 846			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x17c << 1);
 847			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f3d7);
 848			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e7ae);
 849		} else {
 850			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x261 << 1);
 851			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f400);
 852			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e800);
 853		}
 854		udelay(100);
 855	}
 856}
 857
 858static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
 859					  enum nl80211_iftype opmode)
 860{
 861	u32 sync_default = AR_INTR_SYNC_DEFAULT;
 862	u32 imr_reg = AR_IMR_TXERR |
 863		AR_IMR_TXURN |
 864		AR_IMR_RXERR |
 865		AR_IMR_RXORN |
 866		AR_IMR_BCNMISC;
 867
 868	if (AR_SREV_9340(ah))
 869		sync_default &= ~AR_INTR_SYNC_HOST1_FATAL;
 870
 871	if (AR_SREV_9300_20_OR_LATER(ah)) {
 872		imr_reg |= AR_IMR_RXOK_HP;
 873		if (ah->config.rx_intr_mitigation)
 874			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
 875		else
 876			imr_reg |= AR_IMR_RXOK_LP;
 877
 878	} else {
 879		if (ah->config.rx_intr_mitigation)
 880			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
 881		else
 882			imr_reg |= AR_IMR_RXOK;
 883	}
 884
 885	if (ah->config.tx_intr_mitigation)
 886		imr_reg |= AR_IMR_TXINTM | AR_IMR_TXMINTR;
 887	else
 888		imr_reg |= AR_IMR_TXOK;
 889
 890	if (opmode == NL80211_IFTYPE_AP)
 891		imr_reg |= AR_IMR_MIB;
 892
 893	ENABLE_REGWRITE_BUFFER(ah);
 894
 895	REG_WRITE(ah, AR_IMR, imr_reg);
 896	ah->imrs2_reg |= AR_IMR_S2_GTT;
 897	REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
 898
 899	if (!AR_SREV_9100(ah)) {
 900		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
 901		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, sync_default);
 902		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
 903	}
 904
 905	REGWRITE_BUFFER_FLUSH(ah);
 906
 907	if (AR_SREV_9300_20_OR_LATER(ah)) {
 908		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE, 0);
 909		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_MASK, 0);
 910		REG_WRITE(ah, AR_INTR_PRIO_SYNC_ENABLE, 0);
 911		REG_WRITE(ah, AR_INTR_PRIO_SYNC_MASK, 0);
 912	}
 913}
 914
 915static void ath9k_hw_set_sifs_time(struct ath_hw *ah, u32 us)
 916{
 917	u32 val = ath9k_hw_mac_to_clks(ah, us - 2);
 918	val = min(val, (u32) 0xFFFF);
 919	REG_WRITE(ah, AR_D_GBL_IFS_SIFS, val);
 920}
 921
 922static void ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
 923{
 924	u32 val = ath9k_hw_mac_to_clks(ah, us);
 925	val = min(val, (u32) 0xFFFF);
 926	REG_WRITE(ah, AR_D_GBL_IFS_SLOT, val);
 927}
 928
 929static void ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
 930{
 931	u32 val = ath9k_hw_mac_to_clks(ah, us);
 932	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_ACK));
 933	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_ACK, val);
 934}
 935
 936static void ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
 937{
 938	u32 val = ath9k_hw_mac_to_clks(ah, us);
 939	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_CTS));
 940	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_CTS, val);
 941}
 942
 943static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
 944{
 945	if (tu > 0xFFFF) {
 946		ath_dbg(ath9k_hw_common(ah), ATH_DBG_XMIT,
 947			"bad global tx timeout %u\n", tu);
 948		ah->globaltxtimeout = (u32) -1;
 949		return false;
 950	} else {
 951		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
 952		ah->globaltxtimeout = tu;
 953		return true;
 954	}
 955}
 956
 957void ath9k_hw_init_global_settings(struct ath_hw *ah)
 958{
 959	struct ath_common *common = ath9k_hw_common(ah);
 960	struct ieee80211_conf *conf = &common->hw->conf;
 961	const struct ath9k_channel *chan = ah->curchan;
 962	int acktimeout;
 963	int slottime;
 964	int sifstime;
 965	int rx_lat = 0, tx_lat = 0, eifs = 0;
 966	u32 reg;
 967
 968	ath_dbg(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
 969		ah->misc_mode);
 970
 971	if (!chan)
 972		return;
 973
 974	if (ah->misc_mode != 0)
 975		REG_SET_BIT(ah, AR_PCU_MISC, ah->misc_mode);
 976
 977	rx_lat = 37;
 978	tx_lat = 54;
 979
 980	if (IS_CHAN_HALF_RATE(chan)) {
 981		eifs = 175;
 982		rx_lat *= 2;
 983		tx_lat *= 2;
 984		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
 985		    tx_lat += 11;
 986
 987		slottime = 13;
 988		sifstime = 32;
 989	} else if (IS_CHAN_QUARTER_RATE(chan)) {
 990		eifs = 340;
 991		rx_lat *= 4;
 992		tx_lat *= 4;
 993		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
 994		    tx_lat += 22;
 995
 996		slottime = 21;
 997		sifstime = 64;
 998	} else {
 999		eifs = REG_READ(ah, AR_D_GBL_IFS_EIFS);
1000		reg = REG_READ(ah, AR_USEC);
1001		rx_lat = MS(reg, AR_USEC_RX_LAT);
1002		tx_lat = MS(reg, AR_USEC_TX_LAT);
1003
1004		slottime = ah->slottime;
1005		if (IS_CHAN_5GHZ(chan))
1006			sifstime = 16;
1007		else
1008			sifstime = 10;
1009	}
1010
1011	/* As defined by IEEE 802.11-2007 17.3.8.6 */
1012	acktimeout = slottime + sifstime + 3 * ah->coverage_class;
1013
1014	/*
1015	 * Workaround for early ACK timeouts, add an offset to match the
1016	 * initval's 64us ack timeout value.
1017	 * This was initially only meant to work around an issue with delayed
1018	 * BA frames in some implementations, but it has been found to fix ACK
1019	 * timeout issues in other cases as well.
1020	 */
1021	if (conf->channel && conf->channel->band == IEEE80211_BAND_2GHZ)
1022		acktimeout += 64 - sifstime - ah->slottime;
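	/*
	 * With the defaults on a 2.4 GHz channel (slottime equal to
	 * ah->slottime, e.g. 20us, sifstime 10us, no coverage class)
	 * this works out to 20 + 10 + (64 - 10 - 20), i.e. the ack
	 * timeout is pinned at the 64us initval value mentioned above.
	 */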
1023
1024	ath9k_hw_set_sifs_time(ah, sifstime);
1025	ath9k_hw_setslottime(ah, slottime);
1026	ath9k_hw_set_ack_timeout(ah, acktimeout);
1027	ath9k_hw_set_cts_timeout(ah, acktimeout);
1028	if (ah->globaltxtimeout != (u32) -1)
1029		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1030
1031	REG_WRITE(ah, AR_D_GBL_IFS_EIFS, ath9k_hw_mac_to_clks(ah, eifs));
1032	REG_RMW(ah, AR_USEC,
1033		(common->clockrate - 1) |
1034		SM(rx_lat, AR_USEC_RX_LAT) |
1035		SM(tx_lat, AR_USEC_TX_LAT),
1036		AR_USEC_TX_LAT | AR_USEC_RX_LAT | AR_USEC_USEC);
1037
1038}
1039EXPORT_SYMBOL(ath9k_hw_init_global_settings);
1040
1041void ath9k_hw_deinit(struct ath_hw *ah)
1042{
1043	struct ath_common *common = ath9k_hw_common(ah);
1044
1045	if (common->state < ATH_HW_INITIALIZED)
1046		goto free_hw;
1047
1048	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1049
1050free_hw:
1051	ath9k_hw_rf_free_ext_banks(ah);
1052}
1053EXPORT_SYMBOL(ath9k_hw_deinit);
1054
1055/*******/
1056/* INI */
1057/*******/
1058
1059u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, struct ath9k_channel *chan)
1060{
1061	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1062
1063	if (IS_CHAN_B(chan))
1064		ctl |= CTL_11B;
1065	else if (IS_CHAN_G(chan))
1066		ctl |= CTL_11G;
1067	else
1068		ctl |= CTL_11A;
1069
1070	return ctl;
1071}
1072
1073/****************************************/
1074/* Reset and Channel Switching Routines */
1075/****************************************/
1076
1077static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1078{
1079	struct ath_common *common = ath9k_hw_common(ah);
1080
1081	ENABLE_REGWRITE_BUFFER(ah);
1082
1083	/*
1084	 * set AHB_MODE not to do cacheline prefetches
1085	*/
1086	if (!AR_SREV_9300_20_OR_LATER(ah))
1087		REG_SET_BIT(ah, AR_AHB_MODE, AR_AHB_PREFETCH_RD_EN);
1088
1089	/*
1090	 * let mac dma reads be in 128 byte chunks
1091	 */
1092	REG_RMW(ah, AR_TXCFG, AR_TXCFG_DMASZ_128B, AR_TXCFG_DMASZ_MASK);
1093
1094	REGWRITE_BUFFER_FLUSH(ah);
1095
1096	/*
1097	 * Restore TX Trigger Level to its pre-reset value.
1098	 * The initial value depends on whether aggregation is enabled, and is
1099	 * adjusted whenever underruns are detected.
1100	 */
1101	if (!AR_SREV_9300_20_OR_LATER(ah))
1102		REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1103
1104	ENABLE_REGWRITE_BUFFER(ah);
1105
1106	/*
1107	 * let mac dma writes be in 128 byte chunks
1108	 */
1109	REG_RMW(ah, AR_RXCFG, AR_RXCFG_DMASZ_128B, AR_RXCFG_DMASZ_MASK);
1110
1111	/*
1112	 * Setup receive FIFO threshold to hold off TX activities
1113	 */
1114	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1115
1116	if (AR_SREV_9300_20_OR_LATER(ah)) {
1117		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_HP, 0x1);
1118		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_LP, 0x1);
1119
1120		ath9k_hw_set_rx_bufsize(ah, common->rx_bufsize -
1121			ah->caps.rx_status_len);
1122	}
1123
1124	/*
1125	 * reduce the number of usable entries in PCU TXBUF to avoid
1126	 * wrap around issues.
1127	 */
1128	if (AR_SREV_9285(ah)) {
1129		/* On AR9285 the number of FIFOs is reduced to half,
1130		 * so also set the usable tx buf size to half to
1131		 * avoid data/delimiter underruns.
1132		 */
1133		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1134			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1135	} else if (!AR_SREV_9271(ah)) {
1136		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1137			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1138	}
1139
1140	REGWRITE_BUFFER_FLUSH(ah);
1141
1142	if (AR_SREV_9300_20_OR_LATER(ah))
1143		ath9k_hw_reset_txstatus_ring(ah);
1144}
1145
1146static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1147{
1148	u32 mask = AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC;
1149	u32 set = AR_STA_ID1_KSRCH_MODE;
1150
1151	switch (opmode) {
1152	case NL80211_IFTYPE_ADHOC:
1153	case NL80211_IFTYPE_MESH_POINT:
1154		set |= AR_STA_ID1_ADHOC;
1155		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1156		break;
1157	case NL80211_IFTYPE_AP:
1158		set |= AR_STA_ID1_STA_AP;
1159		/* fall through */
1160	case NL80211_IFTYPE_STATION:
1161		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1162		break;
1163	default:
1164		if (!ah->is_monitoring)
1165			set = 0;
1166		break;
1167	}
1168	REG_RMW(ah, AR_STA_ID1, set, mask);
1169}
1170
1171void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah, u32 coef_scaled,
1172				   u32 *coef_mantissa, u32 *coef_exponent)
1173{
1174	u32 coef_exp, coef_man;
1175
1176	for (coef_exp = 31; coef_exp > 0; coef_exp--)
1177		if ((coef_scaled >> coef_exp) & 0x1)
1178			break;
1179
1180	coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1181
1182	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1183
1184	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1185	*coef_exponent = coef_exp - 16;
1186}
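    /*
     * In short: the loop locates the highest set bit of coef_scaled,
     * the exponent is expressed relative to COEF_SCALE_S, the mantissa
     * is rounded (the added (1 << (COEF_SCALE_S - coef_exp - 1)) term)
     * before shifting, and the exponent is returned with a -16 bias.
     */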
1187
1188static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1189{
1190	u32 rst_flags;
1191	u32 tmpReg;
1192
1193	if (AR_SREV_9100(ah)) {
1194		REG_RMW_FIELD(ah, AR_RTC_DERIVED_CLK,
1195			      AR_RTC_DERIVED_CLK_PERIOD, 1);
1196		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1197	}
1198
1199	ENABLE_REGWRITE_BUFFER(ah);
1200
1201	if (AR_SREV_9300_20_OR_LATER(ah)) {
1202		REG_WRITE(ah, AR_WA, ah->WARegVal);
1203		udelay(10);
1204	}
1205
1206	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1207		  AR_RTC_FORCE_WAKE_ON_INT);
1208
1209	if (AR_SREV_9100(ah)) {
1210		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1211			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1212	} else {
1213		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1214		if (tmpReg &
1215		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
1216		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1217			u32 val;
1218			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1219
1220			val = AR_RC_HOSTIF;
1221			if (!AR_SREV_9300_20_OR_LATER(ah))
1222				val |= AR_RC_AHB;
1223			REG_WRITE(ah, AR_RC, val);
1224
1225		} else if (!AR_SREV_9300_20_OR_LATER(ah))
1226			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1227
1228		rst_flags = AR_RTC_RC_MAC_WARM;
1229		if (type == ATH9K_RESET_COLD)
1230			rst_flags |= AR_RTC_RC_MAC_COLD;
1231	}
1232
1233	if (AR_SREV_9330(ah)) {
1234		int npend = 0;
1235		int i;
1236
1237		/* AR9330 WAR:
1238		 * call external reset function to reset WMAC if:
1239		 * - doing a cold reset
1240		 * - we have pending frames in the TX queues
1241		 */
1242
1243		for (i = 0; i < AR_NUM_QCU; i++) {
1244			npend = ath9k_hw_numtxpending(ah, i);
1245			if (npend)
1246				break;
1247		}
1248
1249		if (ah->external_reset &&
1250		    (npend || type == ATH9K_RESET_COLD)) {
1251			int reset_err = 0;
1252
1253			ath_dbg(ath9k_hw_common(ah), ATH_DBG_RESET,
1254				"reset MAC via external reset\n");
1255
1256			reset_err = ah->external_reset();
1257			if (reset_err) {
1258				ath_err(ath9k_hw_common(ah),
1259					"External reset failed, err=%d\n",
1260					reset_err);
1261				return false;
1262			}
1263
1264			REG_WRITE(ah, AR_RTC_RESET, 1);
1265		}
1266	}
1267
1268	REG_WRITE(ah, AR_RTC_RC, rst_flags);
1269
1270	REGWRITE_BUFFER_FLUSH(ah);
1271
1272	udelay(50);
1273
1274	REG_WRITE(ah, AR_RTC_RC, 0);
1275	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1276		ath_dbg(ath9k_hw_common(ah), ATH_DBG_RESET,
1277			"RTC stuck in MAC reset\n");
1278		return false;
1279	}
1280
1281	if (!AR_SREV_9100(ah))
1282		REG_WRITE(ah, AR_RC, 0);
1283
1284	if (AR_SREV_9100(ah))
1285		udelay(50);
1286
1287	return true;
1288}
1289
1290static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1291{
1292	ENABLE_REGWRITE_BUFFER(ah);
1293
1294	if (AR_SREV_9300_20_OR_LATER(ah)) {
1295		REG_WRITE(ah, AR_WA, ah->WARegVal);
1296		udelay(10);
1297	}
1298
1299	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1300		  AR_RTC_FORCE_WAKE_ON_INT);
1301
1302	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1303		REG_WRITE(ah, AR_RC, AR_RC_AHB);
1304
1305	REG_WRITE(ah, AR_RTC_RESET, 0);
1306
1307	REGWRITE_BUFFER_FLUSH(ah);
1308
1309	if (!AR_SREV_9300_20_OR_LATER(ah))
1310		udelay(2);
1311
1312	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1313		REG_WRITE(ah, AR_RC, 0);
1314
1315	REG_WRITE(ah, AR_RTC_RESET, 1);
1316
1317	if (!ath9k_hw_wait(ah,
1318			   AR_RTC_STATUS,
1319			   AR_RTC_STATUS_M,
1320			   AR_RTC_STATUS_ON,
1321			   AH_WAIT_TIMEOUT)) {
1322		ath_dbg(ath9k_hw_common(ah), ATH_DBG_RESET,
1323			"RTC not waking up\n");
1324		return false;
1325	}
1326
1327	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1328}
1329
1330static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1331{
1332	if (AR_SREV_9300_20_OR_LATER(ah)) {
1333		REG_WRITE(ah, AR_WA, ah->WARegVal);
1334		udelay(10);
1335	}
1336
1337	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1338		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1339
1340	switch (type) {
1341	case ATH9K_RESET_POWER_ON:
1342		return ath9k_hw_set_reset_power_on(ah);
1343	case ATH9K_RESET_WARM:
1344	case ATH9K_RESET_COLD:
1345		return ath9k_hw_set_reset(ah, type);
1346	default:
1347		return false;
1348	}
1349}
1350
1351static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1352				struct ath9k_channel *chan)
1353{
1354	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1355		if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1356			return false;
1357	} else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1358		return false;
1359
1360	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1361		return false;
1362
1363	ah->chip_fullsleep = false;
1364	ath9k_hw_init_pll(ah, chan);
1365	ath9k_hw_set_rfmode(ah, chan);
1366
1367	return true;
1368}
1369
1370static bool ath9k_hw_channel_change(struct ath_hw *ah,
1371				    struct ath9k_channel *chan)
1372{
1373	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1374	struct ath_common *common = ath9k_hw_common(ah);
1375	struct ieee80211_channel *channel = chan->chan;
1376	u32 qnum;
1377	int r;
1378
1379	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1380		if (ath9k_hw_numtxpending(ah, qnum)) {
1381			ath_dbg(common, ATH_DBG_QUEUE,
1382				"Transmit frames pending on queue %d\n", qnum);
1383			return false;
1384		}
1385	}
1386
1387	if (!ath9k_hw_rfbus_req(ah)) {
1388		ath_err(common, "Could not kill baseband RX\n");
1389		return false;
1390	}
1391
1392	ath9k_hw_set_channel_regs(ah, chan);
1393
1394	r = ath9k_hw_rf_set_freq(ah, chan);
1395	if (r) {
1396		ath_err(common, "Failed to set channel\n");
1397		return false;
1398	}
1399	ath9k_hw_set_clockrate(ah);
1400
1401	ah->eep_ops->set_txpower(ah, chan,
1402			     ath9k_regd_get_ctl(regulatory, chan),
1403			     channel->max_antenna_gain * 2,
1404			     channel->max_power * 2,
1405			     min((u32) MAX_RATE_POWER,
1406			     (u32) regulatory->power_limit), false);
1407
1408	ath9k_hw_rfbus_done(ah);
1409
1410	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1411		ath9k_hw_set_delta_slope(ah, chan);
1412
1413	ath9k_hw_spur_mitigate_freq(ah, chan);
1414
1415	return true;
1416}
1417
1418static void ath9k_hw_apply_gpio_override(struct ath_hw *ah)
1419{
1420	u32 gpio_mask = ah->gpio_mask;
1421	int i;
1422
1423	for (i = 0; gpio_mask; i++, gpio_mask >>= 1) {
1424		if (!(gpio_mask & 1))
1425			continue;
1426
1427		ath9k_hw_cfg_output(ah, i, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
1428		ath9k_hw_set_gpio(ah, i, !!(ah->gpio_val & BIT(i)));
1429	}
1430}
1431
1432bool ath9k_hw_check_alive(struct ath_hw *ah)
1433{
1434	int count = 50;
1435	u32 reg;
1436
1437	if (AR_SREV_9285_12_OR_LATER(ah))
1438		return true;
1439
1440	do {
1441		reg = REG_READ(ah, AR_OBS_BUS_1);
1442
1443		if ((reg & 0x7E7FFFEF) == 0x00702400)
1444			continue;
1445
1446		switch (reg & 0x7E000B00) {
1447		case 0x1E000000:
1448		case 0x52000B00:
1449		case 0x18000B00:
1450			continue;
1451		default:
1452			return true;
1453		}
1454	} while (count-- > 0);
1455
1456	return false;
1457}
1458EXPORT_SYMBOL(ath9k_hw_check_alive);
1459
1460int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
1461		   struct ath9k_hw_cal_data *caldata, bool bChannelChange)
1462{
1463	struct ath_common *common = ath9k_hw_common(ah);
1464	u32 saveLedState;
1465	struct ath9k_channel *curchan = ah->curchan;
1466	u32 saveDefAntenna;
1467	u32 macStaId1;
1468	u64 tsf = 0;
1469	int i, r;
1470
1471	ah->txchainmask = common->tx_chainmask;
1472	ah->rxchainmask = common->rx_chainmask;
1473
1474	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1475		return -EIO;
1476
1477	if (curchan && !ah->chip_fullsleep)
1478		ath9k_hw_getnf(ah, curchan);
1479
1480	ah->caldata = caldata;
1481	if (caldata &&
1482	    (chan->channel != caldata->channel ||
1483	     (chan->channelFlags & ~CHANNEL_CW_INT) !=
1484	     (caldata->channelFlags & ~CHANNEL_CW_INT))) {
1485		/* Operating channel changed, reset channel calibration data */
1486		memset(caldata, 0, sizeof(*caldata));
1487		ath9k_init_nfcal_hist_buffer(ah, chan);
1488	}
1489
1490	if (bChannelChange &&
1491	    (ah->chip_fullsleep != true) &&
1492	    (ah->curchan != NULL) &&
1493	    (chan->channel != ah->curchan->channel) &&
1494	    ((chan->channelFlags & CHANNEL_ALL) ==
1495	     (ah->curchan->channelFlags & CHANNEL_ALL)) &&
1496	    (!AR_SREV_9280(ah) || AR_DEVID_7010(ah))) {
1497
1498		if (ath9k_hw_channel_change(ah, chan)) {
1499			ath9k_hw_loadnf(ah, ah->curchan);
1500			ath9k_hw_start_nfcal(ah, true);
1501			if (AR_SREV_9271(ah))
1502				ar9002_hw_load_ani_reg(ah, chan);
1503			return 0;
1504		}
1505	}
1506
1507	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
1508	if (saveDefAntenna == 0)
1509		saveDefAntenna = 1;
1510
1511	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
1512
1513	/* For chips on which RTC reset is done, save TSF before it gets cleared */
1514	if (AR_SREV_9100(ah) ||
1515	    (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)))
1516		tsf = ath9k_hw_gettsf64(ah);
1517
1518	saveLedState = REG_READ(ah, AR_CFG_LED) &
1519		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
1520		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
1521
1522	ath9k_hw_mark_phy_inactive(ah);
1523
1524	ah->paprd_table_write_done = false;
1525
1526	/* Only required on the first reset */
1527	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1528		REG_WRITE(ah,
1529			  AR9271_RESET_POWER_DOWN_CONTROL,
1530			  AR9271_RADIO_RF_RST);
1531		udelay(50);
1532	}
1533
1534	if (!ath9k_hw_chip_reset(ah, chan)) {
1535		ath_err(common, "Chip reset failed\n");
1536		return -EINVAL;
1537	}
1538
1539	/* Only required on the first reset */
1540	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1541		ah->htc_reset_init = false;
1542		REG_WRITE(ah,
1543			  AR9271_RESET_POWER_DOWN_CONTROL,
1544			  AR9271_GATE_MAC_CTL);
1545		udelay(50);
1546	}
1547
1548	/* Restore TSF */
1549	if (tsf)
1550		ath9k_hw_settsf64(ah, tsf);
1551
1552	if (AR_SREV_9280_20_OR_LATER(ah))
1553		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
1554
1555	if (!AR_SREV_9300_20_OR_LATER(ah))
1556		ar9002_hw_enable_async_fifo(ah);
1557
1558	r = ath9k_hw_process_ini(ah, chan);
1559	if (r)
1560		return r;
1561
1562	/*
1563	 * Some AR91xx SoC devices frequently fail to accept TSF writes
1564	 * right after the chip reset. When that happens, write a new
1565	 * value after the initvals have been applied, with an offset
1566	 * based on measured time difference
1567	 */
1568	if (AR_SREV_9100(ah) && (ath9k_hw_gettsf64(ah) < tsf)) {
1569		tsf += 1500;
1570		ath9k_hw_settsf64(ah, tsf);
1571	}
1572
1573	/* Setup MFP options for CCMP */
1574	if (AR_SREV_9280_20_OR_LATER(ah)) {
1575		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
1576		 * frames when constructing CCMP AAD. */
1577		REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
1578			      0xc7ff);
1579		ah->sw_mgmt_crypto = false;
1580	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
1581		/* Disable hardware crypto for management frames */
1582		REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
1583			    AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
1584		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1585			    AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
1586		ah->sw_mgmt_crypto = true;
1587	} else
1588		ah->sw_mgmt_crypto = true;
1589
1590	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1591		ath9k_hw_set_delta_slope(ah, chan);
1592
1593	ath9k_hw_spur_mitigate_freq(ah, chan);
1594	ah->eep_ops->set_board_values(ah, chan);
1595
1596	ENABLE_REGWRITE_BUFFER(ah);
1597
1598	REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
1599	REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
1600		  | macStaId1
1601		  | AR_STA_ID1_RTS_USE_DEF
1602		  | (ah->config.
1603		     ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
1604		  | ah->sta_id1_defaults);
1605	ath_hw_setbssidmask(common);
1606	REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
1607	ath9k_hw_write_associd(ah);
1608	REG_WRITE(ah, AR_ISR, ~0);
1609	REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
1610
1611	REGWRITE_BUFFER_FLUSH(ah);
1612
1613	ath9k_hw_set_operating_mode(ah, ah->opmode);
1614
1615	r = ath9k_hw_rf_set_freq(ah, chan);
1616	if (r)
1617		return r;
1618
1619	ath9k_hw_set_clockrate(ah);
1620
1621	ENABLE_REGWRITE_BUFFER(ah);
1622
1623	for (i = 0; i < AR_NUM_DCU; i++)
1624		REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
1625
1626	REGWRITE_BUFFER_FLUSH(ah);
1627
1628	ah->intr_txqs = 0;
1629	for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++)
1630		ath9k_hw_resettxqueue(ah, i);
1631
1632	ath9k_hw_init_interrupt_masks(ah, ah->opmode);
1633	ath9k_hw_ani_cache_ini_regs(ah);
1634	ath9k_hw_init_qos(ah);
1635
1636	if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
1637		ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
1638
1639	ath9k_hw_init_global_settings(ah);
1640
1641	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1642		REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
1643			    AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
1644		REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
1645			      AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
1646		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1647			    AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
1648	}
1649
1650	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PRESERVE_SEQNUM);
1651
1652	ath9k_hw_set_dma(ah);
1653
1654	REG_WRITE(ah, AR_OBS, 8);
1655
1656	if (ah->config.rx_intr_mitigation) {
1657		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
1658		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
1659	}
1660
1661	if (ah->config.tx_intr_mitigation) {
1662		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_LAST, 300);
1663		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_FIRST, 750);
1664	}
1665
1666	ath9k_hw_init_bb(ah, chan);
1667
1668	if (!ath9k_hw_init_cal(ah, chan))
1669		return -EIO;
1670
1671	ENABLE_REGWRITE_BUFFER(ah);
1672
1673	ath9k_hw_restore_chainmask(ah);
1674	REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
1675
1676	REGWRITE_BUFFER_FLUSH(ah);
1677
1678	/*
1679	 * For big endian systems turn on swapping for descriptors
1680	 */
1681	if (AR_SREV_9100(ah)) {
1682		u32 mask;
1683		mask = REG_READ(ah, AR_CFG);
1684		if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
1685			ath_dbg(common, ATH_DBG_RESET,
1686				"CFG Byte Swap Set 0x%x\n", mask);
1687		} else {
1688			mask =
1689				INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
1690			REG_WRITE(ah, AR_CFG, mask);
1691			ath_dbg(common, ATH_DBG_RESET,
1692				"Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
1693		}
1694	} else {
1695		if (common->bus_ops->ath_bus_type == ATH_USB) {
1696			/* Configure AR9271 target WLAN */
1697			if (AR_SREV_9271(ah))
1698				REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
1699			else
1700				REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1701		}
1702#ifdef __BIG_ENDIAN
1703		else if (AR_SREV_9330(ah) || AR_SREV_9340(ah))
1704			REG_RMW(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB, 0);
1705		else
1706			REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1707#endif
1708	}
1709
1710	if (ah->btcoex_hw.enabled)
1711		ath9k_hw_btcoex_enable(ah);
1712
1713	if (AR_SREV_9300_20_OR_LATER(ah)) {
1714		ar9003_hw_bb_watchdog_config(ah);
1715
1716		ar9003_hw_disable_phy_restart(ah);
1717	}
1718
1719	ath9k_hw_apply_gpio_override(ah);
1720
1721	return 0;
1722}
1723EXPORT_SYMBOL(ath9k_hw_reset);
1724
1725/******************************/
1726/* Power Management (Chipset) */
1727/******************************/
1728
1729/*
1730 * Notify Power Mgt is disabled in self-generated frames.
1731 * If requested, force chip to sleep.
1732 */
1733static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
1734{
1735	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
1736	if (setChip) {
1737		/*
1738		 * Clear the RTC force wake bit to allow the
1739		 * mac to go to sleep.
1740		 */
1741		REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
1742			    AR_RTC_FORCE_WAKE_EN);
1743		if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1744			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1745
1746		/* Shutdown chip. Active low */
1747		if (!AR_SREV_5416(ah) && !AR_SREV_9271(ah))
1748			REG_CLR_BIT(ah, (AR_RTC_RESET),
1749				    AR_RTC_RESET_EN);
1750	}
1751
1752	/* Clear Bit 14 of AR_WA after putting chip into Full Sleep mode. */
1753	if (AR_SREV_9300_20_OR_LATER(ah))
1754		REG_WRITE(ah, AR_WA,
1755			  ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
1756}
1757
1758/*
1759 * Notify that Power Management is enabled in self-generated
1760 * frames. If requested, put the chip into network sleep
1761 * (auto/normal) mode. Duration is in units of 128us (1/8 TU).
1762 */
1763static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
1764{
1765	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
1766	if (setChip) {
1767		struct ath9k_hw_capabilities *pCap = &ah->caps;
1768
1769		if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
1770			/* Set WakeOnInterrupt bit; clear ForceWake bit */
1771			REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1772				  AR_RTC_FORCE_WAKE_ON_INT);
1773		} else {
1774			/*
1775			 * Clear the RTC force wake bit to allow the
1776			 * mac to go to sleep.
1777			 */
1778			REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
1779				    AR_RTC_FORCE_WAKE_EN);
1780		}
1781	}
1782
1783	/* Clear Bit 14 of AR_WA after putting chip into Net Sleep mode. */
1784	if (AR_SREV_9300_20_OR_LATER(ah))
1785		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
1786}
1787
1788static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
1789{
1790	u32 val;
1791	int i;
1792
1793	/* Set Bits 14 and 17 of AR_WA before powering on the chip. */
1794	if (AR_SREV_9300_20_OR_LATER(ah)) {
1795		REG_WRITE(ah, AR_WA, ah->WARegVal);
1796		udelay(10);
1797	}
1798
1799	if (setChip) {
1800		if ((REG_READ(ah, AR_RTC_STATUS) &
1801		     AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
1802			if (ath9k_hw_set_reset_reg(ah,
1803					   ATH9K_RESET_POWER_ON) != true) {
1804				return false;
1805			}
1806			if (!AR_SREV_9300_20_OR_LATER(ah))
1807				ath9k_hw_init_pll(ah, NULL);
1808		}
1809		if (AR_SREV_9100(ah))
1810			REG_SET_BIT(ah, AR_RTC_RESET,
1811				    AR_RTC_RESET_EN);
1812
1813		REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
1814			    AR_RTC_FORCE_WAKE_EN);
1815		udelay(50);
1816
1817		for (i = POWER_UP_TIME / 50; i > 0; i--) {
1818			val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
1819			if (val == AR_RTC_STATUS_ON)
1820				break;
1821			udelay(50);
1822			REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
1823				    AR_RTC_FORCE_WAKE_EN);
1824		}
1825		if (i == 0) {
1826			ath_err(ath9k_hw_common(ah),
1827				"Failed to wakeup in %uus\n",
1828				POWER_UP_TIME / 20);
1829			return false;
1830		}
1831	}
1832
1833	REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
1834
1835	return true;
1836}
1837
1838bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
1839{
1840	struct ath_common *common = ath9k_hw_common(ah);
1841	int status = true, setChip = true;
1842	static const char *modes[] = {
1843		"AWAKE",
1844		"FULL-SLEEP",
1845		"NETWORK SLEEP",
1846		"UNDEFINED"
1847	};
1848
1849	if (ah->power_mode == mode)
1850		return status;
1851
1852	ath_dbg(common, ATH_DBG_RESET, "%s -> %s\n",
1853		modes[ah->power_mode], modes[mode]);
1854
1855	switch (mode) {
1856	case ATH9K_PM_AWAKE:
1857		status = ath9k_hw_set_power_awake(ah, setChip);
1858		break;
1859	case ATH9K_PM_FULL_SLEEP:
1860		ath9k_set_power_sleep(ah, setChip);
1861		ah->chip_fullsleep = true;
1862		break;
1863	case ATH9K_PM_NETWORK_SLEEP:
1864		ath9k_set_power_network_sleep(ah, setChip);
1865		break;
1866	default:
1867		ath_err(common, "Unknown power mode %u\n", mode);
1868		return false;
1869	}
1870	ah->power_mode = mode;
1871
1872	/*
1873	 * XXX: If this warning never comes up after a while then
1874	 * simply keep the ATH_DBG_WARN_ON_ONCE() but make
1875	 * ath9k_hw_setpower() return type void.
1876	 */
1877
1878	if (!(ah->ah_flags & AH_UNPLUGGED))
1879		ATH_DBG_WARN_ON_ONCE(!status);
1880
1881	return status;
1882}
1883EXPORT_SYMBOL(ath9k_hw_setpower);
1884
1885/*******************/
1886/* Beacon Handling */
1887/*******************/
1888
1889void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
1890{
1891	int flags = 0;
1892
1893	ENABLE_REGWRITE_BUFFER(ah);
1894
1895	switch (ah->opmode) {
1896	case NL80211_IFTYPE_ADHOC:
1897	case NL80211_IFTYPE_MESH_POINT:
1898		REG_SET_BIT(ah, AR_TXCFG,
1899			    AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
1900		REG_WRITE(ah, AR_NEXT_NDP_TIMER, next_beacon +
1901			  TU_TO_USEC(ah->atim_window ? ah->atim_window : 1));
1902		flags |= AR_NDP_TIMER_EN;
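		/* fall through to also program the AP beacon timers */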
1903	case NL80211_IFTYPE_AP:
1904		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, next_beacon);
1905		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, next_beacon -
1906			  TU_TO_USEC(ah->config.dma_beacon_response_time));
1907		REG_WRITE(ah, AR_NEXT_SWBA, next_beacon -
1908			  TU_TO_USEC(ah->config.sw_beacon_response_time));
1909		flags |=
1910			AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
1911		break;
1912	default:
1913		ath_dbg(ath9k_hw_common(ah), ATH_DBG_BEACON,
1914			"%s: unsupported opmode: %d\n",
1915			__func__, ah->opmode);
1916		return;
1917		break;
1918	}
1919
1920	REG_WRITE(ah, AR_BEACON_PERIOD, beacon_period);
1921	REG_WRITE(ah, AR_DMA_BEACON_PERIOD, beacon_period);
1922	REG_WRITE(ah, AR_SWBA_PERIOD, beacon_period);
1923	REG_WRITE(ah, AR_NDP_PERIOD, beacon_period);
1924
1925	REGWRITE_BUFFER_FLUSH(ah);
1926
1927	REG_SET_BIT(ah, AR_TIMER_MODE, flags);
1928}
1929EXPORT_SYMBOL(ath9k_hw_beaconinit);
1930
1931void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
1932				    const struct ath9k_beacon_state *bs)
1933{
1934	u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
1935	struct ath9k_hw_capabilities *pCap = &ah->caps;
1936	struct ath_common *common = ath9k_hw_common(ah);
1937
1938	ENABLE_REGWRITE_BUFFER(ah);
1939
1940	REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
1941
1942	REG_WRITE(ah, AR_BEACON_PERIOD,
1943		  TU_TO_USEC(bs->bs_intval));
1944	REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
1945		  TU_TO_USEC(bs->bs_intval));
1946
1947	REGWRITE_BUFFER_FLUSH(ah);
1948
1949	REG_RMW_FIELD(ah, AR_RSSI_THR,
1950		      AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
1951
1952	beaconintval = bs->bs_intval;
1953
1954	if (bs->bs_sleepduration > beaconintval)
1955		beaconintval = bs->bs_sleepduration;
1956
1957	dtimperiod = bs->bs_dtimperiod;
1958	if (bs->bs_sleepduration > dtimperiod)
1959		dtimperiod = bs->bs_sleepduration;
1960
1961	if (beaconintval == dtimperiod)
1962		nextTbtt = bs->bs_nextdtim;
1963	else
1964		nextTbtt = bs->bs_nexttbtt;
1965
1966	ath_dbg(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
1967	ath_dbg(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
1968	ath_dbg(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
1969	ath_dbg(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
1970
1971	ENABLE_REGWRITE_BUFFER(ah);
1972
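	/* Program the DTIM/TIM wakeup targets SLEEP_SLOP TUs early so the chip is awake before the beacon arrives */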
1973	REG_WRITE(ah, AR_NEXT_DTIM,
1974		  TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
1975	REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
1976
1977	REG_WRITE(ah, AR_SLEEP1,
1978		  SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
1979		  | AR_SLEEP1_ASSUME_DTIM);
1980
1981	if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
1982		beacontimeout = (BEACON_TIMEOUT_VAL << 3);
1983	else
1984		beacontimeout = MIN_BEACON_TIMEOUT_VAL;
1985
1986	REG_WRITE(ah, AR_SLEEP2,
1987		  SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
1988
1989	REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
1990	REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
1991
1992	REGWRITE_BUFFER_FLUSH(ah);
1993
1994	REG_SET_BIT(ah, AR_TIMER_MODE,
1995		    AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
1996		    AR_DTIM_TIMER_EN);
1997
1998	/* TSF Out of Range Threshold */
1999	REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
2000}
2001EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
2002
2003/*******************/
2004/* HW Capabilities */
2005/*******************/
2006
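/*
 * Restrict the EEPROM chainmask to the chains the chip actually supports;
 * if the intersection is empty, fall back to the full chip chainmask.
 */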
2007static u8 fixup_chainmask(u8 chip_chainmask, u8 eeprom_chainmask)
2008{
2009	eeprom_chainmask &= chip_chainmask;
2010	if (eeprom_chainmask)
2011		return eeprom_chainmask;
2012	else
2013		return chip_chainmask;
2014}
2015
2016int ath9k_hw_fill_cap_info(struct ath_hw *ah)
2017{
2018	struct ath9k_hw_capabilities *pCap = &ah->caps;
2019	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
2020	struct ath_common *common = ath9k_hw_common(ah);
2021	struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw;
2022	unsigned int chip_chainmask;
2023
2024	u16 eeval;
2025	u8 ant_div_ctl1, tx_chainmask, rx_chainmask;
2026
2027	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
2028	regulatory->current_rd = eeval;
2029
2030	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
2031	if (AR_SREV_9285_12_OR_LATER(ah))
2032		eeval |= AR9285_RDEXT_DEFAULT;
2033	regulatory->current_rd_ext = eeval;
2034
2035	if (ah->opmode != NL80211_IFTYPE_AP &&
2036	    ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
2037		if (regulatory->current_rd == 0x64 ||
2038		    regulatory->current_rd == 0x65)
2039			regulatory->current_rd += 5;
2040		else if (regulatory->current_rd == 0x41)
2041			regulatory->current_rd = 0x43;
2042		ath_dbg(common, ATH_DBG_REGULATORY,
2043			"regdomain mapped to 0x%x\n", regulatory->current_rd);
2044	}
2045
2046	eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
2047	if ((eeval & (AR5416_OPFLAGS_11G | AR5416_OPFLAGS_11A)) == 0) {
2048		ath_err(common,
2049			"no band has been marked as supported in EEPROM\n");
2050		return -EINVAL;
2051	}
2052
2053	if (eeval & AR5416_OPFLAGS_11A)
2054		pCap->hw_caps |= ATH9K_HW_CAP_5GHZ;
2055
2056	if (eeval & AR5416_OPFLAGS_11G)
2057		pCap->hw_caps |= ATH9K_HW_CAP_2GHZ;
2058
2059	if (AR_SREV_9485(ah) || AR_SREV_9285(ah) || AR_SREV_9330(ah))
2060		chip_chainmask = 1;
2061	else if (!AR_SREV_9280_20_OR_LATER(ah))
2062		chip_chainmask = 7;
2063	else if (!AR_SREV_9300_20_OR_LATER(ah) || AR_SREV_9340(ah))
2064		chip_chainmask = 3;
2065	else
2066		chip_chainmask = 7;
2067
2068	pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
2069	/*
2070	 * For AR9271 we will temporarily use the rx chainmask as read from
2071	 * the EEPROM.
2072	 */
2073	if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
2074	    !(eeval & AR5416_OPFLAGS_11A) &&
2075	    !(AR_SREV_9271(ah)))
2076		/* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
2077		pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
2078	else if (AR_SREV_9100(ah))
2079		pCap->rx_chainmask = 0x7;
2080	else
2081		/* Use rx_chainmask from EEPROM. */
2082		pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
2083
2084	pCap->tx_chainmask = fixup_chainmask(chip_chainmask, pCap->tx_chainmask);
2085	pCap->rx_chainmask = fixup_chainmask(chip_chainmask, pCap->rx_chainmask);
2086
2087	ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
2088
2089	/* enable key search for every frame in an aggregate */
2090	if (AR_SREV_9300_20_OR_LATER(ah))
2091		ah->misc_mode |= AR_PCU_ALWAYS_PERFORM_KEYSEARCH;
2092
2093	common->crypt_caps |= ATH_CRYPT_CAP_CIPHER_AESCCM;
2094
2095	if (ah->hw_version.devid != AR2427_DEVID_PCIE)
2096		pCap->hw_caps |= ATH9K_HW_CAP_HT;
2097	else
2098		pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
2099
2100	if (AR_SREV_9271(ah))
2101		pCap->num_gpio_pins = AR9271_NUM_GPIO;
2102	else if (AR_DEVID_7010(ah))
2103		pCap->num_gpio_pins = AR7010_NUM_GPIO;
2104	else if (AR_SREV_9285_12_OR_LATER(ah))
2105		pCap->num_gpio_pins = AR9285_NUM_GPIO;
2106	else if (AR_SREV_9280_20_OR_LATER(ah))
2107		pCap->num_gpio_pins = AR928X_NUM_GPIO;
2108	else
2109		pCap->num_gpio_pins = AR_NUM_GPIO;
2110
2111	if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
2112		pCap->hw_caps |= ATH9K_HW_CAP_CST;
2113		pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
2114	} else {
2115		pCap->rts_aggr_limit = (8 * 1024);
2116	}
2117
2118#if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
2119	ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
2120	if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
2121		ah->rfkill_gpio =
2122			MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
2123		ah->rfkill_polarity =
2124			MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
2125
2126		pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
2127	}
2128#endif
2129	if (AR_SREV_9271(ah) || AR_SREV_9300_20_OR_LATER(ah))
2130		pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
2131	else
2132		pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
2133
2134	if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
2135		pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
2136	else
2137		pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
2138
2139	if (common->btcoex_enabled) {
2140		if (AR_SREV_9300_20_OR_LATER(ah)) {
2141			btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
2142			btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO_9300;
2143			btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO_9300;
2144			btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO_9300;
2145		} else if (AR_SREV_9280_20_OR_LATER(ah)) {
2146			btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO_9280;
2147			btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO_9280;
2148
2149			if (AR_SREV_9285(ah)) {
2150				btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
2151				btcoex_hw->btpriority_gpio =
2152						ATH_BTPRIORITY_GPIO_9285;
2153			} else {
2154				btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE;
2155			}
2156		}
2157	} else {
2158		btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE;
2159	}
2160
2161	if (AR_SREV_9300_20_OR_LATER(ah)) {
2162		pCap->hw_caps |= ATH9K_HW_CAP_EDMA | ATH9K_HW_CAP_FASTCLOCK;
2163		if (!AR_SREV_9330(ah) && !AR_SREV_9485(ah))
2164			pCap->hw_caps |= ATH9K_HW_CAP_LDPC;
2165
2166		pCap->rx_hp_qdepth = ATH9K_HW_RX_HP_QDEPTH;
2167		pCap->rx_lp_qdepth = ATH9K_HW_RX_LP_QDEPTH;
2168		pCap->rx_status_len = sizeof(struct ar9003_rxs);
2169		pCap->tx_desc_len = sizeof(struct ar9003_txc);
2170		pCap->txs_len = sizeof(struct ar9003_txs);
2171		if (!ah->config.paprd_disable &&
2172		    ah->eep_ops->get_eeprom(ah, EEP_PAPRD))
2173			pCap->hw_caps |= ATH9K_HW_CAP_PAPRD;
2174	} else {
2175		pCap->tx_desc_len = sizeof(struct ath_desc);
2176		if (AR_SREV_9280_20(ah))
2177			pCap->hw_caps |= ATH9K_HW_CAP_FASTCLOCK;
2178	}
2179
2180	if (AR_SREV_9300_20_OR_LATER(ah))
2181		pCap->hw_caps |= ATH9K_HW_CAP_RAC_SUPPORTED;
2182
2183	if (AR_SREV_9300_20_OR_LATER(ah))
2184		ah->ent_mode = REG_READ(ah, AR_ENT_OTP);
2185
2186	if (AR_SREV_9287_11_OR_LATER(ah) || AR_SREV_9271(ah))
2187		pCap->hw_caps |= ATH9K_HW_CAP_SGI_20;
2188
2189	if (AR_SREV_9285(ah))
2190		if (ah->eep_ops->get_eeprom(ah, EEP_MODAL_VER) >= 3) {
2191			ant_div_ctl1 =
2192				ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2193			if ((ant_div_ctl1 & 0x1) && ((ant_div_ctl1 >> 3) & 0x1))
2194				pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2195		}
2196	if (AR_SREV_9300_20_OR_LATER(ah)) {
2197		if (ah->eep_ops->get_eeprom(ah, EEP_CHAIN_MASK_REDUCE))
2198			pCap->hw_caps |= ATH9K_HW_CAP_APM;
2199	}
2200
2201
2202	if (AR_SREV_9330(ah) || AR_SREV_9485(ah)) {
2203		ant_div_ctl1 = ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2204		/*
2205		 * enable the diversity-combining algorithm only when
2206		 * both enable_lna_div and enable_fast_div are set
2207		 *		Table for Diversity
2208		 * ant_div_alt_lnaconf		bit 0-1
2209		 * ant_div_main_lnaconf		bit 2-3
2210		 * ant_div_alt_gaintb		bit 4
2211		 * ant_div_main_gaintb		bit 5
2212		 * enable_ant_div_lnadiv	bit 6
2213		 * enable_ant_fast_div		bit 7
2214		 */
2215		if ((ant_div_ctl1 >> 0x6) == 0x3)
2216			pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2217	}
2218
2219	if (AR_SREV_9485_10(ah)) {
2220		pCap->pcie_lcr_extsync_en = true;
2221		pCap->pcie_lcr_offset = 0x80;
2222	}
2223
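	/* max_txchains/max_rxchains = number of bits set in each chainmask */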
2224	tx_chainmask = pCap->tx_chainmask;
2225	rx_chainmask = pCap->rx_chainmask;
2226	while (tx_chainmask || rx_chainmask) {
2227		if (tx_chainmask & BIT(0))
2228			pCap->max_txchains++;
2229		if (rx_chainmask & BIT(0))
2230			pCap->max_rxchains++;
2231
2232		tx_chainmask >>= 1;
2233		rx_chainmask >>= 1;
2234	}
2235
2236	return 0;
2237}
2238
2239/****************************/
2240/* GPIO / RFKILL / Antennae */
2241/****************************/
2242
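/* Each GPIO output mux register packs six 5-bit entries, hence the (gpio % 6) * 5 shift below */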
2243static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
2244					 u32 gpio, u32 type)
2245{
2246	int addr;
2247	u32 gpio_shift, tmp;
2248
2249	if (gpio > 11)
2250		addr = AR_GPIO_OUTPUT_MUX3;
2251	else if (gpio > 5)
2252		addr = AR_GPIO_OUTPUT_MUX2;
2253	else
2254		addr = AR_GPIO_OUTPUT_MUX1;
2255
2256	gpio_shift = (gpio % 6) * 5;
2257
2258	if (AR_SREV_9280_20_OR_LATER(ah)
2259	    || (addr != AR_GPIO_OUTPUT_MUX1)) {
2260		REG_RMW(ah, addr, (type << gpio_shift),
2261			(0x1f << gpio_shift));
2262	} else {
2263		tmp = REG_READ(ah, addr);
2264		tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
2265		tmp &= ~(0x1f << gpio_shift);
2266		tmp |= (type << gpio_shift);
2267		REG_WRITE(ah, addr, tmp);
2268	}
2269}
2270
2271void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
2272{
2273	u32 gpio_shift;
2274
2275	BUG_ON(gpio >= ah->caps.num_gpio_pins);
2276
2277	if (AR_DEVID_7010(ah)) {
2278		gpio_shift = gpio;
2279		REG_RMW(ah, AR7010_GPIO_OE,
2280			(AR7010_GPIO_OE_AS_INPUT << gpio_shift),
2281			(AR7010_GPIO_OE_MASK << gpio_shift));
2282		return;
2283	}
2284
2285	gpio_shift = gpio << 1;
2286	REG_RMW(ah,
2287		AR_GPIO_OE_OUT,
2288		(AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
2289		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2290}
2291EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
2292
2293u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
2294{
2295#define MS_REG_READ(x, y) \
2296	(MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
2297
2298	if (gpio >= ah->caps.num_gpio_pins)
2299		return 0xffffffff;
2300
2301	if (AR_DEVID_7010(ah)) {
2302		u32 val;
2303		val = REG_READ(ah, AR7010_GPIO_IN);
2304		return (MS(val, AR7010_GPIO_IN_VAL) & AR_GPIO_BIT(gpio)) == 0;
2305	} else if (AR_SREV_9300_20_OR_LATER(ah))
2306		return (MS(REG_READ(ah, AR_GPIO_IN), AR9300_GPIO_IN_VAL) &
2307			AR_GPIO_BIT(gpio)) != 0;
2308	else if (AR_SREV_9271(ah))
2309		return MS_REG_READ(AR9271, gpio) != 0;
2310	else if (AR_SREV_9287_11_OR_LATER(ah))
2311		return MS_REG_READ(AR9287, gpio) != 0;
2312	else if (AR_SREV_9285_12_OR_LATER(ah))
2313		return MS_REG_READ(AR9285, gpio) != 0;
2314	else if (AR_SREV_9280_20_OR_LATER(ah))
2315		return MS_REG_READ(AR928X, gpio) != 0;
2316	else
2317		return MS_REG_READ(AR, gpio) != 0;
2318}
2319EXPORT_SYMBOL(ath9k_hw_gpio_get);
2320
2321void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
2322			 u32 ah_signal_type)
2323{
2324	u32 gpio_shift;
2325
2326	if (AR_DEVID_7010(ah)) {
2327		gpio_shift = gpio;
2328		REG_RMW(ah, AR7010_GPIO_OE,
2329			(AR7010_GPIO_OE_AS_OUTPUT << gpio_shift),
2330			(AR7010_GPIO_OE_MASK << gpio_shift));
2331		return;
2332	}
2333
2334	ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
2335	gpio_shift = 2 * gpio;
2336	REG_RMW(ah,
2337		AR_GPIO_OE_OUT,
2338		(AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
2339		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2340}
2341EXPORT_SYMBOL(ath9k_hw_cfg_output);
2342
2343void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
2344{
2345	if (AR_DEVID_7010(ah)) {
2346		val = val ? 0 : 1;
2347		REG_RMW(ah, AR7010_GPIO_OUT, ((val&1) << gpio),
2348			AR_GPIO_BIT(gpio));
2349		return;
2350	}
2351
2352	if (AR_SREV_9271(ah))
2353		val = ~val;
2354
2355	REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
2356		AR_GPIO_BIT(gpio));
2357}
2358EXPORT_SYMBOL(ath9k_hw_set_gpio);
2359
2360u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
2361{
2362	return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
2363}
2364EXPORT_SYMBOL(ath9k_hw_getdefantenna);
2365
2366void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
2367{
2368	REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
2369}
2370EXPORT_SYMBOL(ath9k_hw_setantenna);
2371
2372/*********************/
2373/* General Operation */
2374/*********************/
2375
2376u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
2377{
2378	u32 bits = REG_READ(ah, AR_RX_FILTER);
2379	u32 phybits = REG_READ(ah, AR_PHY_ERR);
2380
2381	if (phybits & AR_PHY_ERR_RADAR)
2382		bits |= ATH9K_RX_FILTER_PHYRADAR;
2383	if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
2384		bits |= ATH9K_RX_FILTER_PHYERR;
2385
2386	return bits;
2387}
2388EXPORT_SYMBOL(ath9k_hw_getrxfilter);
2389
2390void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
2391{
2392	u32 phybits;
2393
2394	ENABLE_REGWRITE_BUFFER(ah);
2395
2396	REG_WRITE(ah, AR_RX_FILTER, bits);
2397
2398	phybits = 0;
2399	if (bits & ATH9K_RX_FILTER_PHYRADAR)
2400		phybits |= AR_PHY_ERR_RADAR;
2401	if (bits & ATH9K_RX_FILTER_PHYERR)
2402		phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
2403	REG_WRITE(ah, AR_PHY_ERR, phybits);
2404
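	/* PHY error frames can be zero length, so zero-length-frame DMA is enabled whenever PHY error reporting is on */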
2405	if (phybits)
2406		REG_SET_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2407	else
2408		REG_CLR_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2409
2410	REGWRITE_BUFFER_FLUSH(ah);
2411}
2412EXPORT_SYMBOL(ath9k_hw_setrxfilter);
2413
2414bool ath9k_hw_phy_disable(struct ath_hw *ah)
2415{
2416	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
2417		return false;
2418
2419	ath9k_hw_init_pll(ah, NULL);
 
2420	return true;
2421}
2422EXPORT_SYMBOL(ath9k_hw_phy_disable);
2423
2424bool ath9k_hw_disable(struct ath_hw *ah)
2425{
2426	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2427		return false;
2428
2429	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
2430		return false;
2431
2432	ath9k_hw_init_pll(ah, NULL);
2433	return true;
2434}
2435EXPORT_SYMBOL(ath9k_hw_disable);
2436
2437void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit, bool test)
2438{
2439	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
2440	struct ath9k_channel *chan = ah->curchan;
2441	struct ieee80211_channel *channel = chan->chan;
2442
2443	regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
2444
2445	ah->eep_ops->set_txpower(ah, chan,
2446				 ath9k_regd_get_ctl(regulatory, chan),
2447				 channel->max_antenna_gain * 2,
2448				 channel->max_power * 2,
2449				 min((u32) MAX_RATE_POWER,
2450				 (u32) regulatory->power_limit), test);
2451}
2452EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
2453
2454void ath9k_hw_setopmode(struct ath_hw *ah)
2455{
2456	ath9k_hw_set_operating_mode(ah, ah->opmode);
2457}
2458EXPORT_SYMBOL(ath9k_hw_setopmode);
2459
2460void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
2461{
2462	REG_WRITE(ah, AR_MCAST_FIL0, filter0);
2463	REG_WRITE(ah, AR_MCAST_FIL1, filter1);
2464}
2465EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
2466
2467void ath9k_hw_write_associd(struct ath_hw *ah)
2468{
2469	struct ath_common *common = ath9k_hw_common(ah);
2470
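	/* BSS_ID0 holds the low four BSSID bytes; BSS_ID1 holds the last two bytes plus the 14-bit association ID */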
2471	REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
2472	REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
2473		  ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2474}
2475EXPORT_SYMBOL(ath9k_hw_write_associd);
2476
2477#define ATH9K_MAX_TSF_READ 10
2478
2479u64 ath9k_hw_gettsf64(struct ath_hw *ah)
2480{
2481	u32 tsf_lower, tsf_upper1, tsf_upper2;
2482	int i;
2483
2484	tsf_upper1 = REG_READ(ah, AR_TSF_U32);
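	/* Re-read until the upper word is stable, in case the lower word rolled over between the two reads */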
2485	for (i = 0; i < ATH9K_MAX_TSF_READ; i++) {
2486		tsf_lower = REG_READ(ah, AR_TSF_L32);
2487		tsf_upper2 = REG_READ(ah, AR_TSF_U32);
2488		if (tsf_upper2 == tsf_upper1)
2489			break;
2490		tsf_upper1 = tsf_upper2;
2491	}
2492
2493	WARN_ON(i == ATH9K_MAX_TSF_READ);
2494
2495	return (((u64)tsf_upper1 << 32) | tsf_lower);
2496}
2497EXPORT_SYMBOL(ath9k_hw_gettsf64);
2498
2499void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
2500{
2501	REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
2502	REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
2503}
2504EXPORT_SYMBOL(ath9k_hw_settsf64);
2505
2506void ath9k_hw_reset_tsf(struct ath_hw *ah)
2507{
2508	if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
2509			   AH_TSF_WRITE_TIMEOUT))
2510		ath_dbg(ath9k_hw_common(ah), ATH_DBG_RESET,
2511			"AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
2512
2513	REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
2514}
2515EXPORT_SYMBOL(ath9k_hw_reset_tsf);
2516
2517void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
2518{
2519	if (setting)
2520		ah->misc_mode |= AR_PCU_TX_ADD_TSF;
2521	else
2522		ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
2523}
2524EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
2525
2526void ath9k_hw_set11nmac2040(struct ath_hw *ah)
2527{
2528	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
2529	u32 macmode;
2530
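	/* In HT40, require both the control and extension channel to be clear (joined RX clear) unless extension-channel CCA is ignored */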
2531	if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
2532		macmode = AR_2040_JOINED_RX_CLEAR;
2533	else
2534		macmode = 0;
2535
2536	REG_WRITE(ah, AR_2040_MODE, macmode);
2537}
2538
2539/* HW Generic timers configuration */
2540
2541static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
2542{
2543	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2544	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2545	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2546	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2547	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2548	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2549	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2550	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2551	{AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
2552	{AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
2553				AR_NDP2_TIMER_MODE, 0x0002},
2554	{AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
2555				AR_NDP2_TIMER_MODE, 0x0004},
2556	{AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
2557				AR_NDP2_TIMER_MODE, 0x0008},
2558	{AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
2559				AR_NDP2_TIMER_MODE, 0x0010},
2560	{AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
2561				AR_NDP2_TIMER_MODE, 0x0020},
2562	{AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
2563				AR_NDP2_TIMER_MODE, 0x0040},
2564	{AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
2565				AR_NDP2_TIMER_MODE, 0x0080}
2566};
2567
2568/* HW generic timer primitives */
2569
2570/* compute and clear index of rightmost 1 */
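/*
 * b &= -b isolates the lowest set bit; multiplying by the de Bruijn
 * constant and taking the top five bits yields a unique index into
 * gen_timer_index for that bit position.
 */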
2571static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
2572{
2573	u32 b;
2574
2575	b = *mask;
2576	b &= (0-b);
2577	*mask &= ~b;
2578	b *= debruijn32;
2579	b >>= 27;
2580
2581	return timer_table->gen_timer_index[b];
2582}
2583
2584u32 ath9k_hw_gettsf32(struct ath_hw *ah)
2585{
2586	return REG_READ(ah, AR_TSF_L32);
2587}
2588EXPORT_SYMBOL(ath9k_hw_gettsf32);
2589
2590struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
2591					  void (*trigger)(void *),
2592					  void (*overflow)(void *),
2593					  void *arg,
2594					  u8 timer_index)
2595{
2596	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2597	struct ath_gen_timer *timer;
2598
2599	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
2600
2601	if (timer == NULL) {
2602		ath_err(ath9k_hw_common(ah),
2603			"Failed to allocate memory for hw timer[%d]\n",
2604			timer_index);
2605		return NULL;
2606	}
2607
2608	/* allocate a hardware generic timer slot */
2609	timer_table->timers[timer_index] = timer;
2610	timer->index = timer_index;
2611	timer->trigger = trigger;
2612	timer->overflow = overflow;
2613	timer->arg = arg;
2614
2615	return timer;
2616}
2617EXPORT_SYMBOL(ath_gen_timer_alloc);
2618
2619void ath9k_hw_gen_timer_start(struct ath_hw *ah,
2620			      struct ath_gen_timer *timer,
2621			      u32 trig_timeout,
2622			      u32 timer_period)
2623{
2624	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2625	u32 tsf, timer_next;
2626
2627	BUG_ON(!timer_period);
2628
2629	set_bit(timer->index, &timer_table->timer_mask.timer_bits);
2630
2631	tsf = ath9k_hw_gettsf32(ah);
2632
2633	timer_next = tsf + trig_timeout;
2634
2635	ath_dbg(ath9k_hw_common(ah), ATH_DBG_HWTIMER,
2636		"current tsf %x period %x timer_next %x\n",
2637		tsf, timer_period, timer_next);
2638
2639	/*
2640	 * Program generic timer registers
2641	 */
2642	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
2643		 timer_next);
2644	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
2645		  timer_period);
2646	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
2647		    gen_tmr_configuration[timer->index].mode_mask);
2648
2649	/* Enable both trigger and thresh interrupt masks */
2650	REG_SET_BIT(ah, AR_IMR_S5,
2651		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
2652		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
2653}
2654EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
2655
2656void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
2657{
2658	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2659
2660	if ((timer->index < AR_FIRST_NDP_TIMER) ||
2661		(timer->index >= ATH_MAX_GEN_TIMER)) {
2662		return;
2663	}
2664
2665	/* Clear generic timer enable bits. */
2666	REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
2667			gen_tmr_configuration[timer->index].mode_mask);
2668
2669	/* Disable both trigger and thresh interrupt masks */
2670	REG_CLR_BIT(ah, AR_IMR_S5,
2671		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
2672		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
2673
2674	clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
2675}
2676EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
2677
2678void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
2679{
2680	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2681
2682	/* free the hardware generic timer slot */
2683	timer_table->timers[timer->index] = NULL;
2684	kfree(timer);
2685}
2686EXPORT_SYMBOL(ath_gen_timer_free);
2687
2688/*
2689 * Generic Timer Interrupts handling
2690 */
2691void ath_gen_timer_isr(struct ath_hw *ah)
2692{
2693	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2694	struct ath_gen_timer *timer;
2695	struct ath_common *common = ath9k_hw_common(ah);
2696	u32 trigger_mask, thresh_mask, index;
2697
2698	/* get hardware generic timer interrupt status */
2699	trigger_mask = ah->intr_gen_timer_trigger;
2700	thresh_mask = ah->intr_gen_timer_thresh;
2701	trigger_mask &= timer_table->timer_mask.val;
2702	thresh_mask &= timer_table->timer_mask.val;
2703
2704	trigger_mask &= ~thresh_mask;
2705
2706	while (thresh_mask) {
2707		index = rightmost_index(timer_table, &thresh_mask);
2708		timer = timer_table->timers[index];
2709		BUG_ON(!timer);
2710		ath_dbg(common, ATH_DBG_HWTIMER,
2711			"TSF overflow for Gen timer %d\n", index);
2712		timer->overflow(timer->arg);
2713	}
2714
2715	while (trigger_mask) {
2716		index = rightmost_index(timer_table, &trigger_mask);
2717		timer = timer_table->timers[index];
2718		BUG_ON(!timer);
2719		ath_dbg(common, ATH_DBG_HWTIMER,
2720			"Gen timer[%d] trigger\n", index);
2721		timer->trigger(timer->arg);
2722	}
2723}
2724EXPORT_SYMBOL(ath_gen_timer_isr);
2725
2726/********/
2727/* HTC  */
2728/********/
2729
2730void ath9k_hw_htc_resetinit(struct ath_hw *ah)
2731{
2732	ah->htc_reset_init = true;
2733}
2734EXPORT_SYMBOL(ath9k_hw_htc_resetinit);
2735
2736static struct {
2737	u32 version;
2738	const char *name;
2739} ath_mac_bb_names[] = {
2740	/* Devices with external radios */
2741	{ AR_SREV_VERSION_5416_PCI,	"5416" },
2742	{ AR_SREV_VERSION_5416_PCIE,	"5418" },
2743	{ AR_SREV_VERSION_9100,		"9100" },
2744	{ AR_SREV_VERSION_9160,		"9160" },
2745	/* Single-chip solutions */
2746	{ AR_SREV_VERSION_9280,		"9280" },
2747	{ AR_SREV_VERSION_9285,		"9285" },
2748	{ AR_SREV_VERSION_9287,         "9287" },
2749	{ AR_SREV_VERSION_9271,         "9271" },
2750	{ AR_SREV_VERSION_9300,         "9300" },
2751	{ AR_SREV_VERSION_9330,         "9330" },
2752	{ AR_SREV_VERSION_9485,         "9485" },
2753};
2754
2755/* For devices with external radios */
2756static struct {
2757	u16 version;
2758	const char *name;
2759} ath_rf_names[] = {
2760	{ 0,				"5133" },
2761	{ AR_RAD5133_SREV_MAJOR,	"5133" },
2762	{ AR_RAD5122_SREV_MAJOR,	"5122" },
2763	{ AR_RAD2133_SREV_MAJOR,	"2133" },
2764	{ AR_RAD2122_SREV_MAJOR,	"2122" }
2765};
2766
2767/*
2768 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
2769 */
2770static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
2771{
2772	int i;
2773
2774	for (i = 0; i < ARRAY_SIZE(ath_mac_bb_names); i++) {
2775		if (ath_mac_bb_names[i].version == mac_bb_version) {
2776			return ath_mac_bb_names[i].name;
2777		}
2778	}
2779
2780	return "????";
2781}
2782
2783/*
2784 * Return the RF name. "????" is returned if the RF is unknown.
2785 * Used for devices with external radios.
2786 */
2787static const char *ath9k_hw_rf_name(u16 rf_version)
2788{
2789	int i;
2790
2791	for (i = 0; i < ARRAY_SIZE(ath_rf_names); i++) {
2792		if (ath_rf_names[i].version == rf_version) {
2793			return ath_rf_names[i].name;
2794		}
2795	}
2796
2797	return "????";
2798}
2799
2800void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
2801{
2802	int used;
2803
2804	/* chipsets >= AR9280 are single-chip */
2805	if (AR_SREV_9280_20_OR_LATER(ah)) {
2806		used = snprintf(hw_name, len,
2807			       "Atheros AR%s Rev:%x",
2808			       ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
2809			       ah->hw_version.macRev);
2810	}
2811	else {
2812		used = snprintf(hw_name, len,
2813			       "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
2814			       ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
2815			       ah->hw_version.macRev,
2816			       ath9k_hw_rf_name((ah->hw_version.analog5GhzRev &
2817						AR_RADIO_SREV_MAJOR)),
2818			       ah->hw_version.phyRev);
2819	}
2820
2821	hw_name[used] = '\0';
2822}
2823EXPORT_SYMBOL(ath9k_hw_name);
v3.5.6
   1/*
   2 * Copyright (c) 2008-2011 Atheros Communications Inc.
   3 *
   4 * Permission to use, copy, modify, and/or distribute this software for any
   5 * purpose with or without fee is hereby granted, provided that the above
   6 * copyright notice and this permission notice appear in all copies.
   7 *
   8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  15 */
  16
  17#include <linux/io.h>
  18#include <linux/slab.h>
  19#include <linux/module.h>
  20#include <asm/unaligned.h>
  21
  22#include "hw.h"
  23#include "hw-ops.h"
  24#include "rc.h"
  25#include "ar9003_mac.h"
  26#include "ar9003_mci.h"
  27#include "debug.h"
  28#include "ath9k.h"
  29
  30static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
  31
  32MODULE_AUTHOR("Atheros Communications");
  33MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
  34MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
  35MODULE_LICENSE("Dual BSD/GPL");
  36
  37static int __init ath9k_init(void)
  38{
  39	return 0;
  40}
  41module_init(ath9k_init);
  42
  43static void __exit ath9k_exit(void)
  44{
  45	return;
  46}
  47module_exit(ath9k_exit);
  48
  49/* Private hardware callbacks */
  50
  51static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
  52{
  53	ath9k_hw_private_ops(ah)->init_cal_settings(ah);
  54}
  55
  56static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
  57{
  58	ath9k_hw_private_ops(ah)->init_mode_regs(ah);
  59}
  60
  61static u32 ath9k_hw_compute_pll_control(struct ath_hw *ah,
  62					struct ath9k_channel *chan)
  63{
  64	return ath9k_hw_private_ops(ah)->compute_pll_control(ah, chan);
  65}
  66
  67static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
  68{
  69	if (!ath9k_hw_private_ops(ah)->init_mode_gain_regs)
  70		return;
  71
  72	ath9k_hw_private_ops(ah)->init_mode_gain_regs(ah);
  73}
  74
  75static void ath9k_hw_ani_cache_ini_regs(struct ath_hw *ah)
  76{
  77	/* You will not have this callback if using the old ANI */
  78	if (!ath9k_hw_private_ops(ah)->ani_cache_ini_regs)
  79		return;
  80
  81	ath9k_hw_private_ops(ah)->ani_cache_ini_regs(ah);
  82}
  83
  84/********************/
  85/* Helper Functions */
  86/********************/
  87
  88#ifdef CONFIG_ATH9K_DEBUGFS
  89
  90void ath9k_debug_sync_cause(struct ath_common *common, u32 sync_cause)
  91{
  92	struct ath_softc *sc = common->priv;
  93	if (sync_cause)
  94		sc->debug.stats.istats.sync_cause_all++;
  95	if (sync_cause & AR_INTR_SYNC_RTC_IRQ)
  96		sc->debug.stats.istats.sync_rtc_irq++;
  97	if (sync_cause & AR_INTR_SYNC_MAC_IRQ)
  98		sc->debug.stats.istats.sync_mac_irq++;
  99	if (sync_cause & AR_INTR_SYNC_EEPROM_ILLEGAL_ACCESS)
 100		sc->debug.stats.istats.eeprom_illegal_access++;
 101	if (sync_cause & AR_INTR_SYNC_APB_TIMEOUT)
 102		sc->debug.stats.istats.apb_timeout++;
 103	if (sync_cause & AR_INTR_SYNC_PCI_MODE_CONFLICT)
 104		sc->debug.stats.istats.pci_mode_conflict++;
 105	if (sync_cause & AR_INTR_SYNC_HOST1_FATAL)
 106		sc->debug.stats.istats.host1_fatal++;
 107	if (sync_cause & AR_INTR_SYNC_HOST1_PERR)
 108		sc->debug.stats.istats.host1_perr++;
 109	if (sync_cause & AR_INTR_SYNC_TRCV_FIFO_PERR)
 110		sc->debug.stats.istats.trcv_fifo_perr++;
 111	if (sync_cause & AR_INTR_SYNC_RADM_CPL_EP)
 112		sc->debug.stats.istats.radm_cpl_ep++;
 113	if (sync_cause & AR_INTR_SYNC_RADM_CPL_DLLP_ABORT)
 114		sc->debug.stats.istats.radm_cpl_dllp_abort++;
 115	if (sync_cause & AR_INTR_SYNC_RADM_CPL_TLP_ABORT)
 116		sc->debug.stats.istats.radm_cpl_tlp_abort++;
 117	if (sync_cause & AR_INTR_SYNC_RADM_CPL_ECRC_ERR)
 118		sc->debug.stats.istats.radm_cpl_ecrc_err++;
 119	if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT)
 120		sc->debug.stats.istats.radm_cpl_timeout++;
 121	if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT)
 122		sc->debug.stats.istats.local_timeout++;
 123	if (sync_cause & AR_INTR_SYNC_PM_ACCESS)
 124		sc->debug.stats.istats.pm_access++;
 125	if (sync_cause & AR_INTR_SYNC_MAC_AWAKE)
 126		sc->debug.stats.istats.mac_awake++;
 127	if (sync_cause & AR_INTR_SYNC_MAC_ASLEEP)
 128		sc->debug.stats.istats.mac_asleep++;
 129	if (sync_cause & AR_INTR_SYNC_MAC_SLEEP_ACCESS)
 130		sc->debug.stats.istats.mac_sleep_access++;
 131}
 132#endif
 133
 134
 135static void ath9k_hw_set_clockrate(struct ath_hw *ah)
 136{
 137	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
 138	struct ath_common *common = ath9k_hw_common(ah);
 139	unsigned int clockrate;
 140
 141	/* AR9287 v1.3+ uses async FIFO and runs the MAC at 117 MHz */
 142	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah))
 143		clockrate = 117;
 144	else if (!ah->curchan) /* should really check for CCK instead */
 145		clockrate = ATH9K_CLOCK_RATE_CCK;
 146	else if (conf->channel->band == IEEE80211_BAND_2GHZ)
 147		clockrate = ATH9K_CLOCK_RATE_2GHZ_OFDM;
 148	else if (ah->caps.hw_caps & ATH9K_HW_CAP_FASTCLOCK)
 149		clockrate = ATH9K_CLOCK_FAST_RATE_5GHZ_OFDM;
 150	else
 151		clockrate = ATH9K_CLOCK_RATE_5GHZ_OFDM;
 152
 153	if (conf_is_ht40(conf))
 154		clockrate *= 2;
 155
 156	if (ah->curchan) {
 157		if (IS_CHAN_HALF_RATE(ah->curchan))
 158			clockrate /= 2;
 159		if (IS_CHAN_QUARTER_RATE(ah->curchan))
 160			clockrate /= 4;
 161	}
 162
 163	common->clockrate = clockrate;
 164}
 165
 166static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
 167{
 168	struct ath_common *common = ath9k_hw_common(ah);
 169
 170	return usecs * common->clockrate;
 171}
 172
 173bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
 174{
 175	int i;
 176
 177	BUG_ON(timeout < AH_TIME_QUANTUM);
 178
 179	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
 180		if ((REG_READ(ah, reg) & mask) == val)
 181			return true;
 182
 183		udelay(AH_TIME_QUANTUM);
 184	}
 185
 186	ath_dbg(ath9k_hw_common(ah), ANY,
 187		"timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
 188		timeout, reg, REG_READ(ah, reg), mask, val);
 189
 190	return false;
 191}
 192EXPORT_SYMBOL(ath9k_hw_wait);
 193
 194void ath9k_hw_synth_delay(struct ath_hw *ah, struct ath9k_channel *chan,
 195			  int hw_delay)
 196{
 197	if (IS_CHAN_B(chan))
 198		hw_delay = (4 * hw_delay) / 22;
 199	else
 200		hw_delay /= 10;
 201
 202	if (IS_CHAN_HALF_RATE(chan))
 203		hw_delay *= 2;
 204	else if (IS_CHAN_QUARTER_RATE(chan))
 205		hw_delay *= 4;
 206
 207	udelay(hw_delay + BASE_ACTIVATE_DELAY);
 208}
 209
 210void ath9k_hw_write_array(struct ath_hw *ah, struct ar5416IniArray *array,
 211			  int column, unsigned int *writecnt)
 212{
 213	int r;
 214
 215	ENABLE_REGWRITE_BUFFER(ah);
 216	for (r = 0; r < array->ia_rows; r++) {
 217		REG_WRITE(ah, INI_RA(array, r, 0),
 218			  INI_RA(array, r, column));
 219		DO_DELAY(*writecnt);
 220	}
 221	REGWRITE_BUFFER_FLUSH(ah);
 222}
 223
 224u32 ath9k_hw_reverse_bits(u32 val, u32 n)
 225{
 226	u32 retval;
 227	int i;
 228
 229	for (i = 0, retval = 0; i < n; i++) {
 230		retval = (retval << 1) | (val & 1);
 231		val >>= 1;
 232	}
 233	return retval;
 234}
 235
 236u16 ath9k_hw_computetxtime(struct ath_hw *ah,
 237			   u8 phy, int kbps,
 238			   u32 frameLen, u16 rateix,
 239			   bool shortPreamble)
 240{
 241	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
 242
 243	if (kbps == 0)
 244		return 0;
 245
 246	switch (phy) {
 247	case WLAN_RC_PHY_CCK:
 248		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
 249		if (shortPreamble)
 250			phyTime >>= 1;
 251		numBits = frameLen << 3;
 252		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
 253		break;
 254	case WLAN_RC_PHY_OFDM:
 255		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
 256			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
 257			numBits = OFDM_PLCP_BITS + (frameLen << 3);
 258			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
 259			txTime = OFDM_SIFS_TIME_QUARTER
 260				+ OFDM_PREAMBLE_TIME_QUARTER
 261				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
 262		} else if (ah->curchan &&
 263			   IS_CHAN_HALF_RATE(ah->curchan)) {
 264			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
 265			numBits = OFDM_PLCP_BITS + (frameLen << 3);
 266			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
 267			txTime = OFDM_SIFS_TIME_HALF +
 268				OFDM_PREAMBLE_TIME_HALF
 269				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
 270		} else {
 271			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
 272			numBits = OFDM_PLCP_BITS + (frameLen << 3);
 273			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
 274			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
 275				+ (numSymbols * OFDM_SYMBOL_TIME);
 276		}
 277		break;
 278	default:
 279		ath_err(ath9k_hw_common(ah),
 280			"Unknown phy %u (rate ix %u)\n", phy, rateix);
 281		txTime = 0;
 282		break;
 283	}
 284
 285	return txTime;
 286}
 287EXPORT_SYMBOL(ath9k_hw_computetxtime);
 288
 289void ath9k_hw_get_channel_centers(struct ath_hw *ah,
 290				  struct ath9k_channel *chan,
 291				  struct chan_centers *centers)
 292{
 293	int8_t extoff;
 294
 295	if (!IS_CHAN_HT40(chan)) {
 296		centers->ctl_center = centers->ext_center =
 297			centers->synth_center = chan->channel;
 298		return;
 299	}
 300
 301	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
 302	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
 303		centers->synth_center =
 304			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
 305		extoff = 1;
 306	} else {
 307		centers->synth_center =
 308			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
 309		extoff = -1;
 310	}
 311
 312	centers->ctl_center =
 313		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
 314	/* 25 MHz spacing is supported by hw but not on upper layers */
 315	centers->ext_center =
 316		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
 317}
 318
 319/******************/
 320/* Chip Revisions */
 321/******************/
 322
 323static void ath9k_hw_read_revisions(struct ath_hw *ah)
 324{
 325	u32 val;
 326
 327	switch (ah->hw_version.devid) {
 328	case AR5416_AR9100_DEVID:
 329		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
 330		break;
 331	case AR9300_DEVID_AR9330:
 332		ah->hw_version.macVersion = AR_SREV_VERSION_9330;
 333		if (ah->get_mac_revision) {
 334			ah->hw_version.macRev = ah->get_mac_revision();
 335		} else {
 336			val = REG_READ(ah, AR_SREV);
 337			ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
 338		}
 339		return;
 340	case AR9300_DEVID_AR9340:
 341		ah->hw_version.macVersion = AR_SREV_VERSION_9340;
 342		val = REG_READ(ah, AR_SREV);
 343		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
 344		return;
 345	}
 346
 347	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
 348
 349	if (val == 0xFF) {
 350		val = REG_READ(ah, AR_SREV);
 351		ah->hw_version.macVersion =
 352			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
 353		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
 354
 355		if (AR_SREV_9462(ah))
 356			ah->is_pciexpress = true;
 357		else
 358			ah->is_pciexpress = (val &
 359					     AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
 360	} else {
 361		if (!AR_SREV_9100(ah))
 362			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
 363
 364		ah->hw_version.macRev = val & AR_SREV_REVISION;
 365
 366		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
 367			ah->is_pciexpress = true;
 368	}
 369}
 370
 371/************************************/
 372/* HW Attach, Detach, Init Routines */
 373/************************************/
 374
 375static void ath9k_hw_disablepcie(struct ath_hw *ah)
 376{
 377	if (!AR_SREV_5416(ah))
 378		return;
 379
 380	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
 381	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
 382	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
 383	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
 384	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
 385	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
 386	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
 387	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
 388	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
 389
 390	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
 391}
 392
 393static void ath9k_hw_aspm_init(struct ath_hw *ah)
 394{
 395	struct ath_common *common = ath9k_hw_common(ah);
 396
 397	if (common->bus_ops->aspm_init)
 398		common->bus_ops->aspm_init(common);
 399}
 400
 401/* This should work for all families including legacy */
 402static bool ath9k_hw_chip_test(struct ath_hw *ah)
 403{
 404	struct ath_common *common = ath9k_hw_common(ah);
 405	u32 regAddr[2] = { AR_STA_ID0 };
 406	u32 regHold[2];
 407	static const u32 patternData[4] = {
 408		0x55555555, 0xaaaaaaaa, 0x66666666, 0x99999999
 409	};
 410	int i, j, loop_max;
 411
 412	if (!AR_SREV_9300_20_OR_LATER(ah)) {
 413		loop_max = 2;
 414		regAddr[1] = AR_PHY_BASE + (8 << 2);
 415	} else
 416		loop_max = 1;
 417
 418	for (i = 0; i < loop_max; i++) {
 419		u32 addr = regAddr[i];
 420		u32 wrData, rdData;
 421
 422		regHold[i] = REG_READ(ah, addr);
 423		for (j = 0; j < 0x100; j++) {
 424			wrData = (j << 16) | j;
 425			REG_WRITE(ah, addr, wrData);
 426			rdData = REG_READ(ah, addr);
 427			if (rdData != wrData) {
 428				ath_err(common,
 429					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
 430					addr, wrData, rdData);
 431				return false;
 432			}
 433		}
 434		for (j = 0; j < 4; j++) {
 435			wrData = patternData[j];
 436			REG_WRITE(ah, addr, wrData);
 437			rdData = REG_READ(ah, addr);
 438			if (wrData != rdData) {
 439				ath_err(common,
 440					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
 441					addr, wrData, rdData);
 442				return false;
 443			}
 444		}
 445		REG_WRITE(ah, regAddr[i], regHold[i]);
 446	}
 447	udelay(100);
 448
 449	return true;
 450}
 451
 452static void ath9k_hw_init_config(struct ath_hw *ah)
 453{
 454	int i;
 455
 456	ah->config.dma_beacon_response_time = 1;
 457	ah->config.sw_beacon_response_time = 6;
 458	ah->config.additional_swba_backoff = 0;
 459	ah->config.ack_6mb = 0x0;
 460	ah->config.cwm_ignore_extcca = 0;
 461	ah->config.pcie_clock_req = 0;
 462	ah->config.pcie_waen = 0;
 463	ah->config.analog_shiftreg = 1;
 464	ah->config.enable_ani = true;
 465
 466	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
 467		ah->config.spurchans[i][0] = AR_NO_SPUR;
 468		ah->config.spurchans[i][1] = AR_NO_SPUR;
 469	}
 470
 471	/* PAPRD needs some more work to be enabled */
 472	ah->config.paprd_disable = 1;
 473
 474	ah->config.rx_intr_mitigation = true;
 475	ah->config.pcieSerDesWrite = true;
 476
 477	/*
 478	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
 479	 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
 480	 * This means we use it for all AR5416 devices, and the few
 481	 * minor PCI AR9280 devices out there.
 482	 *
 483	 * Serialization is required because these devices do not handle
 484	 * well the case of two concurrent reads/writes due to the latency
 485	 * involved. During one read/write another read/write can be issued
 486	 * on another CPU while the previous read/write may still be working
 487	 * on our hardware, if we hit this case the hardware poops in a loop.
 488	 * We prevent this by serializing reads and writes.
 489	 *
 490	 * This issue is not present on PCI-Express devices or pre-AR5416
 491	 * devices (legacy, 802.11abg).
 492	 */
 493	if (num_possible_cpus() > 1)
 494		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
 495}
 496
 497static void ath9k_hw_init_defaults(struct ath_hw *ah)
 498{
 499	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
 500
 501	regulatory->country_code = CTRY_DEFAULT;
 502	regulatory->power_limit = MAX_RATE_POWER;
 
 503
 504	ah->hw_version.magic = AR5416_MAGIC;
 505	ah->hw_version.subvendorid = 0;
 506
 507	ah->atim_window = 0;
 508	ah->sta_id1_defaults =
 509		AR_STA_ID1_CRPT_MIC_ENABLE |
 510		AR_STA_ID1_MCAST_KSRCH;
 511	if (AR_SREV_9100(ah))
 512		ah->sta_id1_defaults |= AR_STA_ID1_AR9100_BA_FIX;
 513	ah->slottime = ATH9K_SLOT_TIME_9;
 
 514	ah->globaltxtimeout = (u32) -1;
 515	ah->power_mode = ATH9K_PM_UNDEFINED;
 516	ah->htc_reset_init = true;
 517}
 518
 519static int ath9k_hw_init_macaddr(struct ath_hw *ah)
 520{
 521	struct ath_common *common = ath9k_hw_common(ah);
 522	u32 sum;
 523	int i;
 524	u16 eeval;
 525	static const u32 EEP_MAC[] = { EEP_MAC_LSW, EEP_MAC_MID, EEP_MAC_MSW };
 526
 527	sum = 0;
 528	for (i = 0; i < 3; i++) {
 529		eeval = ah->eep_ops->get_eeprom(ah, EEP_MAC[i]);
 530		sum += eeval;
 531		common->macaddr[2 * i] = eeval >> 8;
 532		common->macaddr[2 * i + 1] = eeval & 0xff;
 533	}
 534	if (sum == 0 || sum == 0xffff * 3)
 535		return -EADDRNOTAVAIL;
 536
 537	return 0;
 538}
 539
 540static int ath9k_hw_post_init(struct ath_hw *ah)
 541{
 542	struct ath_common *common = ath9k_hw_common(ah);
 543	int ecode;
 544
 545	if (common->bus_ops->ath_bus_type != ATH_USB) {
 546		if (!ath9k_hw_chip_test(ah))
 547			return -ENODEV;
 548	}
 549
 550	if (!AR_SREV_9300_20_OR_LATER(ah)) {
 551		ecode = ar9002_hw_rf_claim(ah);
 552		if (ecode != 0)
 553			return ecode;
 554	}
 555
 556	ecode = ath9k_hw_eeprom_init(ah);
 557	if (ecode != 0)
 558		return ecode;
 559
 560	ath_dbg(ath9k_hw_common(ah), CONFIG, "Eeprom VER: %d, REV: %d\n",
 
 561		ah->eep_ops->get_eeprom_ver(ah),
 562		ah->eep_ops->get_eeprom_rev(ah));
 563
 564	ecode = ath9k_hw_rf_alloc_ext_banks(ah);
 565	if (ecode) {
 566		ath_err(ath9k_hw_common(ah),
 567			"Failed allocating banks for external radio\n");
 568		ath9k_hw_rf_free_ext_banks(ah);
 569		return ecode;
 570	}
 571
 572	if (ah->config.enable_ani) {
 573		ath9k_hw_ani_setup(ah);
 574		ath9k_hw_ani_init(ah);
 575	}
 576
 577	return 0;
 578}
 579
 580static void ath9k_hw_attach_ops(struct ath_hw *ah)
 581{
 582	if (AR_SREV_9300_20_OR_LATER(ah))
 583		ar9003_hw_attach_ops(ah);
 584	else
 585		ar9002_hw_attach_ops(ah);
 586}
 587
 588/* Called for all hardware families */
 589static int __ath9k_hw_init(struct ath_hw *ah)
 590{
 591	struct ath_common *common = ath9k_hw_common(ah);
 592	int r = 0;
 593
 594	ath9k_hw_read_revisions(ah);
 595
 596	/*
 597	 * Read back AR_WA into a permanent copy and set bits 14 and 17.
 598	 * We need to do this to avoid RMW of this register. We cannot
 599	 * read the reg when chip is asleep.
 600	 */
 601	ah->WARegVal = REG_READ(ah, AR_WA);
 602	ah->WARegVal |= (AR_WA_D3_L1_DISABLE |
 603			 AR_WA_ASPM_TIMER_BASED_DISABLE);
 604
 605	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
 606		ath_err(common, "Couldn't reset chip\n");
 607		return -EIO;
 608	}
 609
 610	if (AR_SREV_9462(ah))
 611		ah->WARegVal &= ~AR_WA_D3_L1_DISABLE;
 612
 613	ath9k_hw_init_defaults(ah);
 614	ath9k_hw_init_config(ah);
 615
 616	ath9k_hw_attach_ops(ah);
 617
 618	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
 619		ath_err(common, "Couldn't wakeup chip\n");
 620		return -EIO;
 621	}
 622
 623	if (NR_CPUS > 1 && ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
 624		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
 625		    ((AR_SREV_9160(ah) || AR_SREV_9280(ah) || AR_SREV_9287(ah)) &&
 626		     !ah->is_pciexpress)) {
 627			ah->config.serialize_regmode =
 628				SER_REG_MODE_ON;
 629		} else {
 630			ah->config.serialize_regmode =
 631				SER_REG_MODE_OFF;
 632		}
 633	}
 634
 635	ath_dbg(common, RESET, "serialize_regmode is %d\n",
 636		ah->config.serialize_regmode);
 637
 638	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
 639		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD >> 1;
 640	else
 641		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD;
 642
 643	switch (ah->hw_version.macVersion) {
 644	case AR_SREV_VERSION_5416_PCI:
 645	case AR_SREV_VERSION_5416_PCIE:
 646	case AR_SREV_VERSION_9160:
 647	case AR_SREV_VERSION_9100:
 648	case AR_SREV_VERSION_9280:
 649	case AR_SREV_VERSION_9285:
 650	case AR_SREV_VERSION_9287:
 651	case AR_SREV_VERSION_9271:
 652	case AR_SREV_VERSION_9300:
 653	case AR_SREV_VERSION_9330:
 654	case AR_SREV_VERSION_9485:
 655	case AR_SREV_VERSION_9340:
 656	case AR_SREV_VERSION_9462:
 657		break;
 658	default:
 659		ath_err(common,
 660			"Mac Chip Rev 0x%02x.%x is not supported by this driver\n",
 661			ah->hw_version.macVersion, ah->hw_version.macRev);
 662		return -EOPNOTSUPP;
 663	}
 664
 665	if (AR_SREV_9271(ah) || AR_SREV_9100(ah) || AR_SREV_9340(ah) ||
 666	    AR_SREV_9330(ah))
 667		ah->is_pciexpress = false;
 668
 669	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
 670	ath9k_hw_init_cal_settings(ah);
 671
 672	ah->ani_function = ATH9K_ANI_ALL;
 673	if (AR_SREV_9280_20_OR_LATER(ah) && !AR_SREV_9300_20_OR_LATER(ah))
 674		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
 675	if (!AR_SREV_9300_20_OR_LATER(ah))
 676		ah->ani_function &= ~ATH9K_ANI_MRC_CCK;
 677
 678	/* disable ANI for 9340 */
 679	if (AR_SREV_9340(ah))
 680		ah->config.enable_ani = false;
 681
 682	ath9k_hw_init_mode_regs(ah);
 683
 684	if (!ah->is_pciexpress)
 
 
 685		ath9k_hw_disablepcie(ah);
 686
 
 
 
 687	r = ath9k_hw_post_init(ah);
 688	if (r)
 689		return r;
 690
 691	ath9k_hw_init_mode_gain_regs(ah);
 692	r = ath9k_hw_fill_cap_info(ah);
 693	if (r)
 694		return r;
 695
 696	if (ah->is_pciexpress)
 697		ath9k_hw_aspm_init(ah);
 698
 699	r = ath9k_hw_init_macaddr(ah);
 700	if (r) {
 701		ath_err(common, "Failed to initialize MAC address\n");
 702		return r;
 703	}
 704
 705	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
 706		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
 707	else
 708		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
 709
 710	if (AR_SREV_9330(ah))
 711		ah->bb_watchdog_timeout_ms = 85;
 712	else
 713		ah->bb_watchdog_timeout_ms = 25;
 714
 715	common->state = ATH_HW_INITIALIZED;
 716
 717	return 0;
 718}
 719
 720int ath9k_hw_init(struct ath_hw *ah)
 721{
 722	int ret;
 723	struct ath_common *common = ath9k_hw_common(ah);
 724
 725	/* These are all the AR5008/AR9001/AR9002 hardware family of chipsets */
 726	switch (ah->hw_version.devid) {
 727	case AR5416_DEVID_PCI:
 728	case AR5416_DEVID_PCIE:
 729	case AR5416_AR9100_DEVID:
 730	case AR9160_DEVID_PCI:
 731	case AR9280_DEVID_PCI:
 732	case AR9280_DEVID_PCIE:
 733	case AR9285_DEVID_PCIE:
 734	case AR9287_DEVID_PCI:
 735	case AR9287_DEVID_PCIE:
 736	case AR2427_DEVID_PCIE:
 737	case AR9300_DEVID_PCIE:
 738	case AR9300_DEVID_AR9485_PCIE:
 739	case AR9300_DEVID_AR9330:
 740	case AR9300_DEVID_AR9340:
 741	case AR9300_DEVID_AR9580:
 742	case AR9300_DEVID_AR9462:
 743	case AR9485_DEVID_AR1111:
 744		break;
 745	default:
 746		if (common->bus_ops->ath_bus_type == ATH_USB)
 747			break;
 748		ath_err(common, "Hardware device ID 0x%04x not supported\n",
 749			ah->hw_version.devid);
 750		return -EOPNOTSUPP;
 751	}
 752
 753	ret = __ath9k_hw_init(ah);
 754	if (ret) {
 755		ath_err(common,
 756			"Unable to initialize hardware; initialization status: %d\n",
 757			ret);
 758		return ret;
 759	}
 760
 761	return 0;
 762}
 763EXPORT_SYMBOL(ath9k_hw_init);
 764
 765static void ath9k_hw_init_qos(struct ath_hw *ah)
 766{
 767	ENABLE_REGWRITE_BUFFER(ah);
 768
 769	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
 770	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
 771
 772	REG_WRITE(ah, AR_QOS_NO_ACK,
 773		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
 774		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
 775		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));
 776
 777	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
 778	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
 779	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
 780	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
 781	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
 782
 783	REGWRITE_BUFFER_FLUSH(ah);
 784}
 785
 786u32 ar9003_get_pll_sqsum_dvc(struct ath_hw *ah)
 787{
 788	struct ath_common *common = ath9k_hw_common(ah);
 789	int i = 0;
 790
 791	REG_CLR_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
 792	udelay(100);
 793	REG_SET_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
 794
 795	while ((REG_READ(ah, PLL4) & PLL4_MEAS_DONE) == 0) {
 796
 797		udelay(100);
 798
 799		if (WARN_ON_ONCE(i >= 100)) {
 800			ath_err(common, "PLL4 meaurement not done\n");
 801			break;
 802		}
 803
 804		i++;
 805	}
 806
 807	return (REG_READ(ah, PLL3) & SQSUM_DVC_MASK) >> 3;
 808}
 809EXPORT_SYMBOL(ar9003_get_pll_sqsum_dvc);
 810
 811static void ath9k_hw_init_pll(struct ath_hw *ah,
 812			      struct ath9k_channel *chan)
 813{
 814	u32 pll;
 815
 816	if (AR_SREV_9485(ah)) {
 817
 818		/* program BB PLL ki and kd value, ki=0x4, kd=0x40 */
 819		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 820			      AR_CH0_BB_DPLL2_PLL_PWD, 0x1);
 821		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 822			      AR_CH0_DPLL2_KD, 0x40);
 823		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 824			      AR_CH0_DPLL2_KI, 0x4);
 825
 826		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
 827			      AR_CH0_BB_DPLL1_REFDIV, 0x5);
 828		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
 829			      AR_CH0_BB_DPLL1_NINI, 0x58);
 830		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
 831			      AR_CH0_BB_DPLL1_NFRAC, 0x0);
 832
 833		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 834			      AR_CH0_BB_DPLL2_OUTDIV, 0x1);
 835		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 836			      AR_CH0_BB_DPLL2_LOCAL_PLL, 0x1);
 837		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 838			      AR_CH0_BB_DPLL2_EN_NEGTRIG, 0x1);
 839
 840		/* program BB PLL phase_shift to 0x6 */
 841		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
 842			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x6);
 843
 844		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
 845			      AR_CH0_BB_DPLL2_PLL_PWD, 0x0);
 846		udelay(1000);
 847	} else if (AR_SREV_9330(ah)) {
 848		u32 ddr_dpll2, pll_control2, kd;
 849
 850		if (ah->is_clk_25mhz) {
 851			ddr_dpll2 = 0x18e82f01;
 852			pll_control2 = 0xe04a3d;
 853			kd = 0x1d;
 854		} else {
 855			ddr_dpll2 = 0x19e82f01;
 856			pll_control2 = 0x886666;
 857			kd = 0x3d;
 858		}
 859
 860		/* program DDR PLL ki and kd value */
 861		REG_WRITE(ah, AR_CH0_DDR_DPLL2, ddr_dpll2);
 862
 863		/* program DDR PLL phase_shift */
 864		REG_RMW_FIELD(ah, AR_CH0_DDR_DPLL3,
 865			      AR_CH0_DPLL3_PHASE_SHIFT, 0x1);
 866
 867		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
 868		udelay(1000);
 869
 870		/* program refdiv, nint, frac to RTC register */
 871		REG_WRITE(ah, AR_RTC_PLL_CONTROL2, pll_control2);
 872
 873		/* program BB PLL kd and ki value */
 874		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KD, kd);
 875		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KI, 0x06);
 876
 877		/* program BB PLL phase_shift */
 878		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
 879			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x1);
 880	} else if (AR_SREV_9340(ah)) {
 881		u32 regval, pll2_divint, pll2_divfrac, refdiv;
 882
 883		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
 884		udelay(1000);
 885
 886		REG_SET_BIT(ah, AR_PHY_PLL_MODE, 0x1 << 16);
 887		udelay(100);
 888
 889		if (ah->is_clk_25mhz) {
 890			pll2_divint = 0x54;
 891			pll2_divfrac = 0x1eb85;
 892			refdiv = 3;
 893		} else {
 894			pll2_divint = 88;
 895			pll2_divfrac = 0;
 896			refdiv = 5;
 897		}
 898
 899		regval = REG_READ(ah, AR_PHY_PLL_MODE);
 900		regval |= (0x1 << 16);
 901		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
 902		udelay(100);
 903
 904		REG_WRITE(ah, AR_PHY_PLL_CONTROL, (refdiv << 27) |
 905			  (pll2_divint << 18) | pll2_divfrac);
 906		udelay(100);
 907
 908		regval = REG_READ(ah, AR_PHY_PLL_MODE);
 909		regval = (regval & 0x80071fff) | (0x1 << 30) | (0x1 << 13) |
 910			 (0x4 << 26) | (0x18 << 19);
 911		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
 912		REG_WRITE(ah, AR_PHY_PLL_MODE,
 913			  REG_READ(ah, AR_PHY_PLL_MODE) & 0xfffeffff);
 914		udelay(1000);
 915	}
 916
 917	pll = ath9k_hw_compute_pll_control(ah, chan);
 918
 919	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
 920
 921	if (AR_SREV_9485(ah) || AR_SREV_9340(ah) || AR_SREV_9330(ah))
 922		udelay(1000);
 923
 924	/* Switch the core clock for ar9271 to 117Mhz */
 925	if (AR_SREV_9271(ah)) {
 926		udelay(500);
 927		REG_WRITE(ah, 0x50040, 0x304);
 928	}
 929
 930	udelay(RTC_PLL_SETTLE_DELAY);
 931
 932	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
 933
 934	if (AR_SREV_9340(ah)) {
 935		if (ah->is_clk_25mhz) {
 936			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x17c << 1);
 937			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f3d7);
 938			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e7ae);
 939		} else {
 940			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x261 << 1);
 941			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f400);
 942			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e800);
 943		}
 944		udelay(100);
 945	}
 946}
 947
 948static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
 949					  enum nl80211_iftype opmode)
 950{
 951	u32 sync_default = AR_INTR_SYNC_DEFAULT;
 952	u32 imr_reg = AR_IMR_TXERR |
 953		AR_IMR_TXURN |
 954		AR_IMR_RXERR |
 955		AR_IMR_RXORN |
 956		AR_IMR_BCNMISC;
 957
 958	if (AR_SREV_9340(ah))
 959		sync_default &= ~AR_INTR_SYNC_HOST1_FATAL;
 960
 961	if (AR_SREV_9300_20_OR_LATER(ah)) {
 962		imr_reg |= AR_IMR_RXOK_HP;
 963		if (ah->config.rx_intr_mitigation)
 964			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
 965		else
 966			imr_reg |= AR_IMR_RXOK_LP;
 967
 968	} else {
 969		if (ah->config.rx_intr_mitigation)
 970			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
 971		else
 972			imr_reg |= AR_IMR_RXOK;
 973	}
 974
 975	if (ah->config.tx_intr_mitigation)
 976		imr_reg |= AR_IMR_TXINTM | AR_IMR_TXMINTR;
 977	else
 978		imr_reg |= AR_IMR_TXOK;
 979
 980	if (opmode == NL80211_IFTYPE_AP)
 981		imr_reg |= AR_IMR_MIB;
 982
 983	ENABLE_REGWRITE_BUFFER(ah);
 984
 985	REG_WRITE(ah, AR_IMR, imr_reg);
 986	ah->imrs2_reg |= AR_IMR_S2_GTT;
 987	REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
 988
 989	if (!AR_SREV_9100(ah)) {
 990		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
 991		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, sync_default);
 992		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
 993	}
 994
 995	REGWRITE_BUFFER_FLUSH(ah);
 996
 997	if (AR_SREV_9300_20_OR_LATER(ah)) {
 998		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE, 0);
 999		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_MASK, 0);
1000		REG_WRITE(ah, AR_INTR_PRIO_SYNC_ENABLE, 0);
1001		REG_WRITE(ah, AR_INTR_PRIO_SYNC_MASK, 0);
1002	}
1003}
1004
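/*
 * Program the SIFS interval: the requested time (minus a fixed 2us
 * adjustment) is converted to MAC clocks and clamped to 16 bits.
 */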
1005static void ath9k_hw_set_sifs_time(struct ath_hw *ah, u32 us)
1006{
1007	u32 val = ath9k_hw_mac_to_clks(ah, us - 2);
1008	val = min(val, (u32) 0xFFFF);
1009	REG_WRITE(ah, AR_D_GBL_IFS_SIFS, val);
1010}
1011
1012static void ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
1013{
1014	u32 val = ath9k_hw_mac_to_clks(ah, us);
1015	val = min(val, (u32) 0xFFFF);
1016	REG_WRITE(ah, AR_D_GBL_IFS_SLOT, val);
1017}
1018
1019static void ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1020{
1021	u32 val = ath9k_hw_mac_to_clks(ah, us);
1022	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_ACK));
1023	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_ACK, val);
1024}
1025
1026static void ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1027{
1028	u32 val = ath9k_hw_mac_to_clks(ah, us);
1029	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_CTS));
1030	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_CTS, val);
1031}
1032
1033static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1034{
1035	if (tu > 0xFFFF) {
1036		ath_dbg(ath9k_hw_common(ah), XMIT, "bad global tx timeout %u\n",
1037			tu);
1038		ah->globaltxtimeout = (u32) -1;
1039		return false;
1040	} else {
1041		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1042		ah->globaltxtimeout = tu;
1043		return true;
1044	}
1045}
1046
1047void ath9k_hw_init_global_settings(struct ath_hw *ah)
1048{
1049	struct ath_common *common = ath9k_hw_common(ah);
1050	struct ieee80211_conf *conf = &common->hw->conf;
1051	const struct ath9k_channel *chan = ah->curchan;
1052	int acktimeout, ctstimeout, ack_offset = 0;
1053	int slottime;
1054	int sifstime;
1055	int rx_lat = 0, tx_lat = 0, eifs = 0;
1056	u32 reg;
1057
1058	ath_dbg(ath9k_hw_common(ah), RESET, "ah->misc_mode 0x%x\n",
1059		ah->misc_mode);
1060
1061	if (!chan)
1062		return;
1063
1064	if (ah->misc_mode != 0)
1065		REG_SET_BIT(ah, AR_PCU_MISC, ah->misc_mode);
1066
1067	if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1068		rx_lat = 41;
1069	else
1070		rx_lat = 37;
1071	tx_lat = 54;
1072
1073	if (IS_CHAN_5GHZ(chan))
1074		sifstime = 16;
1075	else
1076		sifstime = 10;
1077
1078	if (IS_CHAN_HALF_RATE(chan)) {
1079		eifs = 175;
1080		rx_lat *= 2;
1081		tx_lat *= 2;
1082		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1083		    tx_lat += 11;
1084
1085		sifstime *= 2;
1086		ack_offset = 16;
1087		slottime = 13;
1088	} else if (IS_CHAN_QUARTER_RATE(chan)) {
1089		eifs = 340;
1090		rx_lat = (rx_lat * 4) - 1;
1091		tx_lat *= 4;
1092		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1093		    tx_lat += 22;
1094
1095		sifstime *= 4;
1096		ack_offset = 32;
1097		slottime = 21;
1098	} else {
1099		if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1100			eifs = AR_D_GBL_IFS_EIFS_ASYNC_FIFO;
1101			reg = AR_USEC_ASYNC_FIFO;
1102		} else {
1103			eifs = REG_READ(ah, AR_D_GBL_IFS_EIFS)/
1104				common->clockrate;
1105			reg = REG_READ(ah, AR_USEC);
1106		}
1107		rx_lat = MS(reg, AR_USEC_RX_LAT);
1108		tx_lat = MS(reg, AR_USEC_TX_LAT);
1109
1110		slottime = ah->slottime;
1111	}
1112
1113	/* As defined by IEEE 802.11-2007 17.3.8.6 */
1114	acktimeout = slottime + sifstime + 3 * ah->coverage_class + ack_offset;
1115	ctstimeout = acktimeout;
1116
1117	/*
1118	 * Workaround for early ACK timeouts, add an offset to match the
1119	 * initval's 64us ack timeout value. Use 48us for the CTS timeout.
1120	 * This was initially only meant to work around an issue with delayed
1121	 * BA frames in some implementations, but it has been found to fix ACK
1122	 * timeout issues in other cases as well.
1123	 */
1124	if (conf->channel && conf->channel->band == IEEE80211_BAND_2GHZ &&
1125	    !IS_CHAN_HALF_RATE(chan) && !IS_CHAN_QUARTER_RATE(chan)) {
1126		acktimeout += 64 - sifstime - ah->slottime;
1127		ctstimeout += 48 - sifstime - ah->slottime;
1128	}
1129
1130
1131	ath9k_hw_set_sifs_time(ah, sifstime);
1132	ath9k_hw_setslottime(ah, slottime);
1133	ath9k_hw_set_ack_timeout(ah, acktimeout);
1134	ath9k_hw_set_cts_timeout(ah, ctstimeout);
1135	if (ah->globaltxtimeout != (u32) -1)
1136		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1137
1138	REG_WRITE(ah, AR_D_GBL_IFS_EIFS, ath9k_hw_mac_to_clks(ah, eifs));
1139	REG_RMW(ah, AR_USEC,
1140		(common->clockrate - 1) |
1141		SM(rx_lat, AR_USEC_RX_LAT) |
1142		SM(tx_lat, AR_USEC_TX_LAT),
1143		AR_USEC_TX_LAT | AR_USEC_RX_LAT | AR_USEC_USEC);
1144
1145}
1146EXPORT_SYMBOL(ath9k_hw_init_global_settings);
1147
1148void ath9k_hw_deinit(struct ath_hw *ah)
1149{
1150	struct ath_common *common = ath9k_hw_common(ah);
1151
1152	if (common->state < ATH_HW_INITIALIZED)
1153		goto free_hw;
1154
1155	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1156
1157free_hw:
1158	ath9k_hw_rf_free_ext_banks(ah);
1159}
1160EXPORT_SYMBOL(ath9k_hw_deinit);
1161
1162/*******/
1163/* INI */
1164/*******/
1165
1166u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, struct ath9k_channel *chan)
1167{
1168	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1169
1170	if (IS_CHAN_B(chan))
1171		ctl |= CTL_11B;
1172	else if (IS_CHAN_G(chan))
1173		ctl |= CTL_11G;
1174	else
1175		ctl |= CTL_11A;
1176
1177	return ctl;
1178}
1179
1180/****************************************/
1181/* Reset and Channel Switching Routines */
1182/****************************************/
1183
1184static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1185{
1186	struct ath_common *common = ath9k_hw_common(ah);
1187
1188	ENABLE_REGWRITE_BUFFER(ah);
1189
1190	/*
1191	 * set AHB_MODE not to do cacheline prefetches
1192	 */
1193	if (!AR_SREV_9300_20_OR_LATER(ah))
1194		REG_SET_BIT(ah, AR_AHB_MODE, AR_AHB_PREFETCH_RD_EN);
1195
1196	/*
1197	 * let mac dma reads be in 128 byte chunks
1198	 */
1199	REG_RMW(ah, AR_TXCFG, AR_TXCFG_DMASZ_128B, AR_TXCFG_DMASZ_MASK);
1200
1201	REGWRITE_BUFFER_FLUSH(ah);
1202
1203	/*
1204	 * Restore TX Trigger Level to its pre-reset value.
1205	 * The initial value depends on whether aggregation is enabled, and is
1206	 * adjusted whenever underruns are detected.
1207	 */
1208	if (!AR_SREV_9300_20_OR_LATER(ah))
1209		REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1210
1211	ENABLE_REGWRITE_BUFFER(ah);
1212
1213	/*
1214	 * let mac dma writes be in 128 byte chunks
1215	 */
1216	REG_RMW(ah, AR_RXCFG, AR_RXCFG_DMASZ_128B, AR_RXCFG_DMASZ_MASK);
1217
1218	/*
1219	 * Setup receive FIFO threshold to hold off TX activities
1220	 */
1221	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1222
1223	if (AR_SREV_9300_20_OR_LATER(ah)) {
1224		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_HP, 0x1);
1225		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_LP, 0x1);
1226
1227		ath9k_hw_set_rx_bufsize(ah, common->rx_bufsize -
1228			ah->caps.rx_status_len);
1229	}
1230
1231	/*
1232	 * reduce the number of usable entries in PCU TXBUF to avoid
1233	 * wrap around issues.
1234	 */
1235	if (AR_SREV_9285(ah)) {
1236		/* For AR9285 the number of FIFOs is reduced to half.
1237		 * So set the usable tx buf size also to half to
1238		 * avoid data/delimiter underruns
1239		 */
1240		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1241			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1242	} else if (!AR_SREV_9271(ah)) {
1243		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1244			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1245	}
1246
1247	REGWRITE_BUFFER_FLUSH(ah);
1248
1249	if (AR_SREV_9300_20_OR_LATER(ah))
1250		ath9k_hw_reset_txstatus_ring(ah);
1251}
1252
1253static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1254{
1255	u32 mask = AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC;
1256	u32 set = AR_STA_ID1_KSRCH_MODE;
1257
1258	switch (opmode) {
1259	case NL80211_IFTYPE_ADHOC:
1260	case NL80211_IFTYPE_MESH_POINT:
1261		set |= AR_STA_ID1_ADHOC;
1262		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1263		break;
1264	case NL80211_IFTYPE_AP:
1265		set |= AR_STA_ID1_STA_AP;
1266		/* fall through */
1267	case NL80211_IFTYPE_STATION:
1268		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1269		break;
1270	default:
1271		if (!ah->is_monitoring)
1272			set = 0;
1273		break;
1274	}
1275	REG_RMW(ah, AR_STA_ID1, set, mask);
1276}
1277
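/*
 * Split a scaled delta-slope coefficient into the mantissa/exponent pair
 * expected by the PHY timing registers: find the highest set bit of the
 * scaled value, derive the exponent from it, then round and shift the
 * mantissa accordingly.
 */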
1278void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah, u32 coef_scaled,
1279				   u32 *coef_mantissa, u32 *coef_exponent)
1280{
1281	u32 coef_exp, coef_man;
1282
1283	for (coef_exp = 31; coef_exp > 0; coef_exp--)
1284		if ((coef_scaled >> coef_exp) & 0x1)
1285			break;
1286
1287	coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1288
1289	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1290
1291	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1292	*coef_exponent = coef_exp - 16;
1293}
1294
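/*
 * Perform a warm or cold MAC reset through the RTC reset control register:
 * force the chip awake, assert the requested reset bits, then deassert them
 * and wait for the RTC to leave the reset state.
 */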
1295static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1296{
1297	u32 rst_flags;
1298	u32 tmpReg;
1299
1300	if (AR_SREV_9100(ah)) {
1301		REG_RMW_FIELD(ah, AR_RTC_DERIVED_CLK,
1302			      AR_RTC_DERIVED_CLK_PERIOD, 1);
1303		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1304	}
1305
1306	ENABLE_REGWRITE_BUFFER(ah);
1307
1308	if (AR_SREV_9300_20_OR_LATER(ah)) {
1309		REG_WRITE(ah, AR_WA, ah->WARegVal);
1310		udelay(10);
1311	}
1312
1313	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1314		  AR_RTC_FORCE_WAKE_ON_INT);
1315
1316	if (AR_SREV_9100(ah)) {
1317		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1318			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1319	} else {
1320		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1321		if (tmpReg &
1322		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
1323		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1324			u32 val;
1325			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1326
1327			val = AR_RC_HOSTIF;
1328			if (!AR_SREV_9300_20_OR_LATER(ah))
1329				val |= AR_RC_AHB;
1330			REG_WRITE(ah, AR_RC, val);
1331
1332		} else if (!AR_SREV_9300_20_OR_LATER(ah))
1333			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1334
1335		rst_flags = AR_RTC_RC_MAC_WARM;
1336		if (type == ATH9K_RESET_COLD)
1337			rst_flags |= AR_RTC_RC_MAC_COLD;
1338	}
1339
1340	if (AR_SREV_9330(ah)) {
1341		int npend = 0;
1342		int i;
1343
1344		/* AR9330 WAR:
1345		 * call external reset function to reset WMAC if:
1346		 * - doing a cold reset
1347		 * - we have pending frames in the TX queues
1348		 */
1349
1350		for (i = 0; i < AR_NUM_QCU; i++) {
1351			npend = ath9k_hw_numtxpending(ah, i);
1352			if (npend)
1353				break;
1354		}
1355
1356		if (ah->external_reset &&
1357		    (npend || type == ATH9K_RESET_COLD)) {
1358			int reset_err = 0;
1359
1360			ath_dbg(ath9k_hw_common(ah), RESET,
1361				"reset MAC via external reset\n");
1362
1363			reset_err = ah->external_reset();
1364			if (reset_err) {
1365				ath_err(ath9k_hw_common(ah),
1366					"External reset failed, err=%d\n",
1367					reset_err);
1368				return false;
1369			}
1370
1371			REG_WRITE(ah, AR_RTC_RESET, 1);
1372		}
1373	}
1374
1375	REG_WRITE(ah, AR_RTC_RC, rst_flags);
1376
1377	REGWRITE_BUFFER_FLUSH(ah);
1378
1379	udelay(50);
1380
1381	REG_WRITE(ah, AR_RTC_RC, 0);
1382	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1383		ath_dbg(ath9k_hw_common(ah), RESET, "RTC stuck in MAC reset\n");
1384		return false;
1385	}
1386
1387	if (!AR_SREV_9100(ah))
1388		REG_WRITE(ah, AR_RC, 0);
1389
1390	if (AR_SREV_9100(ah))
1391		udelay(50);
1392
1393	return true;
1394}
1395
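/*
 * Power-on reset: force the chip awake, pulse the RTC reset line and wait
 * for the RTC status to report ON, then finish with a warm reset.
 */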
1396static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1397{
1398	ENABLE_REGWRITE_BUFFER(ah);
1399
1400	if (AR_SREV_9300_20_OR_LATER(ah)) {
1401		REG_WRITE(ah, AR_WA, ah->WARegVal);
1402		udelay(10);
1403	}
1404
1405	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1406		  AR_RTC_FORCE_WAKE_ON_INT);
1407
1408	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1409		REG_WRITE(ah, AR_RC, AR_RC_AHB);
1410
1411	REG_WRITE(ah, AR_RTC_RESET, 0);
1412
1413	REGWRITE_BUFFER_FLUSH(ah);
1414
1415	if (!AR_SREV_9300_20_OR_LATER(ah))
1416		udelay(2);
1417
1418	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1419		REG_WRITE(ah, AR_RC, 0);
1420
1421	REG_WRITE(ah, AR_RTC_RESET, 1);
1422
1423	if (!ath9k_hw_wait(ah,
1424			   AR_RTC_STATUS,
1425			   AR_RTC_STATUS_M,
1426			   AR_RTC_STATUS_ON,
1427			   AH_WAIT_TIMEOUT)) {
1428		ath_dbg(ath9k_hw_common(ah), RESET, "RTC not waking up\n");
1429		return false;
1430	}
1431
1432	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1433}
1434
1435static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1436{
1437	bool ret = false;
1438
1439	if (AR_SREV_9300_20_OR_LATER(ah)) {
1440		REG_WRITE(ah, AR_WA, ah->WARegVal);
1441		udelay(10);
1442	}
1443
1444	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1445		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1446
1447	switch (type) {
1448	case ATH9K_RESET_POWER_ON:
1449		ret = ath9k_hw_set_reset_power_on(ah);
1450		break;
1451	case ATH9K_RESET_WARM:
1452	case ATH9K_RESET_COLD:
1453		ret = ath9k_hw_set_reset(ah, type);
1454		break;
1455	default:
1456		break;
1457	}
1458
1459	if (ah->caps.hw_caps & ATH9K_HW_CAP_MCI)
1460		REG_WRITE(ah, AR_RTC_KEEP_AWAKE, 0x2);
1461
1462	return ret;
1463}
1464
1465static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1466				struct ath9k_channel *chan)
1467{
1468	int reset_type = ATH9K_RESET_WARM;
1469
1470	if (AR_SREV_9280(ah)) {
1471		if (ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
1472			reset_type = ATH9K_RESET_POWER_ON;
1473		else
1474			reset_type = ATH9K_RESET_COLD;
1475	}
1476
1477	if (!ath9k_hw_set_reset_reg(ah, reset_type))
1478		return false;
1479
1480	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1481		return false;
1482
1483	ah->chip_fullsleep = false;
1484
1485	if (AR_SREV_9330(ah))
1486		ar9003_hw_internal_regulator_apply(ah);
1487	ath9k_hw_init_pll(ah, chan);
1488	ath9k_hw_set_rfmode(ah, chan);
1489
1490	return true;
1491}
1492
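/*
 * Fast channel change: re-tune the synthesizer and reprogram the
 * channel-dependent registers without a full chip reset. Bails out if any
 * TX queue still has pending frames or the baseband RX cannot be stopped.
 */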
1493static bool ath9k_hw_channel_change(struct ath_hw *ah,
1494				    struct ath9k_channel *chan)
1495{
1496	struct ath_common *common = ath9k_hw_common(ah);
1497	u32 qnum;
1498	int r;
1499	bool edma = !!(ah->caps.hw_caps & ATH9K_HW_CAP_EDMA);
1500	bool band_switch, mode_diff;
1501	u8 ini_reloaded;
1502
1503	band_switch = (chan->channelFlags & (CHANNEL_2GHZ | CHANNEL_5GHZ)) !=
1504		      (ah->curchan->channelFlags & (CHANNEL_2GHZ |
1505						    CHANNEL_5GHZ));
1506	mode_diff = (chan->chanmode != ah->curchan->chanmode);
1507
1508	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1509		if (ath9k_hw_numtxpending(ah, qnum)) {
1510			ath_dbg(common, QUEUE,
1511				"Transmit frames pending on queue %d\n", qnum);
1512			return false;
1513		}
1514	}
1515
1516	if (!ath9k_hw_rfbus_req(ah)) {
1517		ath_err(common, "Could not kill baseband RX\n");
1518		return false;
1519	}
1520
1521	if (edma && (band_switch || mode_diff)) {
1522		ath9k_hw_mark_phy_inactive(ah);
1523		udelay(5);
1524
1525		ath9k_hw_init_pll(ah, NULL);
1526
1527		if (ath9k_hw_fast_chan_change(ah, chan, &ini_reloaded)) {
1528			ath_err(common, "Failed to do fast channel change\n");
1529			return false;
1530		}
1531	}
1532
1533	ath9k_hw_set_channel_regs(ah, chan);
1534
1535	r = ath9k_hw_rf_set_freq(ah, chan);
1536	if (r) {
1537		ath_err(common, "Failed to set channel\n");
1538		return false;
1539	}
1540	ath9k_hw_set_clockrate(ah);
1541	ath9k_hw_apply_txpower(ah, chan, false);
1542	ath9k_hw_rfbus_done(ah);
1543
1544	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1545		ath9k_hw_set_delta_slope(ah, chan);
1546
1547	ath9k_hw_spur_mitigate_freq(ah, chan);
1548
1549	if (edma && (band_switch || mode_diff)) {
1550		ah->ah_flags |= AH_FASTCC;
1551		if (band_switch || ini_reloaded)
1552			ah->eep_ops->set_board_values(ah, chan);
1553
1554		ath9k_hw_init_bb(ah, chan);
1555
1556		if (band_switch || ini_reloaded)
1557			ath9k_hw_init_cal(ah, chan);
1558		ah->ah_flags &= ~AH_FASTCC;
1559	}
1560
1561	return true;
1562}
1563
1564static void ath9k_hw_apply_gpio_override(struct ath_hw *ah)
1565{
1566	u32 gpio_mask = ah->gpio_mask;
1567	int i;
1568
1569	for (i = 0; gpio_mask; i++, gpio_mask >>= 1) {
1570		if (!(gpio_mask & 1))
1571			continue;
1572
1573		ath9k_hw_cfg_output(ah, i, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
1574		ath9k_hw_set_gpio(ah, i, !!(ah->gpio_val & BIT(i)));
1575	}
1576}
1577
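/*
 * Scan the 5-bit DCU state fields packed into a DMA debug register and
 * report whether any of them matches one of the known stuck states listed
 * in dcu_chain_state[].
 */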
1578static bool ath9k_hw_check_dcs(u32 dma_dbg, u32 num_dcu_states,
1579			       int *hang_state, int *hang_pos)
1580{
1581	static u32 dcu_chain_state[] = {5, 6, 9}; /* DCU chain stuck states */
1582	u32 chain_state, dcs_pos, i;
1583
1584	for (dcs_pos = 0; dcs_pos < num_dcu_states; dcs_pos++) {
1585		chain_state = (dma_dbg >> (5 * dcs_pos)) & 0x1f;
1586		for (i = 0; i < 3; i++) {
1587			if (chain_state == dcu_chain_state[i]) {
1588				*hang_state = chain_state;
1589				*hang_pos = dcs_pos;
1590				return true;
1591			}
1592		}
1593	}
1594	return false;
1595}
1596
1597#define DCU_COMPLETE_STATE        1
1598#define DCU_COMPLETE_STATE_MASK 0x3
1599#define NUM_STATUS_READS         50
1600static bool ath9k_hw_detect_mac_hang(struct ath_hw *ah)
1601{
1602	u32 chain_state, comp_state, dcs_reg = AR_DMADBG_4;
1603	u32 i, hang_pos, hang_state, num_state = 6;
1604
1605	comp_state = REG_READ(ah, AR_DMADBG_6);
1606
1607	if ((comp_state & DCU_COMPLETE_STATE_MASK) != DCU_COMPLETE_STATE) {
1608		ath_dbg(ath9k_hw_common(ah), RESET,
1609			"MAC Hang signature not found at DCU complete\n");
1610		return false;
1611	}
1612
1613	chain_state = REG_READ(ah, dcs_reg);
1614	if (ath9k_hw_check_dcs(chain_state, num_state, &hang_state, &hang_pos))
1615		goto hang_check_iter;
1616
1617	dcs_reg = AR_DMADBG_5;
1618	num_state = 4;
1619	chain_state = REG_READ(ah, dcs_reg);
1620	if (ath9k_hw_check_dcs(chain_state, num_state, &hang_state, &hang_pos))
1621		goto hang_check_iter;
1622
1623	ath_dbg(ath9k_hw_common(ah), RESET,
1624		"MAC Hang signature 1 not found\n");
1625	return false;
1626
1627hang_check_iter:
1628	ath_dbg(ath9k_hw_common(ah), RESET,
1629		"DCU registers: chain %08x complete %08x Hang: state %d pos %d\n",
1630		chain_state, comp_state, hang_state, hang_pos);
1631
1632	for (i = 0; i < NUM_STATUS_READS; i++) {
1633		chain_state = REG_READ(ah, dcs_reg);
1634		chain_state = (chain_state >> (5 * hang_pos)) & 0x1f;
1635		comp_state = REG_READ(ah, AR_DMADBG_6);
1636
1637		if (((comp_state & DCU_COMPLETE_STATE_MASK) !=
1638					DCU_COMPLETE_STATE) ||
1639		    (chain_state != hang_state))
1640			return false;
1641	}
1642
1643	ath_dbg(ath9k_hw_common(ah), RESET, "MAC Hang signature 1 found\n");
1644
1645	return true;
1646}
1647
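/*
 * Poll the MAC observation bus (AR_OBS_BUS_1) up to 50 times; the masked
 * values matched below are treated as signatures of a stuck MAC, so the
 * chip is only reported alive once a value outside those signatures is
 * observed. (AR9300 uses the dedicated MAC hang detection above instead.)
 */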
1648bool ath9k_hw_check_alive(struct ath_hw *ah)
1649{
1650	int count = 50;
1651	u32 reg;
1652
1653	if (AR_SREV_9300(ah))
1654		return !ath9k_hw_detect_mac_hang(ah);
1655
1656	if (AR_SREV_9285_12_OR_LATER(ah))
1657		return true;
1658
1659	do {
1660		reg = REG_READ(ah, AR_OBS_BUS_1);
1661
1662		if ((reg & 0x7E7FFFEF) == 0x00702400)
1663			continue;
1664
1665		switch (reg & 0x7E000B00) {
1666		case 0x1E000000:
1667		case 0x52000B00:
1668		case 0x18000B00:
1669			continue;
1670		default:
1671			return true;
1672		}
1673	} while (count-- > 0);
1674
1675	return false;
1676}
1677EXPORT_SYMBOL(ath9k_hw_check_alive);
1678
1679/*
1680 * Fast channel change:
1681 * (Change synthesizer based on channel freq without resetting chip)
1682 *
1683 * Don't do FCC when
1684 *   - Flag is not set
1685 *   - Chip is just coming out of full sleep
1686 *   - Channel to be set is same as current channel
1687 *   - Channel flags are different (e.g., moving from a 2GHz to a 5GHz channel)
1688 */
1689static int ath9k_hw_do_fastcc(struct ath_hw *ah, struct ath9k_channel *chan)
1690{
1691	struct ath_common *common = ath9k_hw_common(ah);
1692	int ret;
1693
1694	if (AR_SREV_9280(ah) && common->bus_ops->ath_bus_type == ATH_PCI)
1695		goto fail;
1696
1697	if (ah->chip_fullsleep)
1698		goto fail;
1699
1700	if (!ah->curchan)
1701		goto fail;
1702
1703	if (chan->channel == ah->curchan->channel)
1704		goto fail;
1705
1706	if ((ah->curchan->channelFlags | chan->channelFlags) &
1707	    (CHANNEL_HALF | CHANNEL_QUARTER))
1708		goto fail;
1709
1710	if ((chan->channelFlags & CHANNEL_ALL) !=
1711	    (ah->curchan->channelFlags & CHANNEL_ALL))
1712		goto fail;
1713
1714	if (!ath9k_hw_check_alive(ah))
1715		goto fail;
1716
1717	/*
1718	 * For AR9462, make sure that the calibration data to be
1719	 * re-used is present.
1720	 */
1721	if (AR_SREV_9462(ah) && (ah->caldata &&
1722				 (!ah->caldata->done_txiqcal_once ||
1723				  !ah->caldata->done_txclcal_once ||
1724				  !ah->caldata->rtt_done)))
1725		goto fail;
1726
1727	ath_dbg(common, RESET, "FastChannelChange for %d -> %d\n",
1728		ah->curchan->channel, chan->channel);
1729
1730	ret = ath9k_hw_channel_change(ah, chan);
1731	if (!ret)
1732		goto fail;
1733
1734	ath9k_hw_loadnf(ah, ah->curchan);
1735	ath9k_hw_start_nfcal(ah, true);
1736
1737	if ((ah->caps.hw_caps & ATH9K_HW_CAP_MCI) && ar9003_mci_is_ready(ah))
1738		ar9003_mci_2g5g_switch(ah, true);
1739
1740	if (AR_SREV_9271(ah))
1741		ar9002_hw_load_ani_reg(ah, chan);
1742
1743	return 0;
1744fail:
1745	return -EINVAL;
1746}
1747
1748int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
1749		   struct ath9k_hw_cal_data *caldata, bool fastcc)
1750{
1751	struct ath_common *common = ath9k_hw_common(ah);
1752	u32 saveLedState;
1753	u32 saveDefAntenna;
1754	u32 macStaId1;
1755	u64 tsf = 0;
1756	int i, r;
1757	bool start_mci_reset = false;
1758	bool mci = !!(ah->caps.hw_caps & ATH9K_HW_CAP_MCI);
1759	bool save_fullsleep = ah->chip_fullsleep;
1760
1761	if (mci) {
1762		start_mci_reset = ar9003_mci_start_reset(ah, chan);
1763		if (start_mci_reset)
1764			return 0;
1765	}
1766
1767	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1768		return -EIO;
1769
1770	if (ah->curchan && !ah->chip_fullsleep)
1771		ath9k_hw_getnf(ah, ah->curchan);
1772
1773	ah->caldata = caldata;
1774	if (caldata &&
1775	    (chan->channel != caldata->channel ||
1776	     (chan->channelFlags & ~CHANNEL_CW_INT) !=
1777	     (caldata->channelFlags & ~CHANNEL_CW_INT))) {
1778		/* Operating channel changed, reset channel calibration data */
1779		memset(caldata, 0, sizeof(*caldata));
1780		ath9k_init_nfcal_hist_buffer(ah, chan);
1781	}
1782	ah->noise = ath9k_hw_getchan_noise(ah, chan);
1783
1784	if (fastcc) {
1785		r = ath9k_hw_do_fastcc(ah, chan);
1786		if (!r)
1787			return r;
1788	}
1789
1790	if (mci)
1791		ar9003_mci_stop_bt(ah, save_fullsleep);
1792
1793	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
1794	if (saveDefAntenna == 0)
1795		saveDefAntenna = 1;
1796
1797	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
1798
1799	/* For chips on which RTC reset is done, save TSF before it gets cleared */
1800	if (AR_SREV_9100(ah) ||
1801	    (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)))
1802		tsf = ath9k_hw_gettsf64(ah);
1803
1804	saveLedState = REG_READ(ah, AR_CFG_LED) &
1805		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
1806		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
1807
1808	ath9k_hw_mark_phy_inactive(ah);
1809
1810	ah->paprd_table_write_done = false;
1811
1812	/* Only required on the first reset */
1813	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1814		REG_WRITE(ah,
1815			  AR9271_RESET_POWER_DOWN_CONTROL,
1816			  AR9271_RADIO_RF_RST);
1817		udelay(50);
1818	}
1819
1820	if (!ath9k_hw_chip_reset(ah, chan)) {
1821		ath_err(common, "Chip reset failed\n");
1822		return -EINVAL;
1823	}
1824
1825	/* Only required on the first reset */
1826	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1827		ah->htc_reset_init = false;
1828		REG_WRITE(ah,
1829			  AR9271_RESET_POWER_DOWN_CONTROL,
1830			  AR9271_GATE_MAC_CTL);
1831		udelay(50);
1832	}
1833
1834	/* Restore TSF */
1835	if (tsf)
1836		ath9k_hw_settsf64(ah, tsf);
1837
1838	if (AR_SREV_9280_20_OR_LATER(ah))
1839		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
1840
1841	if (!AR_SREV_9300_20_OR_LATER(ah))
1842		ar9002_hw_enable_async_fifo(ah);
1843
1844	r = ath9k_hw_process_ini(ah, chan);
1845	if (r)
1846		return r;
1847
1848	if (mci)
1849		ar9003_mci_reset(ah, false, IS_CHAN_2GHZ(chan), save_fullsleep);
1850
1851	/*
1852	 * Some AR91xx SoC devices frequently fail to accept TSF writes
1853	 * right after the chip reset. When that happens, write a new
1854	 * value after the initvals have been applied, with an offset
1855	 * based on the measured time difference.
1856	 */
1857	if (AR_SREV_9100(ah) && (ath9k_hw_gettsf64(ah) < tsf)) {
1858		tsf += 1500;
1859		ath9k_hw_settsf64(ah, tsf);
1860	}
1861
1862	/* Setup MFP options for CCMP */
1863	if (AR_SREV_9280_20_OR_LATER(ah)) {
1864		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
1865		 * frames when constructing CCMP AAD. */
1866		REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
1867			      0xc7ff);
1868		ah->sw_mgmt_crypto = false;
1869	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
1870		/* Disable hardware crypto for management frames */
1871		REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
1872			    AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
1873		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1874			    AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
1875		ah->sw_mgmt_crypto = true;
1876	} else
1877		ah->sw_mgmt_crypto = true;
1878
1879	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1880		ath9k_hw_set_delta_slope(ah, chan);
1881
1882	ath9k_hw_spur_mitigate_freq(ah, chan);
1883	ah->eep_ops->set_board_values(ah, chan);
1884
1885	ENABLE_REGWRITE_BUFFER(ah);
1886
1887	REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
1888	REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
1889		  | macStaId1
1890		  | AR_STA_ID1_RTS_USE_DEF
1891		  | (ah->config.ack_6mb ?
1892		     AR_STA_ID1_ACKCTS_6MB : 0)
1893		  | ah->sta_id1_defaults);
1894	ath_hw_setbssidmask(common);
1895	REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
1896	ath9k_hw_write_associd(ah);
1897	REG_WRITE(ah, AR_ISR, ~0);
1898	REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
1899
1900	REGWRITE_BUFFER_FLUSH(ah);
1901
1902	ath9k_hw_set_operating_mode(ah, ah->opmode);
1903
1904	r = ath9k_hw_rf_set_freq(ah, chan);
1905	if (r)
1906		return r;
1907
1908	ath9k_hw_set_clockrate(ah);
1909
1910	ENABLE_REGWRITE_BUFFER(ah);
1911
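	/* Give each DCU a one-to-one mapping to its QCU */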
1912	for (i = 0; i < AR_NUM_DCU; i++)
1913		REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
1914
1915	REGWRITE_BUFFER_FLUSH(ah);
1916
1917	ah->intr_txqs = 0;
1918	for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++)
1919		ath9k_hw_resettxqueue(ah, i);
1920
1921	ath9k_hw_init_interrupt_masks(ah, ah->opmode);
1922	ath9k_hw_ani_cache_ini_regs(ah);
1923	ath9k_hw_init_qos(ah);
1924
1925	if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
1926		ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
1927
1928	ath9k_hw_init_global_settings(ah);
1929
1930	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1931		REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
1932			    AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
1933		REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
1934			      AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
1935		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1936			    AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
1937	}
1938
1939	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PRESERVE_SEQNUM);
1940
1941	ath9k_hw_set_dma(ah);
1942
1943	REG_WRITE(ah, AR_OBS, 8);
1944
1945	if (ah->config.rx_intr_mitigation) {
1946		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
1947		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
1948	}
1949
1950	if (ah->config.tx_intr_mitigation) {
1951		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_LAST, 300);
1952		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_FIRST, 750);
1953	}
1954
1955	ath9k_hw_init_bb(ah, chan);
1956
1957	if (caldata) {
1958		caldata->done_txiqcal_once = false;
1959		caldata->done_txclcal_once = false;
1960	}
1961	if (!ath9k_hw_init_cal(ah, chan))
1962		return -EIO;
1963
1964	ath9k_hw_loadnf(ah, chan);
1965	ath9k_hw_start_nfcal(ah, true);
1966
1967	if (mci && ar9003_mci_end_reset(ah, chan, caldata))
1968		return -EIO;
1969
1970	ENABLE_REGWRITE_BUFFER(ah);
1971
1972	ath9k_hw_restore_chainmask(ah);
1973	REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
1974
1975	REGWRITE_BUFFER_FLUSH(ah);
1976
1977	/*
1978	 * For big-endian systems, turn on byte swapping for descriptors
1979	 */
1980	if (AR_SREV_9100(ah)) {
1981		u32 mask;
1982		mask = REG_READ(ah, AR_CFG);
1983		if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
1984			ath_dbg(common, RESET, "CFG Byte Swap Set 0x%x\n",
1985				mask);
1986		} else {
1987			mask =
1988				INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
1989			REG_WRITE(ah, AR_CFG, mask);
1990			ath_dbg(common, RESET, "Setting CFG 0x%x\n",
1991				REG_READ(ah, AR_CFG));
1992		}
1993	} else {
1994		if (common->bus_ops->ath_bus_type == ATH_USB) {
1995			/* Configure AR9271 target WLAN */
1996			if (AR_SREV_9271(ah))
1997				REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
1998			else
1999				REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2000		}
2001#ifdef __BIG_ENDIAN
2002		else if (AR_SREV_9330(ah) || AR_SREV_9340(ah))
2003			REG_RMW(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB, 0);
2004		else
2005			REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2006#endif
2007	}
2008
2009	if (ath9k_hw_btcoex_is_enabled(ah))
2010		ath9k_hw_btcoex_enable(ah);
2011
2012	if (mci)
2013		ar9003_mci_check_bt(ah);
2014
2015	if (AR_SREV_9300_20_OR_LATER(ah)) {
2016		ar9003_hw_bb_watchdog_config(ah);
2017
2018		ar9003_hw_disable_phy_restart(ah);
2019	}
2020
2021	ath9k_hw_apply_gpio_override(ah);
2022
2023	return 0;
2024}
2025EXPORT_SYMBOL(ath9k_hw_reset);
2026
2027/******************************/
2028/* Power Management (Chipset) */
2029/******************************/
2030
2031/*
2032 * Notify that Power Mgt is disabled in self-generated frames.
2033 * If requested, force chip to sleep.
2034 */
2035static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2036{
2037	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2038	if (setChip) {
2039		if (AR_SREV_9462(ah)) {
2040			REG_WRITE(ah, AR_TIMER_MODE,
2041				  REG_READ(ah, AR_TIMER_MODE) & 0xFFFFFF00);
2042			REG_WRITE(ah, AR_NDP2_TIMER_MODE, REG_READ(ah,
2043				  AR_NDP2_TIMER_MODE) & 0xFFFFFF00);
2044			REG_WRITE(ah, AR_SLP32_INC,
2045				  REG_READ(ah, AR_SLP32_INC) & 0xFFF00000);
2046			/* xxx Required for WLAN only case ? */
2047			REG_WRITE(ah, AR_MCI_INTERRUPT_RX_MSG_EN, 0);
2048			udelay(100);
2049		}
2050
2051		/*
2052		 * Clear the RTC force wake bit to allow the
2053		 * mac to go to sleep.
2054		 */
2055		REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2056
2057		if (AR_SREV_9462(ah))
2058			udelay(100);
2059
2060		if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
2061			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2062
2063		/* Shutdown chip. Active low */
2064		if (!AR_SREV_5416(ah) && !AR_SREV_9271(ah)) {
2065			REG_CLR_BIT(ah, AR_RTC_RESET, AR_RTC_RESET_EN);
2066			udelay(2);
2067		}
2068	}
2069
2070	/* Clear Bit 14 of AR_WA after putting chip into Full Sleep mode. */
2071	if (AR_SREV_9300_20_OR_LATER(ah))
2072		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2073}
2074
2075/*
2076 * Notify that Power Management is enabled in self-generated
2077 * frames. If requested, set the power mode of the chip to
2078 * auto/normal. Duration is in units of 128us (1/8 TU).
2079 */
2080static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2081{
2082	u32 val;
2083
2084	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2085	if (setChip) {
2086		struct ath9k_hw_capabilities *pCap = &ah->caps;
2087
2088		if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2089			/* Set WakeOnInterrupt bit; clear ForceWake bit */
2090			REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2091				  AR_RTC_FORCE_WAKE_ON_INT);
2092		} else {
2093
2094			/* When the chip goes into network sleep, it could be
2095			 * woken up by an MCI_INT interrupt caused by BT's HW
2096			 * messages (LNA_xxx, CONT_xxx), which can arrive at a
2097			 * very fast rate (~100us). This causes the chip to
2098			 * leave and re-enter network sleep mode frequently,
2099			 * which in turn makes the WLAN MCI HW generate lots of
2100			 * SYS_WAKING and SYS_SLEEPING messages, keeping the BT
2101			 * CPU too busy to process them.
2102			 */
2103			if (AR_SREV_9462(ah)) {
2104				val = REG_READ(ah, AR_MCI_INTERRUPT_RX_MSG_EN) &
2105					~AR_MCI_INTERRUPT_RX_HW_MSG_MASK;
2106				REG_WRITE(ah, AR_MCI_INTERRUPT_RX_MSG_EN, val);
2107			}
2108			/*
2109			 * Clear the RTC force wake bit to allow the
2110			 * mac to go to sleep.
2111			 */
2112			REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2113				    AR_RTC_FORCE_WAKE_EN);
2114
2115			if (AR_SREV_9462(ah))
2116				udelay(30);
2117		}
2118	}
2119
2120	/* Clear Bit 14 of AR_WA after putting chip into Net Sleep mode. */
2121	if (AR_SREV_9300_20_OR_LATER(ah))
2122		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2123}
2124
2125static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2126{
2127	u32 val;
2128	int i;
2129
2130	/* Set Bits 14 and 17 of AR_WA before powering on the chip. */
2131	if (AR_SREV_9300_20_OR_LATER(ah)) {
2132		REG_WRITE(ah, AR_WA, ah->WARegVal);
2133		udelay(10);
2134	}
2135
2136	if (setChip) {
2137		if ((REG_READ(ah, AR_RTC_STATUS) &
2138		     AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2139			if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
2140				return false;
2141			}
2142			if (!AR_SREV_9300_20_OR_LATER(ah))
2143				ath9k_hw_init_pll(ah, NULL);
2144		}
2145		if (AR_SREV_9100(ah))
2146			REG_SET_BIT(ah, AR_RTC_RESET,
2147				    AR_RTC_RESET_EN);
2148
2149		REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2150			    AR_RTC_FORCE_WAKE_EN);
2151		udelay(50);
2152
2153		for (i = POWER_UP_TIME / 50; i > 0; i--) {
2154			val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2155			if (val == AR_RTC_STATUS_ON)
2156				break;
2157			udelay(50);
2158			REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2159				    AR_RTC_FORCE_WAKE_EN);
2160		}
2161		if (i == 0) {
2162			ath_err(ath9k_hw_common(ah),
2163				"Failed to wakeup in %uus\n",
2164				POWER_UP_TIME / 20);
2165			return false;
2166		}
2167	}
2168
2169	REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2170
2171	return true;
2172}
2173
2174bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2175{
2176	struct ath_common *common = ath9k_hw_common(ah);
2177	int status = true, setChip = true;
2178	static const char *modes[] = {
2179		"AWAKE",
2180		"FULL-SLEEP",
2181		"NETWORK SLEEP",
2182		"UNDEFINED"
2183	};
2184
2185	if (ah->power_mode == mode)
2186		return status;
2187
2188	ath_dbg(common, RESET, "%s -> %s\n",
2189		modes[ah->power_mode], modes[mode]);
2190
2191	switch (mode) {
2192	case ATH9K_PM_AWAKE:
2193		status = ath9k_hw_set_power_awake(ah, setChip);
2194
2195		if (ah->caps.hw_caps & ATH9K_HW_CAP_MCI)
2196			REG_WRITE(ah, AR_RTC_KEEP_AWAKE, 0x2);
2197
2198		break;
2199	case ATH9K_PM_FULL_SLEEP:
2200		if (ah->caps.hw_caps & ATH9K_HW_CAP_MCI)
2201			ar9003_mci_set_full_sleep(ah);
2202
2203		ath9k_set_power_sleep(ah, setChip);
2204		ah->chip_fullsleep = true;
2205		break;
2206	case ATH9K_PM_NETWORK_SLEEP:
2207
2208		if (ah->caps.hw_caps & ATH9K_HW_CAP_MCI)
2209			REG_WRITE(ah, AR_RTC_KEEP_AWAKE, 0x2);
2210
2211		ath9k_set_power_network_sleep(ah, setChip);
2212		break;
2213	default:
2214		ath_err(common, "Unknown power mode %u\n", mode);
2215		return false;
2216	}
2217	ah->power_mode = mode;
2218
2219	/*
2220	 * XXX: If this warning never comes up after a while then
2221	 * simply keep the ATH_DBG_WARN_ON_ONCE() but make
2222	 * ath9k_hw_setpower() return type void.
2223	 */
2224
2225	if (!(ah->ah_flags & AH_UNPLUGGED))
2226		ATH_DBG_WARN_ON_ONCE(!status);
2227
2228	return status;
2229}
2230EXPORT_SYMBOL(ath9k_hw_setpower);
2231
2232/*******************/
2233/* Beacon Handling */
2234/*******************/
2235
2236void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
2237{
2238	int flags = 0;
2239
2240	ENABLE_REGWRITE_BUFFER(ah);
2241
2242	switch (ah->opmode) {
2243	case NL80211_IFTYPE_ADHOC:
2244	case NL80211_IFTYPE_MESH_POINT:
2245		REG_SET_BIT(ah, AR_TXCFG,
2246			    AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
2247		REG_WRITE(ah, AR_NEXT_NDP_TIMER, next_beacon +
2248			  TU_TO_USEC(ah->atim_window ? ah->atim_window : 1));
2249		flags |= AR_NDP_TIMER_EN;
2250	case NL80211_IFTYPE_AP:
2251		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, next_beacon);
2252		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, next_beacon -
2253			  TU_TO_USEC(ah->config.dma_beacon_response_time));
2254		REG_WRITE(ah, AR_NEXT_SWBA, next_beacon -
2255			  TU_TO_USEC(ah->config.sw_beacon_response_time));
2256		flags |=
2257			AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
2258		break;
2259	default:
2260		ath_dbg(ath9k_hw_common(ah), BEACON,
2261			"%s: unsupported opmode: %d\n", __func__, ah->opmode);
2262		return;
2263		break;
2264	}
2265
2266	REG_WRITE(ah, AR_BEACON_PERIOD, beacon_period);
2267	REG_WRITE(ah, AR_DMA_BEACON_PERIOD, beacon_period);
2268	REG_WRITE(ah, AR_SWBA_PERIOD, beacon_period);
2269	REG_WRITE(ah, AR_NDP_PERIOD, beacon_period);
2270
2271	REGWRITE_BUFFER_FLUSH(ah);
2272
2273	REG_SET_BIT(ah, AR_TIMER_MODE, flags);
2274}
2275EXPORT_SYMBOL(ath9k_hw_beaconinit);
2276
2277void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
2278				    const struct ath9k_beacon_state *bs)
2279{
2280	u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
2281	struct ath9k_hw_capabilities *pCap = &ah->caps;
2282	struct ath_common *common = ath9k_hw_common(ah);
2283
2284	ENABLE_REGWRITE_BUFFER(ah);
2285
2286	REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
2287
2288	REG_WRITE(ah, AR_BEACON_PERIOD,
2289		  TU_TO_USEC(bs->bs_intval));
2290	REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
2291		  TU_TO_USEC(bs->bs_intval));
2292
2293	REGWRITE_BUFFER_FLUSH(ah);
2294
2295	REG_RMW_FIELD(ah, AR_RSSI_THR,
2296		      AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
2297
2298	beaconintval = bs->bs_intval;
2299
2300	if (bs->bs_sleepduration > beaconintval)
2301		beaconintval = bs->bs_sleepduration;
2302
2303	dtimperiod = bs->bs_dtimperiod;
2304	if (bs->bs_sleepduration > dtimperiod)
2305		dtimperiod = bs->bs_sleepduration;
2306
2307	if (beaconintval == dtimperiod)
2308		nextTbtt = bs->bs_nextdtim;
2309	else
2310		nextTbtt = bs->bs_nexttbtt;
2311
2312	ath_dbg(common, BEACON, "next DTIM %d\n", bs->bs_nextdtim);
2313	ath_dbg(common, BEACON, "next beacon %d\n", nextTbtt);
2314	ath_dbg(common, BEACON, "beacon period %d\n", beaconintval);
2315	ath_dbg(common, BEACON, "DTIM period %d\n", dtimperiod);
2316
2317	ENABLE_REGWRITE_BUFFER(ah);
2318
2319	REG_WRITE(ah, AR_NEXT_DTIM,
2320		  TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
2321	REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
2322
2323	REG_WRITE(ah, AR_SLEEP1,
2324		  SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
2325		  | AR_SLEEP1_ASSUME_DTIM);
2326
2327	if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
2328		beacontimeout = (BEACON_TIMEOUT_VAL << 3);
2329	else
2330		beacontimeout = MIN_BEACON_TIMEOUT_VAL;
2331
2332	REG_WRITE(ah, AR_SLEEP2,
2333		  SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
2334
2335	REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
2336	REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
2337
2338	REGWRITE_BUFFER_FLUSH(ah);
2339
2340	REG_SET_BIT(ah, AR_TIMER_MODE,
2341		    AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
2342		    AR_DTIM_TIMER_EN);
2343
2344	/* TSF Out of Range Threshold */
2345	REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
2346}
2347EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
2348
2349/*******************/
2350/* HW Capabilities */
2351/*******************/
2352
2353static u8 fixup_chainmask(u8 chip_chainmask, u8 eeprom_chainmask)
2354{
2355	eeprom_chainmask &= chip_chainmask;
2356	if (eeprom_chainmask)
2357		return eeprom_chainmask;
2358	else
2359		return chip_chainmask;
2360}
2361
2362/**
2363 * ath9k_hw_dfs_tested - checks if DFS has been tested with used chipset
2364 * @ah: the atheros hardware data structure
2365 *
2366 * We enable DFS support upstream on chipsets which have passed a series
2367 * of tests. The testing requirements are going to be documented; the
2368 * desired test requirements are documented at:
2369 *
2370 * http://wireless.kernel.org/en/users/Drivers/ath9k/dfs
2371 *
2372 * Once a new chipset gets properly tested an individual commit can be used
2373 * to document the testing for DFS for that chipset.
2374 */
2375static bool ath9k_hw_dfs_tested(struct ath_hw *ah)
2376{
2377
2378	switch (ah->hw_version.macVersion) {
2379	/* AR9580 will likely be our first target to get testing on */
2380	case AR_SREV_VERSION_9580:
2381	default:
2382		return false;
2383	}
2384}
2385
2386int ath9k_hw_fill_cap_info(struct ath_hw *ah)
2387{
2388	struct ath9k_hw_capabilities *pCap = &ah->caps;
2389	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
2390	struct ath_common *common = ath9k_hw_common(ah);
2391	unsigned int chip_chainmask;
2392
2393	u16 eeval;
2394	u8 ant_div_ctl1, tx_chainmask, rx_chainmask;
2395
2396	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
2397	regulatory->current_rd = eeval;
2398
2399	if (ah->opmode != NL80211_IFTYPE_AP &&
2400	    ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
2401		if (regulatory->current_rd == 0x64 ||
2402		    regulatory->current_rd == 0x65)
2403			regulatory->current_rd += 5;
2404		else if (regulatory->current_rd == 0x41)
2405			regulatory->current_rd = 0x43;
2406		ath_dbg(common, REGULATORY, "regdomain mapped to 0x%x\n",
2407			regulatory->current_rd);
2408	}
2409
2410	eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
2411	if ((eeval & (AR5416_OPFLAGS_11G | AR5416_OPFLAGS_11A)) == 0) {
2412		ath_err(common,
2413			"no band has been marked as supported in EEPROM\n");
2414		return -EINVAL;
2415	}
2416
2417	if (eeval & AR5416_OPFLAGS_11A)
2418		pCap->hw_caps |= ATH9K_HW_CAP_5GHZ;
2419
2420	if (eeval & AR5416_OPFLAGS_11G)
2421		pCap->hw_caps |= ATH9K_HW_CAP_2GHZ;
2422
2423	if (AR_SREV_9485(ah) || AR_SREV_9285(ah) || AR_SREV_9330(ah))
2424		chip_chainmask = 1;
2425	else if (AR_SREV_9462(ah))
2426		chip_chainmask = 3;
2427	else if (!AR_SREV_9280_20_OR_LATER(ah))
2428		chip_chainmask = 7;
2429	else if (!AR_SREV_9300_20_OR_LATER(ah) || AR_SREV_9340(ah))
2430		chip_chainmask = 3;
2431	else
2432		chip_chainmask = 7;
2433
2434	pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
2435	/*
2436	 * For AR9271 we will temporarily use the rx chainmask as read from
2437	 * the EEPROM.
2438	 */
2439	if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
2440	    !(eeval & AR5416_OPFLAGS_11A) &&
2441	    !(AR_SREV_9271(ah)))
2442		/* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
2443		pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
2444	else if (AR_SREV_9100(ah))
2445		pCap->rx_chainmask = 0x7;
2446	else
2447		/* Use rx_chainmask from EEPROM. */
2448		pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
2449
2450	pCap->tx_chainmask = fixup_chainmask(chip_chainmask, pCap->tx_chainmask);
2451	pCap->rx_chainmask = fixup_chainmask(chip_chainmask, pCap->rx_chainmask);
2452	ah->txchainmask = pCap->tx_chainmask;
2453	ah->rxchainmask = pCap->rx_chainmask;
2454
2455	ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
2456
2457	/* enable key search for every frame in an aggregate */
2458	if (AR_SREV_9300_20_OR_LATER(ah))
2459		ah->misc_mode |= AR_PCU_ALWAYS_PERFORM_KEYSEARCH;
2460
2461	common->crypt_caps |= ATH_CRYPT_CAP_CIPHER_AESCCM;
2462
2463	if (ah->hw_version.devid != AR2427_DEVID_PCIE)
2464		pCap->hw_caps |= ATH9K_HW_CAP_HT;
2465	else
2466		pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
2467
2468	if (AR_SREV_9271(ah))
2469		pCap->num_gpio_pins = AR9271_NUM_GPIO;
2470	else if (AR_DEVID_7010(ah))
2471		pCap->num_gpio_pins = AR7010_NUM_GPIO;
2472	else if (AR_SREV_9300_20_OR_LATER(ah))
2473		pCap->num_gpio_pins = AR9300_NUM_GPIO;
2474	else if (AR_SREV_9287_11_OR_LATER(ah))
2475		pCap->num_gpio_pins = AR9287_NUM_GPIO;
2476	else if (AR_SREV_9285_12_OR_LATER(ah))
2477		pCap->num_gpio_pins = AR9285_NUM_GPIO;
2478	else if (AR_SREV_9280_20_OR_LATER(ah))
2479		pCap->num_gpio_pins = AR928X_NUM_GPIO;
2480	else
2481		pCap->num_gpio_pins = AR_NUM_GPIO;
2482
2483	if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah))
2484		pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
2485	else
2486		pCap->rts_aggr_limit = (8 * 1024);
2487
2488#if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
2489	ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
2490	if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
2491		ah->rfkill_gpio =
2492			MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
2493		ah->rfkill_polarity =
2494			MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
2495
2496		pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
2497	}
2498#endif
2499	if (AR_SREV_9271(ah) || AR_SREV_9300_20_OR_LATER(ah))
2500		pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
2501	else
2502		pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
2503
2504	if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
2505		pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
2506	else
2507		pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
2508
2509	if (AR_SREV_9300_20_OR_LATER(ah)) {
2510		pCap->hw_caps |= ATH9K_HW_CAP_EDMA | ATH9K_HW_CAP_FASTCLOCK;
2511		if (!AR_SREV_9330(ah) && !AR_SREV_9485(ah))
2512			pCap->hw_caps |= ATH9K_HW_CAP_LDPC;
2513
2514		pCap->rx_hp_qdepth = ATH9K_HW_RX_HP_QDEPTH;
2515		pCap->rx_lp_qdepth = ATH9K_HW_RX_LP_QDEPTH;
2516		pCap->rx_status_len = sizeof(struct ar9003_rxs);
2517		pCap->tx_desc_len = sizeof(struct ar9003_txc);
2518		pCap->txs_len = sizeof(struct ar9003_txs);
2519		if (!ah->config.paprd_disable &&
2520		    ah->eep_ops->get_eeprom(ah, EEP_PAPRD))
2521			pCap->hw_caps |= ATH9K_HW_CAP_PAPRD;
2522	} else {
2523		pCap->tx_desc_len = sizeof(struct ath_desc);
2524		if (AR_SREV_9280_20(ah))
2525			pCap->hw_caps |= ATH9K_HW_CAP_FASTCLOCK;
2526	}
2527
2528	if (AR_SREV_9300_20_OR_LATER(ah))
2529		pCap->hw_caps |= ATH9K_HW_CAP_RAC_SUPPORTED;
2530
2531	if (AR_SREV_9300_20_OR_LATER(ah))
2532		ah->ent_mode = REG_READ(ah, AR_ENT_OTP);
2533
2534	if (AR_SREV_9287_11_OR_LATER(ah) || AR_SREV_9271(ah))
2535		pCap->hw_caps |= ATH9K_HW_CAP_SGI_20;
2536
2537	if (AR_SREV_9285(ah))
2538		if (ah->eep_ops->get_eeprom(ah, EEP_MODAL_VER) >= 3) {
2539			ant_div_ctl1 =
2540				ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2541			if ((ant_div_ctl1 & 0x1) && ((ant_div_ctl1 >> 3) & 0x1))
2542				pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2543		}
2544	if (AR_SREV_9300_20_OR_LATER(ah)) {
2545		if (ah->eep_ops->get_eeprom(ah, EEP_CHAIN_MASK_REDUCE))
2546			pCap->hw_caps |= ATH9K_HW_CAP_APM;
2547	}
2548
2549
2550	if (AR_SREV_9330(ah) || AR_SREV_9485(ah)) {
2551		ant_div_ctl1 = ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2552		/*
2553		 * enable the diversity-combining algorithm only when
2554		 * both enable_lna_div and enable_fast_div are set
2555		 *		Table for Diversity
2556		 * ant_div_alt_lnaconf		bit 0-1
2557		 * ant_div_main_lnaconf		bit 2-3
2558		 * ant_div_alt_gaintb		bit 4
2559		 * ant_div_main_gaintb		bit 5
2560		 * enable_ant_div_lnadiv	bit 6
2561		 * enable_ant_fast_div		bit 7
2562		 */
2563		if ((ant_div_ctl1 >> 0x6) == 0x3)
2564			pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2565	}
2566
2567	if (AR_SREV_9485_10(ah)) {
2568		pCap->pcie_lcr_extsync_en = true;
2569		pCap->pcie_lcr_offset = 0x80;
2570	}
2571
2572	if (ath9k_hw_dfs_tested(ah))
2573		pCap->hw_caps |= ATH9K_HW_CAP_DFS;
2574
2575	tx_chainmask = pCap->tx_chainmask;
2576	rx_chainmask = pCap->rx_chainmask;
2577	while (tx_chainmask || rx_chainmask) {
2578		if (tx_chainmask & BIT(0))
2579			pCap->max_txchains++;
2580		if (rx_chainmask & BIT(0))
2581			pCap->max_rxchains++;
2582
2583		tx_chainmask >>= 1;
2584		rx_chainmask >>= 1;
2585	}
2586
2587	if (AR_SREV_9300_20_OR_LATER(ah)) {
2588		ah->enabled_cals |= TX_IQ_CAL;
2589		if (AR_SREV_9485_OR_LATER(ah))
2590			ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
2591	}
2592
2593	if (AR_SREV_9462(ah)) {
2594
2595		if (!(ah->ent_mode & AR_ENT_OTP_49GHZ_DISABLE))
2596			pCap->hw_caps |= ATH9K_HW_CAP_MCI;
2597
2598		if (AR_SREV_9462_20(ah))
2599			pCap->hw_caps |= ATH9K_HW_CAP_RTT;
2600
2601	}
2602
2603
2604	return 0;
2605}
2606
2607/****************************/
2608/* GPIO / RFKILL / Antennae */
2609/****************************/
2610
2611static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
2612					 u32 gpio, u32 type)
2613{
2614	int addr;
2615	u32 gpio_shift, tmp;
2616
2617	if (gpio > 11)
2618		addr = AR_GPIO_OUTPUT_MUX3;
2619	else if (gpio > 5)
2620		addr = AR_GPIO_OUTPUT_MUX2;
2621	else
2622		addr = AR_GPIO_OUTPUT_MUX1;
2623
2624	gpio_shift = (gpio % 6) * 5;
2625
2626	if (AR_SREV_9280_20_OR_LATER(ah)
2627	    || (addr != AR_GPIO_OUTPUT_MUX1)) {
2628		REG_RMW(ah, addr, (type << gpio_shift),
2629			(0x1f << gpio_shift));
2630	} else {
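		/*
		 * Rearrange the value read back from MUX1 (bits 4-8 shifted
		 * up by one) before the read-modify-write below; this appears
		 * to work around how older parts present this register.
		 */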
2631		tmp = REG_READ(ah, addr);
2632		tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
2633		tmp &= ~(0x1f << gpio_shift);
2634		tmp |= (type << gpio_shift);
2635		REG_WRITE(ah, addr, tmp);
2636	}
2637}
2638
2639void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
2640{
2641	u32 gpio_shift;
2642
2643	BUG_ON(gpio >= ah->caps.num_gpio_pins);
2644
2645	if (AR_DEVID_7010(ah)) {
2646		gpio_shift = gpio;
2647		REG_RMW(ah, AR7010_GPIO_OE,
2648			(AR7010_GPIO_OE_AS_INPUT << gpio_shift),
2649			(AR7010_GPIO_OE_MASK << gpio_shift));
2650		return;
2651	}
2652
2653	gpio_shift = gpio << 1;
2654	REG_RMW(ah,
2655		AR_GPIO_OE_OUT,
2656		(AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
2657		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2658}
2659EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
2660
2661u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
2662{
2663#define MS_REG_READ(x, y) \
2664	(MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
2665
2666	if (gpio >= ah->caps.num_gpio_pins)
2667		return 0xffffffff;
2668
2669	if (AR_DEVID_7010(ah)) {
2670		u32 val;
2671		val = REG_READ(ah, AR7010_GPIO_IN);
2672		return (MS(val, AR7010_GPIO_IN_VAL) & AR_GPIO_BIT(gpio)) == 0;
2673	} else if (AR_SREV_9300_20_OR_LATER(ah))
2674		return (MS(REG_READ(ah, AR_GPIO_IN), AR9300_GPIO_IN_VAL) &
2675			AR_GPIO_BIT(gpio)) != 0;
2676	else if (AR_SREV_9271(ah))
2677		return MS_REG_READ(AR9271, gpio) != 0;
2678	else if (AR_SREV_9287_11_OR_LATER(ah))
2679		return MS_REG_READ(AR9287, gpio) != 0;
2680	else if (AR_SREV_9285_12_OR_LATER(ah))
2681		return MS_REG_READ(AR9285, gpio) != 0;
2682	else if (AR_SREV_9280_20_OR_LATER(ah))
2683		return MS_REG_READ(AR928X, gpio) != 0;
2684	else
2685		return MS_REG_READ(AR, gpio) != 0;
2686}
2687EXPORT_SYMBOL(ath9k_hw_gpio_get);
2688
2689void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
2690			 u32 ah_signal_type)
2691{
2692	u32 gpio_shift;
2693
2694	if (AR_DEVID_7010(ah)) {
2695		gpio_shift = gpio;
2696		REG_RMW(ah, AR7010_GPIO_OE,
2697			(AR7010_GPIO_OE_AS_OUTPUT << gpio_shift),
2698			(AR7010_GPIO_OE_MASK << gpio_shift));
2699		return;
2700	}
2701
2702	ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
2703	gpio_shift = 2 * gpio;
2704	REG_RMW(ah,
2705		AR_GPIO_OE_OUT,
2706		(AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
2707		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2708}
2709EXPORT_SYMBOL(ath9k_hw_cfg_output);
2710
2711void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
2712{
2713	if (AR_DEVID_7010(ah)) {
2714		val = val ? 0 : 1;
2715		REG_RMW(ah, AR7010_GPIO_OUT, ((val&1) << gpio),
2716			AR_GPIO_BIT(gpio));
2717		return;
2718	}
2719
2720	if (AR_SREV_9271(ah))
2721		val = ~val;
2722
2723	REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
2724		AR_GPIO_BIT(gpio));
2725}
2726EXPORT_SYMBOL(ath9k_hw_set_gpio);
2727
2728void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
2729{
2730	REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
2731}
2732EXPORT_SYMBOL(ath9k_hw_setantenna);
2733
2734/*********************/
2735/* General Operation */
2736/*********************/
2737
2738u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
2739{
2740	u32 bits = REG_READ(ah, AR_RX_FILTER);
2741	u32 phybits = REG_READ(ah, AR_PHY_ERR);
2742
2743	if (phybits & AR_PHY_ERR_RADAR)
2744		bits |= ATH9K_RX_FILTER_PHYRADAR;
2745	if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
2746		bits |= ATH9K_RX_FILTER_PHYERR;
2747
2748	return bits;
2749}
2750EXPORT_SYMBOL(ath9k_hw_getrxfilter);
2751
2752void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
2753{
2754	u32 phybits;
2755
2756	ENABLE_REGWRITE_BUFFER(ah);
2757
2758	if (AR_SREV_9462(ah))
2759		bits |= ATH9K_RX_FILTER_CONTROL_WRAPPER;
2760
2761	REG_WRITE(ah, AR_RX_FILTER, bits);
2762
2763	phybits = 0;
2764	if (bits & ATH9K_RX_FILTER_PHYRADAR)
2765		phybits |= AR_PHY_ERR_RADAR;
2766	if (bits & ATH9K_RX_FILTER_PHYERR)
2767		phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
2768	REG_WRITE(ah, AR_PHY_ERR, phybits);
2769
2770	if (phybits)
2771		REG_SET_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2772	else
2773		REG_CLR_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2774
2775	REGWRITE_BUFFER_FLUSH(ah);
2776}
2777EXPORT_SYMBOL(ath9k_hw_setrxfilter);
2778
2779bool ath9k_hw_phy_disable(struct ath_hw *ah)
2780{
2781	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
2782		return false;
2783
2784	ath9k_hw_init_pll(ah, NULL);
2785	ah->htc_reset_init = true;
2786	return true;
2787}
2788EXPORT_SYMBOL(ath9k_hw_phy_disable);
2789
2790bool ath9k_hw_disable(struct ath_hw *ah)
2791{
2792	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2793		return false;
2794
2795	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
2796		return false;
2797
2798	ath9k_hw_init_pll(ah, NULL);
2799	return true;
2800}
2801EXPORT_SYMBOL(ath9k_hw_disable);
2802
2803static int get_antenna_gain(struct ath_hw *ah, struct ath9k_channel *chan)
2804{
2805	enum eeprom_param gain_param;
2806
2807	if (IS_CHAN_2GHZ(chan))
2808		gain_param = EEP_ANTENNA_GAIN_2G;
2809	else
2810		gain_param = EEP_ANTENNA_GAIN_5G;
2811
2812	return ah->eep_ops->get_eeprom(ah, gain_param);
2813}
2814
2815void ath9k_hw_apply_txpower(struct ath_hw *ah, struct ath9k_channel *chan,
2816			    bool test)
2817{
2818	struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2819	struct ieee80211_channel *channel;
2820	int chan_pwr, new_pwr, max_gain;
2821	int ant_gain, ant_reduction = 0;
2822
2823	if (!chan)
2824		return;
2825
2826	channel = chan->chan;
2827	chan_pwr = min_t(int, channel->max_power * 2, MAX_RATE_POWER);
2828	new_pwr = min_t(int, chan_pwr, reg->power_limit);
2829	max_gain = chan_pwr - new_pwr + channel->max_antenna_gain * 2;
2830
2831	ant_gain = get_antenna_gain(ah, chan);
2832	if (ant_gain > max_gain)
2833		ant_reduction = ant_gain - max_gain;
2834
2835	ah->eep_ops->set_txpower(ah, chan,
2836				 ath9k_regd_get_ctl(reg, chan),
2837				 ant_reduction, new_pwr, test);
2838}
2839
2840void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit, bool test)
2841{
2842	struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2843	struct ath9k_channel *chan = ah->curchan;
2844	struct ieee80211_channel *channel = chan->chan;
2845
2846	reg->power_limit = min_t(u32, limit, MAX_RATE_POWER);
2847	if (test)
2848		channel->max_power = MAX_RATE_POWER / 2;
2849
2850	ath9k_hw_apply_txpower(ah, chan, test);
2851
2852	if (test)
2853		channel->max_power = DIV_ROUND_UP(reg->max_power_level, 2);
2854}
2855EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
2856
2857void ath9k_hw_setopmode(struct ath_hw *ah)
2858{
2859	ath9k_hw_set_operating_mode(ah, ah->opmode);
2860}
2861EXPORT_SYMBOL(ath9k_hw_setopmode);
2862
2863void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
2864{
2865	REG_WRITE(ah, AR_MCAST_FIL0, filter0);
2866	REG_WRITE(ah, AR_MCAST_FIL1, filter1);
2867}
2868EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
2869
2870void ath9k_hw_write_associd(struct ath_hw *ah)
2871{
2872	struct ath_common *common = ath9k_hw_common(ah);
2873
2874	REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
2875	REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
2876		  ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2877}
2878EXPORT_SYMBOL(ath9k_hw_write_associd);
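/*
 * Example (illustrative, addresses assumed): for BSSID 00:11:22:33:44:55 and
 * AID 1, AR_BSS_ID0 receives 0x33221100 (little-endian low four bytes) and
 * AR_BSS_ID1 receives 0x5544 in its low 16 bits, with the 14-bit AID shifted
 * into the AR_BSS_ID1_AID field.
 */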
2879
2880#define ATH9K_MAX_TSF_READ 10
2881
2882u64 ath9k_hw_gettsf64(struct ath_hw *ah)
2883{
2884	u32 tsf_lower, tsf_upper1, tsf_upper2;
2885	int i;
2886
2887	tsf_upper1 = REG_READ(ah, AR_TSF_U32);
2888	for (i = 0; i < ATH9K_MAX_TSF_READ; i++) {
2889		tsf_lower = REG_READ(ah, AR_TSF_L32);
2890		tsf_upper2 = REG_READ(ah, AR_TSF_U32);
2891		if (tsf_upper2 == tsf_upper1)
2892			break;
2893		tsf_upper1 = tsf_upper2;
2894	}
2895
2896	WARN_ON(i == ATH9K_MAX_TSF_READ);
2897
2898	return (((u64)tsf_upper1 << 32) | tsf_lower);
2899}
2900EXPORT_SYMBOL(ath9k_hw_gettsf64);
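/*
 * Note on ath9k_hw_gettsf64() (illustrative): the upper TSF word is re-read
 * until two consecutive reads agree, so a carry out of the lower 32 bits
 * between the two register reads cannot produce a torn 64-bit value;
 * ATH9K_MAX_TSF_READ bounds the number of retries.
 */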
2901
2902void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
2903{
2904	REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
2905	REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
2906}
2907EXPORT_SYMBOL(ath9k_hw_settsf64);
2908
2909void ath9k_hw_reset_tsf(struct ath_hw *ah)
2910{
2911	if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
2912			   AH_TSF_WRITE_TIMEOUT))
2913		ath_dbg(ath9k_hw_common(ah), RESET,
2914			"AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
2915
2916	REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
2917}
2918EXPORT_SYMBOL(ath9k_hw_reset_tsf);
2919
2920void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
2921{
2922	if (setting)
2923		ah->misc_mode |= AR_PCU_TX_ADD_TSF;
2924	else
2925		ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
2926}
2927EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
2928
2929void ath9k_hw_set11nmac2040(struct ath_hw *ah)
2930{
2931	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
2932	u32 macmode;
2933
2934	if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
2935		macmode = AR_2040_JOINED_RX_CLEAR;
2936	else
2937		macmode = 0;
2938
2939	REG_WRITE(ah, AR_2040_MODE, macmode);
2940}
2941
2942/* HW Generic timers configuration */
2943
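/*
 * Layout note (illustrative): entries 0-7 all point at the NDP timer
 * registers (only index AR_FIRST_NDP_TIMER appears to be handed out in
 * practice), while entries 8-15 use the NDP2 register bank, stepping four
 * bytes per timer and enabling one mode bit (0x0001..0x0080) each.
 */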
2944static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
2945{
2946	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2947	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2948	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2949	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2950	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2951	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2952	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2953	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2954	{AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
2955	{AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
2956				AR_NDP2_TIMER_MODE, 0x0002},
2957	{AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
2958				AR_NDP2_TIMER_MODE, 0x0004},
2959	{AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
2960				AR_NDP2_TIMER_MODE, 0x0008},
2961	{AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
2962				AR_NDP2_TIMER_MODE, 0x0010},
2963	{AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
2964				AR_NDP2_TIMER_MODE, 0x0020},
2965	{AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
2966				AR_NDP2_TIMER_MODE, 0x0040},
2967	{AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
2968				AR_NDP2_TIMER_MODE, 0x0080}
2969};
2970
2971/* HW generic timer primitives */
2972
2973/* compute and clear index of rightmost 1 */
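/*
 * This is the classic de Bruijn multiply: b &= (0 - b) isolates the lowest
 * set bit, multiplying by the debruijn32 constant and shifting right by 27
 * yields a distinct 5-bit value for each bit position, and gen_timer_index[]
 * (filled at init time) maps that value back to the timer index.  The bit is
 * also cleared from *mask so callers can iterate until the mask is empty.
 */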
2974static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
2975{
2976	u32 b;
2977
2978	b = *mask;
2979	b &= (0-b);
2980	*mask &= ~b;
2981	b *= debruijn32;
2982	b >>= 27;
2983
2984	return timer_table->gen_timer_index[b];
2985}
2986
2987u32 ath9k_hw_gettsf32(struct ath_hw *ah)
2988{
2989	return REG_READ(ah, AR_TSF_L32);
2990}
2991EXPORT_SYMBOL(ath9k_hw_gettsf32);
2992
2993struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
2994					  void (*trigger)(void *),
2995					  void (*overflow)(void *),
2996					  void *arg,
2997					  u8 timer_index)
2998{
2999	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3000	struct ath_gen_timer *timer;
3001
3002	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
3003
3004	if (timer == NULL) {
3005		ath_err(ath9k_hw_common(ah),
3006			"Failed to allocate memory for hw timer[%d]\n",
3007			timer_index);
3008		return NULL;
3009	}
3010
3011	/* allocate a hardware generic timer slot */
3012	timer_table->timers[timer_index] = timer;
3013	timer->index = timer_index;
3014	timer->trigger = trigger;
3015	timer->overflow = overflow;
3016	timer->arg = arg;
3017
3018	return timer;
3019}
3020EXPORT_SYMBOL(ath_gen_timer_alloc);
3021
3022void ath9k_hw_gen_timer_start(struct ath_hw *ah,
3023			      struct ath_gen_timer *timer,
3024			      u32 trig_timeout,
3025			      u32 timer_period)
3026{
3027	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3028	u32 tsf, timer_next;
3029
3030	BUG_ON(!timer_period);
3031
3032	set_bit(timer->index, &timer_table->timer_mask.timer_bits);
3033
3034	tsf = ath9k_hw_gettsf32(ah);
3035
3036	timer_next = tsf + trig_timeout;
3037
3038	ath_dbg(ath9k_hw_common(ah), HWTIMER,
3039		"current tsf %x period %x timer_next %x\n",
3040		tsf, timer_period, timer_next);
3041
3042	/*
3043	 * Program generic timer registers
3044	 */
3045	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
3046		 timer_next);
3047	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
3048		  timer_period);
3049	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3050		    gen_tmr_configuration[timer->index].mode_mask);
3051
3052	if (AR_SREV_9462(ah)) {
3053		/*
3054		 * Starting with the AR9462, each generic timer can select which
3055		 * TSF to use. We still follow the old rule: timers 0-7 use TSF
3056		 * and timers 8-15 use TSF2.
3057		 */
3058		if (timer->index < AR_GEN_TIMER_BANK_1_LEN)
3059			REG_CLR_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3060				       (1 << timer->index));
3061		else
3062			REG_SET_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3063				       (1 << timer->index));
3064	}
3065
3066	/* Enable both trigger and thresh interrupt masks */
3067	REG_SET_BIT(ah, AR_IMR_S5,
3068		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3069		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3070}
3071EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
3072
3073void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
3074{
3075	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3076
3077	if ((timer->index < AR_FIRST_NDP_TIMER) ||
3078		(timer->index >= ATH_MAX_GEN_TIMER)) {
3079		return;
3080	}
3081
3082	/* Clear generic timer enable bits. */
3083	REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3084			gen_tmr_configuration[timer->index].mode_mask);
3085
3086	/* Disable both trigger and thresh interrupt masks */
3087	REG_CLR_BIT(ah, AR_IMR_S5,
3088		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3089		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3090
3091	clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
3092}
3093EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
3094
3095void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
3096{
3097	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3098
3099	/* free the hardware generic timer slot */
3100	timer_table->timers[timer->index] = NULL;
3101	kfree(timer);
3102}
3103EXPORT_SYMBOL(ath_gen_timer_free);
3104
3105/*
3106 * Generic Timer Interrupts handling
3107 */
3108void ath_gen_timer_isr(struct ath_hw *ah)
3109{
3110	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3111	struct ath_gen_timer *timer;
3112	struct ath_common *common = ath9k_hw_common(ah);
3113	u32 trigger_mask, thresh_mask, index;
3114
3115	/* get hardware generic timer interrupt status */
3116	trigger_mask = ah->intr_gen_timer_trigger;
3117	thresh_mask = ah->intr_gen_timer_thresh;
3118	trigger_mask &= timer_table->timer_mask.val;
3119	thresh_mask &= timer_table->timer_mask.val;
3120
3121	trigger_mask &= ~thresh_mask;
3122
3123	while (thresh_mask) {
3124		index = rightmost_index(timer_table, &thresh_mask);
3125		timer = timer_table->timers[index];
3126		BUG_ON(!timer);
3127		ath_dbg(common, HWTIMER, "TSF overflow for Gen timer %d\n",
3128			index);
3129		timer->overflow(timer->arg);
3130	}
3131
3132	while (trigger_mask) {
3133		index = rightmost_index(timer_table, &trigger_mask);
3134		timer = timer_table->timers[index];
3135		BUG_ON(!timer);
3136		ath_dbg(common, HWTIMER,
3137			"Gen timer[%d] trigger\n", index);
3138		timer->trigger(timer->arg);
3139	}
3140}
3141EXPORT_SYMBOL(ath_gen_timer_isr);
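/*
 * Usage sketch (illustrative; my_trigger, my_overflow, priv, timeout_us and
 * period_us are hypothetical names, not part of the driver):
 *
 *	timer = ath_gen_timer_alloc(ah, my_trigger, my_overflow, priv,
 *				    AR_FIRST_NDP_TIMER);
 *	if (timer)
 *		ath9k_hw_gen_timer_start(ah, timer, timeout_us, period_us);
 *	...
 *	ath9k_hw_gen_timer_stop(ah, timer);
 *	ath_gen_timer_free(ah, timer);
 *
 * ath_gen_timer_isr() is expected to run from the interrupt path once
 * intr_gen_timer_trigger/thresh have been filled in; threshold (overflow)
 * events are dispatched first, and a timer reported in both masks is only
 * notified once, via ->overflow().
 */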
3142
3143/********/
3144/* HTC  */
3145/********/
3146
3147static struct {
3148	u32 version;
3149	const char *name;
3150} ath_mac_bb_names[] = {
3151	/* Devices with external radios */
3152	{ AR_SREV_VERSION_5416_PCI,	"5416" },
3153	{ AR_SREV_VERSION_5416_PCIE,	"5418" },
3154	{ AR_SREV_VERSION_9100,		"9100" },
3155	{ AR_SREV_VERSION_9160,		"9160" },
3156	/* Single-chip solutions */
3157	{ AR_SREV_VERSION_9280,		"9280" },
3158	{ AR_SREV_VERSION_9285,		"9285" },
3159	{ AR_SREV_VERSION_9287,         "9287" },
3160	{ AR_SREV_VERSION_9271,         "9271" },
3161	{ AR_SREV_VERSION_9300,         "9300" },
3162	{ AR_SREV_VERSION_9330,         "9330" },
3163	{ AR_SREV_VERSION_9340,		"9340" },
3164	{ AR_SREV_VERSION_9485,         "9485" },
3165	{ AR_SREV_VERSION_9462,         "9462" },
3166};
3167
3168/* For devices with external radios */
3169static struct {
3170	u16 version;
3171	const char *name;
3172} ath_rf_names[] = {
3173	{ 0,				"5133" },
3174	{ AR_RAD5133_SREV_MAJOR,	"5133" },
3175	{ AR_RAD5122_SREV_MAJOR,	"5122" },
3176	{ AR_RAD2133_SREV_MAJOR,	"2133" },
3177	{ AR_RAD2122_SREV_MAJOR,	"2122" }
3178};
3179
3180/*
3181 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
3182 */
3183static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
3184{
3185	int i;
3186
3187	for (i = 0; i < ARRAY_SIZE(ath_mac_bb_names); i++) {
3188		if (ath_mac_bb_names[i].version == mac_bb_version) {
3189			return ath_mac_bb_names[i].name;
3190		}
3191	}
3192
3193	return "????";
3194}
3195
3196/*
3197 * Return the RF name. "????" is returned if the RF is unknown.
3198 * Used for devices with external radios.
3199 */
3200static const char *ath9k_hw_rf_name(u16 rf_version)
3201{
3202	int i;
3203
3204	for (i = 0; i < ARRAY_SIZE(ath_rf_names); i++) {
3205		if (ath_rf_names[i].version == rf_version) {
3206			return ath_rf_names[i].name;
3207		}
3208	}
3209
3210	return "????";
3211}
3212
3213void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
3214{
3215	int used;
3216
3217	/* chipsets >= AR9280 are single-chip */
3218	if (AR_SREV_9280_20_OR_LATER(ah)) {
3219		used = scnprintf(hw_name, len,
3220			       "Atheros AR%s Rev:%x",
3221			       ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3222			       ah->hw_version.macRev);
3223	} else {
3225		used = scnprintf(hw_name, len,
3226			       "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
3227			       ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3228			       ah->hw_version.macRev,
3229			       ath9k_hw_rf_name((ah->hw_version.analog5GhzRev &
3230						AR_RADIO_SREV_MAJOR)),
3231			       ah->hw_version.phyRev);
3232	}
3233
3234	hw_name[used] = '\0';
3235}
3236EXPORT_SYMBOL(ath9k_hw_name);
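/*
 * Example output (illustrative, revision values assumed): a single-chip
 * AR9280 might report "Atheros AR9280 Rev:2", while an older two-chip design
 * might report "Atheros AR5416 MAC/BB Rev:2 AR5133 RF Rev:81".
 */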