/*
 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * ARMv7 support: Jean Pihet <jpihet@mvista.com>
 * 2010 (c) MontaVista Software, LLC.
 *
 * Copied from ARMv6 code, with the low level code inspired
 *  by the ARMv7 Oprofile code.
 *
 * Cortex-A8 has up to 4 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters (the
 *  architectural maximum; the number actually implemented is read
 *  from PMNC at probe time) and a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ masked separately. The
 *  cycle counter can be reset on its own, and the event counters can
 *  be reset as a group.
 */

#ifdef CONFIG_CPU_V7
/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 */
enum armv7_perf_types {
	ARMV7_PERFCTR_PMNC_SW_INCR		= 0x00,
	ARMV7_PERFCTR_IFETCH_MISS		= 0x01,
	ARMV7_PERFCTR_ITLB_MISS			= 0x02,
	ARMV7_PERFCTR_DCACHE_REFILL		= 0x03,	/* L1 */
	ARMV7_PERFCTR_DCACHE_ACCESS		= 0x04,	/* L1 */
	ARMV7_PERFCTR_DTLB_REFILL		= 0x05,
	ARMV7_PERFCTR_DREAD			= 0x06,
	ARMV7_PERFCTR_DWRITE			= 0x07,
	ARMV7_PERFCTR_INSTR_EXECUTED		= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN			= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED		= 0x0A,
	ARMV7_PERFCTR_CID_WRITE			= 0x0B,
	/* ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
	 * It counts:
	 *  - all branch instructions,
	 *  - instructions that explicitly write the PC,
	 *  - exception generating instructions.
	 */
	ARMV7_PERFCTR_PC_WRITE			= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH		= 0x0D,
	ARMV7_PERFCTR_PC_PROC_RETURN		= 0x0E,
	ARMV7_PERFCTR_UNALIGNED_ACCESS		= 0x0F,

	/* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED	= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES		= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED		= 0x12,
	ARMV7_PERFCTR_MEM_ACCESS		= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS		= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB		= 0x15,
	ARMV7_PERFCTR_L2_DCACHE_ACCESS		= 0x16,
	ARMV7_PERFCTR_L2_DCACHE_REFILL		= 0x17,
	ARMV7_PERFCTR_L2_DCACHE_WB		= 0x18,
	ARMV7_PERFCTR_BUS_ACCESS		= 0x19,
	ARMV7_PERFCTR_MEMORY_ERROR		= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC		= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE		= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES		= 0x1D,

	ARMV7_PERFCTR_CPU_CYCLES		= 0xFF
};
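
/*
 * These encodings are what armv7_pmnc_write_evtsel() below programs into
 * the event-select register. ARMV7_PERFCTR_CPU_CYCLES (0xFF) is a software
 * pseudo-encoding: it is never written to the hardware but makes
 * armv7pmu_get_event_idx() place the event on the dedicated cycle counter.
 */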

/* ARMv7 Cortex-A8 specific event types */
enum armv7_a8_perf_types {
	ARMV7_PERFCTR_WRITE_BUFFER_FULL		= 0x40,
	ARMV7_PERFCTR_L2_STORE_MERGED		= 0x41,
	ARMV7_PERFCTR_L2_STORE_BUFF		= 0x42,
	ARMV7_PERFCTR_L2_ACCESS			= 0x43,
	ARMV7_PERFCTR_L2_CACH_MISS		= 0x44,
	ARMV7_PERFCTR_AXI_READ_CYCLES		= 0x45,
	ARMV7_PERFCTR_AXI_WRITE_CYCLES		= 0x46,
	ARMV7_PERFCTR_MEMORY_REPLAY		= 0x47,
	ARMV7_PERFCTR_UNALIGNED_ACCESS_REPLAY	= 0x48,
	ARMV7_PERFCTR_L1_DATA_MISS		= 0x49,
	ARMV7_PERFCTR_L1_INST_MISS		= 0x4A,
	ARMV7_PERFCTR_L1_DATA_COLORING		= 0x4B,
	ARMV7_PERFCTR_L1_NEON_DATA		= 0x4C,
	ARMV7_PERFCTR_L1_NEON_CACH_DATA		= 0x4D,
	ARMV7_PERFCTR_L2_NEON			= 0x4E,
	ARMV7_PERFCTR_L2_NEON_HIT		= 0x4F,
	ARMV7_PERFCTR_L1_INST			= 0x50,
	ARMV7_PERFCTR_PC_RETURN_MIS_PRED	= 0x51,
	ARMV7_PERFCTR_PC_BRANCH_FAILED		= 0x52,
	ARMV7_PERFCTR_PC_BRANCH_TAKEN		= 0x53,
	ARMV7_PERFCTR_PC_BRANCH_EXECUTED	= 0x54,
	ARMV7_PERFCTR_OP_EXECUTED		= 0x55,
	ARMV7_PERFCTR_CYCLES_INST_STALL		= 0x56,
	ARMV7_PERFCTR_CYCLES_INST		= 0x57,
	ARMV7_PERFCTR_CYCLES_NEON_DATA_STALL	= 0x58,
	ARMV7_PERFCTR_CYCLES_NEON_INST_STALL	= 0x59,
	ARMV7_PERFCTR_NEON_CYCLES		= 0x5A,

	ARMV7_PERFCTR_PMU0_EVENTS		= 0x70,
	ARMV7_PERFCTR_PMU1_EVENTS		= 0x71,
	ARMV7_PERFCTR_PMU_EVENTS		= 0x72,
};

/* ARMv7 Cortex-A9 specific event types */
enum armv7_a9_perf_types {
	ARMV7_PERFCTR_JAVA_HW_BYTECODE_EXEC	= 0x40,
	ARMV7_PERFCTR_JAVA_SW_BYTECODE_EXEC	= 0x41,
	ARMV7_PERFCTR_JAZELLE_BRANCH_EXEC	= 0x42,

	ARMV7_PERFCTR_COHERENT_LINE_MISS	= 0x50,
	ARMV7_PERFCTR_COHERENT_LINE_HIT		= 0x51,

	ARMV7_PERFCTR_ICACHE_DEP_STALL_CYCLES	= 0x60,
	ARMV7_PERFCTR_DCACHE_DEP_STALL_CYCLES	= 0x61,
	ARMV7_PERFCTR_TLB_MISS_DEP_STALL_CYCLES	= 0x62,
	ARMV7_PERFCTR_STREX_EXECUTED_PASSED	= 0x63,
	ARMV7_PERFCTR_STREX_EXECUTED_FAILED	= 0x64,
	ARMV7_PERFCTR_DATA_EVICTION		= 0x65,
	ARMV7_PERFCTR_ISSUE_STAGE_NO_INST	= 0x66,
	ARMV7_PERFCTR_ISSUE_STAGE_EMPTY		= 0x67,
	ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE	= 0x68,

	ARMV7_PERFCTR_PREDICTABLE_FUNCT_RETURNS	= 0x6E,

	ARMV7_PERFCTR_MAIN_UNIT_EXECUTED_INST	= 0x70,
	ARMV7_PERFCTR_SECOND_UNIT_EXECUTED_INST	= 0x71,
	ARMV7_PERFCTR_LD_ST_UNIT_EXECUTED_INST	= 0x72,
	ARMV7_PERFCTR_FP_EXECUTED_INST		= 0x73,
	ARMV7_PERFCTR_NEON_EXECUTED_INST	= 0x74,

	ARMV7_PERFCTR_PLD_FULL_DEP_STALL_CYCLES	= 0x80,
	ARMV7_PERFCTR_DATA_WR_DEP_STALL_CYCLES	= 0x81,
	ARMV7_PERFCTR_ITLB_MISS_DEP_STALL_CYCLES	= 0x82,
	ARMV7_PERFCTR_DTLB_MISS_DEP_STALL_CYCLES	= 0x83,
	ARMV7_PERFCTR_MICRO_ITLB_MISS_DEP_STALL_CYCLES	= 0x84,
	ARMV7_PERFCTR_MICRO_DTLB_MISS_DEP_STALL_CYCLES	= 0x85,
	ARMV7_PERFCTR_DMB_DEP_STALL_CYCLES	= 0x86,

	ARMV7_PERFCTR_INTGR_CLK_ENABLED_CYCLES	= 0x8A,
	ARMV7_PERFCTR_DATA_ENGINE_CLK_EN_CYCLES	= 0x8B,

	ARMV7_PERFCTR_ISB_INST			= 0x90,
	ARMV7_PERFCTR_DSB_INST			= 0x91,
	ARMV7_PERFCTR_DMB_INST			= 0x92,
	ARMV7_PERFCTR_EXT_INTERRUPTS		= 0x93,

	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_COMPLETED	= 0xA0,
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_SKIPPED	= 0xA1,
	ARMV7_PERFCTR_PLE_FIFO_FLUSH		= 0xA2,
	ARMV7_PERFCTR_PLE_RQST_COMPLETED	= 0xA3,
	ARMV7_PERFCTR_PLE_FIFO_OVERFLOW		= 0xA4,
	ARMV7_PERFCTR_PLE_RQST_PROG		= 0xA5
};

/* ARMv7 Cortex-A5 specific event types */
enum armv7_a5_perf_types {
	ARMV7_PERFCTR_IRQ_TAKEN			= 0x86,
	ARMV7_PERFCTR_FIQ_TAKEN			= 0x87,

	ARMV7_PERFCTR_EXT_MEM_RQST		= 0xc0,
	ARMV7_PERFCTR_NC_EXT_MEM_RQST		= 0xc1,
	ARMV7_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP	= 0xc3,
	ARMV7_PERFCTR_ENTER_READ_ALLOC		= 0xc4,
	ARMV7_PERFCTR_READ_ALLOC		= 0xc5,

	ARMV7_PERFCTR_STALL_SB_FULL		= 0xc9,
};

/* ARMv7 Cortex-A15 specific event types */
enum armv7_a15_perf_types {
	ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS	= 0x40,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS	= 0x41,
	ARMV7_PERFCTR_L1_DCACHE_READ_REFILL	= 0x42,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL	= 0x43,

	ARMV7_PERFCTR_L1_DTLB_READ_REFILL	= 0x4C,
	ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL	= 0x4D,

	ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS	= 0x50,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS	= 0x51,
	ARMV7_PERFCTR_L2_DCACHE_READ_REFILL	= 0x52,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL	= 0x53,

	ARMV7_PERFCTR_SPEC_PC_WRITE		= 0x76,
};
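
/*
 * Event numbers 0x40 and above are IMPLEMENTATION DEFINED in ARMv7, hence
 * the per-core enums above: the same encoding can mean different things on
 * different CPUs (0x40 is a write-buffer-full event on the A8 but a Java
 * bytecode execution event on the A9, for instance).
 */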

/*
 * Cortex-A8 HW events mapping
 *
 * The hardware events that we support. We do support cache operations but
 * we have Harvard caches and no way to combine instruction and data
 * accesses/misses in hardware.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]	    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
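
/*
 * The generic perf layer indexes these tables as map[type][op][result].
 * For example, an L1D read-miss event resolves through
 * armv7_a8_perf_cache_map[C(L1D)][C(OP_READ)][C(RESULT_MISS)] to
 * ARMV7_PERFCTR_DCACHE_REFILL (0x03).
 */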

/*
 * Cortex-A9 HW events mapping
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    =
					ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = ARMV7_PERFCTR_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]	    = ARMV7_PERFCTR_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};

/*
 * Cortex-A5 HW events mapping
 */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]	    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = HW_OP_UNSUPPORTED,
};

static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_PREFETCH_LINEFILL,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
		},
	},
	[C(L1I)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		/*
		 * The prefetch counters don't differentiate between the I
		 * side and the D side.
		 */
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_PREFETCH_LINEFILL,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};

/*
 * Cortex-A15 HW events mapping
 */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	[PERF_COUNT_HW_CPU_CYCLES]	    = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]	    = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_CACHE_MISSES]	    = HW_OP_UNSUPPORTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_SPEC_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]	    = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]	    = ARMV7_PERFCTR_BUS_CYCLES,
};

static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_L1_DCACHE_READ_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		/*
		 * Not all performance counters differentiate between read
		 * and write accesses/misses so we're not always strictly
		 * correct, but it's the best we can do. Writes and reads get
		 * combined in these cases.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_L2_DCACHE_READ_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]
					= ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_L1_DTLB_READ_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};

/*
 * Perf Events counters
 */
enum armv7_counters {
	ARMV7_CYCLE_COUNTER		= 1,	/* Cycle counter */
	ARMV7_COUNTER0			= 2,	/* First event counter */
};

/*
 * The cycle counter is ARMV7_CYCLE_COUNTER.
 * The first event counter is ARMV7_COUNTER0.
 * The last event counter is (ARMV7_COUNTER0 + armpmu->num_events - 1).
 */
#define	ARMV7_COUNTER_LAST	(ARMV7_COUNTER0 + armpmu->num_events - 1)
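
/*
 * Example: on a Cortex-A8 with four event counters plus the cycle counter,
 * armpmu->num_events is 5 (see armv7_read_num_pmnc_events() below); the
 * cycle counter is perf index 1 and the event counters start at perf
 * index 2, which maps to hardware counter CNT0.
 */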

/*
 * ARMv7 low level PMNC access
 */
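
/*
 * All PMU state lives in CP15 c9. The mrc/mcr encodings used below
 * correspond to the following registers in the ARMv7 Architecture
 * Reference Manual:
 *   c9, c12, 0: PMCR	(PMNC)	control register
 *   c9, c12, 1: PMCNTENSET	counter enable set (CNTENS)
 *   c9, c12, 2: PMCNTENCLR	counter enable clear (CNTENC)
 *   c9, c12, 3: PMOVSR		overflow flag status (FLAG)
 *   c9, c12, 5: PMSELR		counter select (SELECT)
 *   c9, c13, 0: PMCCNTR	cycle counter (CCNT)
 *   c9, c13, 1: PMXEVTYPER	selected counter's event type (EVTSEL)
 *   c9, c13, 2: PMXEVCNTR	selected counter's value
 *   c9, c14, 1: PMINTENSET	overflow interrupt enable set (INTENS)
 *   c9, c14, 2: PMINTENCLR	overflow interrupt enable clear (INTENC)
 */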

/*
 * Per-CPU PMNC: config reg
 */
#define ARMV7_PMNC_E		(1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P		(1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C		(1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D		(1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X		(1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP		(1 << 5) /* Disable CCNT if non-invasive debug */
#define	ARMV7_PMNC_N_SHIFT	11	 /* Number of counters supported */
#define	ARMV7_PMNC_N_MASK	0x1f
#define	ARMV7_PMNC_MASK		0x3f	 /* Mask for writable bits */

/*
 * Available counters
 */
#define ARMV7_CNT0		0	/* First event counter */
#define ARMV7_CCNT		31	/* Cycle counter */

/* Perf Event to low level counters mapping */
#define ARMV7_EVENT_CNT_TO_CNTx	(ARMV7_COUNTER0 - ARMV7_CNT0)
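
/*
 * For example, ARMV7_EVENT_CNT_TO_CNTx is 2 - 0 = 2, so perf index 2
 * (ARMV7_COUNTER0) addresses hardware counter 0 and ARMV7_CNTENS_P(2)
 * below evaluates to (1 << 0); the cycle counter always uses bit 31.
 */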

/*
 * CNTENS: counters enable reg
 */
#define ARMV7_CNTENS_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_CNTENS_C		(1 << ARMV7_CCNT)

/*
 * CNTENC: counters disable reg
 */
#define ARMV7_CNTENC_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_CNTENC_C		(1 << ARMV7_CCNT)

/*
 * INTENS: counters overflow interrupt enable reg
 */
#define ARMV7_INTENS_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_INTENS_C		(1 << ARMV7_CCNT)

/*
 * INTENC: counters overflow interrupt disable reg
 */
#define ARMV7_INTENC_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_INTENC_C		(1 << ARMV7_CCNT)

/*
 * EVTSEL: Event selection reg
 */
#define	ARMV7_EVTSEL_MASK	0xff		/* Mask for writable bits */

/*
 * SELECT: Counter selection reg
 */
#define	ARMV7_SELECT_MASK	0x1f		/* Mask for writable bits */

/*
 * FLAG: counters overflow flag status reg
 */
#define ARMV7_FLAG_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
#define ARMV7_FLAG_C		(1 << ARMV7_CCNT)
#define	ARMV7_FLAG_MASK		0xffffffff	/* Mask for writable bits */
#define	ARMV7_OVERFLOWED_MASK	ARMV7_FLAG_MASK

static inline unsigned long armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}

static inline void armv7_pmnc_write(unsigned long val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}

static inline int armv7_pmnc_has_overflowed(unsigned long pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}

static inline int armv7_pmnc_counter_has_overflowed(unsigned long pmnc,
					enum armv7_counters counter)
{
	int ret = 0;

	if (counter == ARMV7_CYCLE_COUNTER)
		ret = pmnc & ARMV7_FLAG_C;
	else if ((counter >= ARMV7_COUNTER0) && (counter <= ARMV7_COUNTER_LAST))
		ret = pmnc & ARMV7_FLAG_P(counter);
	else
		pr_err("CPU%u checking wrong counter %d overflow status\n",
			smp_processor_id(), counter);

	return ret;
}

static inline int armv7_pmnc_select_counter(unsigned int idx)
{
	u32 val;

	if ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST)) {
		pr_err("CPU%u selecting wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	val = (idx - ARMV7_EVENT_CNT_TO_CNTx) & ARMV7_SELECT_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
	isb();

	return idx;
}
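
/*
 * The isb() above ensures the counter selection has taken effect before
 * the callers below access the banked PMXEVCNTR/PMXEVTYPER registers of
 * the currently selected counter.
 */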

static inline u32 armv7pmu_read_counter(int idx)
{
	unsigned long value = 0;

	if (idx == ARMV7_CYCLE_COUNTER)
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
		if (armv7_pmnc_select_counter(idx) == idx)
			asm volatile("mrc p15, 0, %0, c9, c13, 2"
				     : "=r" (value));
	} else
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);

	return value;
}

static inline void armv7pmu_write_counter(int idx, u32 value)
{
	if (idx == ARMV7_CYCLE_COUNTER)
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
		if (armv7_pmnc_select_counter(idx) == idx)
			asm volatile("mcr p15, 0, %0, c9, c13, 2"
				     : : "r" (value));
	} else
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
}

static inline void armv7_pmnc_write_evtsel(unsigned int idx, u32 val)
{
	if (armv7_pmnc_select_counter(idx) == idx) {
		val &= ARMV7_EVTSEL_MASK;
		asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
	}
}

static inline u32 armv7_pmnc_enable_counter(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u enabling wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_CNTENS_C;
	else
		val = ARMV7_CNTENS_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));

	return idx;
}

static inline u32 armv7_pmnc_disable_counter(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u disabling wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_CNTENC_C;
	else
		val = ARMV7_CNTENC_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));

	return idx;
}

static inline u32 armv7_pmnc_enable_intens(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u enabling interrupt for wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_INTENS_C;
	else
		val = ARMV7_INTENS_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));

	return idx;
}

static inline u32 armv7_pmnc_disable_intens(unsigned int idx)
{
	u32 val;

	if ((idx != ARMV7_CYCLE_COUNTER) &&
	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
		pr_err("CPU%u disabling interrupt for wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -1;
	}

	if (idx == ARMV7_CYCLE_COUNTER)
		val = ARMV7_INTENC_C;
	else
		val = ARMV7_INTENC_P(idx);

	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));

	return idx;
}

static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
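
/*
 * The overflow flag register is write-one-to-clear, so writing back the
 * value just read acknowledges exactly the overflows the caller is about
 * to handle.
 */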

#ifdef DEBUG
static void armv7_pmnc_dump_regs(void)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	for (cnt = ARMV7_COUNTER0; cnt < ARMV7_COUNTER_LAST; cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			cnt - ARMV7_EVENT_CNT_TO_CNTx, val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			cnt - ARMV7_EVENT_CNT_TO_CNTx, val);
	}
}
#endif

static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't need to set the event if it's a cycle count
	 */
	if (idx != ARMV7_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}
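
/*
 * Note the ordering above: the counter is stopped before its event type is
 * rewritten, so an enabled counter can never count with a half-updated
 * configuration.
 */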

static void armv7pmu_disable_event(struct hw_perf_event *hwc, int idx)
{
	unsigned long flags;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	unsigned long pmnc;
	struct perf_sample_data data;
	struct cpu_hw_events *cpuc;
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	perf_sample_data_init(&data, 0);

	cpuc = &__get_cpu_var(cpu_hw_events);
	for (idx = 0; idx <= armpmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		if (!test_bit(idx, cpuc->active_mask))
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event, hwc, idx, 1);
		data.period = event->hw.last_period;
		if (!armpmu_event_set_period(event, hwc, idx))
			continue;

		if (perf_event_overflow(event, &data, regs))
			armpmu->disable(hwc, idx);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}

static void armv7pmu_start(void)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&pmu_lock, flags);
	/* Enable all counters */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static void armv7pmu_stop(void)
{
	unsigned long flags;

	raw_spin_lock_irqsave(&pmu_lock, flags);
	/* Disable all counters */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&pmu_lock, flags);
}

static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
				  struct hw_perf_event *event)
{
	int idx;

	/* Always place a cycle counter into the cycle counter. */
	if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV7_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV7_CYCLE_COUNTER;
	} else {
		/*
		 * For anything other than a cycle counter, try to use
		 * one of the event counters.
		 */
		for (idx = ARMV7_COUNTER0; idx <= armpmu->num_events; ++idx) {
			if (!test_and_set_bit(idx, cpuc->used_mask))
				return idx;
		}

		/* The counters are all in use. */
		return -EAGAIN;
	}
}

static void armv7pmu_reset(void *info)
{
	u32 idx, nb_cnt = armpmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = 1; idx < nb_cnt; ++idx)
		armv7pmu_disable_event(NULL, idx);

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}

static struct arm_pmu armv7pmu = {
	.handle_irq		= armv7pmu_handle_irq,
	.enable			= armv7pmu_enable_event,
	.disable		= armv7pmu_disable_event,
	.read_counter		= armv7pmu_read_counter,
	.write_counter		= armv7pmu_write_counter,
	.get_event_idx		= armv7pmu_get_event_idx,
	.start			= armv7pmu_start,
	.stop			= armv7pmu_stop,
	.reset			= armv7pmu_reset,
	.raw_event_mask		= 0xFF,
	.max_period		= (1LLU << 32) - 1,
};
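
/*
 * max_period is 2^32 - 1 because every ARMv7 PMU counter, including the
 * cycle counter as configured here (the PMNC D bit is never set), is
 * 32 bits wide.
 */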

static u32 __init armv7_read_num_pmnc_events(void)
{
	u32 nb_cnt;

	/* Read the nb of CNTx counters supported from PMNC */
	nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter and return */
	return nb_cnt + 1;
}
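
/*
 * For example, a PMNC value with bits [15:11] = 0b00100 advertises four
 * event counters, so this returns 5: the four event counters plus the
 * always-present cycle counter.
 */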

static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA8;
	armv7pmu.name		= "ARMv7 Cortex-A8";
	armv7pmu.cache_map	= &armv7_a8_perf_cache_map;
	armv7pmu.event_map	= &armv7_a8_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA9;
	armv7pmu.name		= "ARMv7 Cortex-A9";
	armv7pmu.cache_map	= &armv7_a9_perf_cache_map;
	armv7pmu.event_map	= &armv7_a9_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA5;
	armv7pmu.name		= "ARMv7 Cortex-A5";
	armv7pmu.cache_map	= &armv7_a5_perf_cache_map;
	armv7pmu.event_map	= &armv7_a5_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}

static const struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA15;
	armv7pmu.name		= "ARMv7 Cortex-A15";
	armv7pmu.cache_map	= &armv7_a15_perf_cache_map;
	armv7pmu.event_map	= &armv7_a15_perf_map;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
#else
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	return NULL;
}

static const struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	return NULL;
}
#endif	/* CONFIG_CPU_V7 */