v6.2
/*
 * arch/xtensa/kernel/align.S
 *
 * Handle unalignment exceptions in kernel space.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica, Inc.
 * Copyright (C) 2014 Cadence Design Systems Inc.
 *
 * Rewritten by Chris Zankel <chris@zankel.net>
 *
 * Based on work from Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 * and Marc Gauthier <marc@tensilica.com, marc@alimni.uwaterloo.ca>
 */

#include <linux/linkage.h>
#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/asmmacro.h>
#include <asm/processor.h>

#if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION

/*  First-level exception handler for unaligned exceptions.
 *
 *  Note: This handler works only for kernel exceptions.  Unaligned user
 *        access should get a seg fault.
 */

/* Big and little endian 16-bit values are located in
 * different halves of a register.  HWORD_START helps to
 * abstract the notion of extracting a 16-bit value from a
 * register.
 * We also have to define new shifting instructions because
 * lsb and msb are on 'opposite' ends in a register for
 * different endian machines.
 *
 * Assume a memory region in ascending address:
 *   	0 1 2 3|4 5 6 7
 *
 * When loading one word into a register, the content of that register is:
 *  LE	3 2 1 0, 7 6 5 4
 *  BE  0 1 2 3, 4 5 6 7
 *
 * Masking the bits of the higher/lower address means:
 *  LE  X X 0 0, 0 0 X X
 *  BE	0 0 X X, X X 0 0
 *
 * Shifting to higher/lower addresses, means:
 *  LE  shift left / shift right
 *  BE  shift right / shift left
 *
 * Extracting 16 bits from a 32 bit reg. value to higher/lower address means:
 *  LE  mask 0 0 X X / shift left
 *  BE  shift left / mask 0 0 X X
 */
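
/* A minimal C sketch of the recombination described above (illustrative
 * only; the helper name and C form are hypothetical, not part of the
 * kernel).  For little endian, __ssa8 sets SAR from the low address bits
 * and __src_b funnel-shifts two aligned words, assuming both words
 * around the fault address are mapped and readable:
 *
 *	uint32_t load32_unaligned_le(const uint8_t *p)	// needs <stdint.h>
 *	{
 *		const uint32_t *w = (const uint32_t *)((uintptr_t)p & ~3u);
 *		unsigned int shift = ((uintptr_t)p & 3) * 8;
 *		uint64_t pair = ((uint64_t)w[1] << 32) | w[0];
 *
 *		return (uint32_t)(pair >> shift);
 *	}
 */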

#if XCHAL_HAVE_WINDOWED
#define UNALIGNED_USER_EXCEPTION
#endif

#if XCHAL_HAVE_BE

#define HWORD_START	16
#define	INSN_OP0	28
#define	INSN_T		24
#define	INSN_OP1	16

.macro __ssa8r	r;		ssa8l	\r;		.endm
.macro __sh	r, s;		srl	\r, \s;		.endm
.macro __sl	r, s;		sll	\r, \s;		.endm
.macro __exth	r, s;		extui	\r, \s, 0, 16;	.endm
.macro __extl	r, s;		slli	\r, \s, 16;	.endm

#else

#define HWORD_START	0
#define	INSN_OP0	0
#define	INSN_T		4
#define	INSN_OP1	12

.macro __ssa8r	r;		ssa8b	\r;		.endm
.macro __sh	r, s;		sll	\r, \s;		.endm
.macro __sl	r, s;		srl	\r, \s;		.endm
.macro __exth	r, s;		slli	\r, \s, 16;	.endm
.macro __extl	r, s;		extui	\r, \s, 0, 16;	.endm

#endif

/*
 *	xxxx xxxx = imm8 field
 *	     yyyy = imm4 field
 *	     ssss = s field
 *	     tttt = t field
 *
 *	       		 16		    0
 *		          -------------------
 *	L32I.N		  yyyy ssss tttt 1000
 *	S32I.N	          yyyy ssss tttt 1001
 *
 *	       23			    0
 *		-----------------------------
 *	res	          0000           0010
 *	L16UI	xxxx xxxx 0001 ssss tttt 0010
 *	L32I	xxxx xxxx 0010 ssss tttt 0010
 *	XXX	          0011 ssss tttt 0010
 *	XXX	          0100 ssss tttt 0010
 *	S16I	xxxx xxxx 0101 ssss tttt 0010
 *	S32I	xxxx xxxx 0110 ssss tttt 0010
 *	XXX	          0111 ssss tttt 0010
 *	XXX	          1000 ssss tttt 0010
 *	L16SI	xxxx xxxx 1001 ssss tttt 0010
 *	XXX	          1010           0010
 *      **L32AI	xxxx xxxx 1011 ssss tttt 0010 unsupported
 *	XXX	          1100           0010
 *	XXX	          1101           0010
 *	XXX	          1110           0010
 *	**S32RI	xxxx xxxx 1111 ssss tttt 0010 unsupported
 *		-----------------------------
 *                           ^         ^    ^
 *    sub-opcode (NIBBLE_R) -+         |    |
 *       t field (NIBBLE_T) -----------+    |
 *  major opcode (NIBBLE_OP0) --------------+
 */

#define OP0_L32I_N	0x8		/* load immediate narrow */
#define OP0_S32I_N	0x9		/* store immediate narrow */
#define OP1_SI_MASK	0x4		/* OP1 bit set for stores */
#define OP1_SI_BIT	2		/* OP1 bit number for stores */

#define OP1_L32I	0x2
#define OP1_L16UI	0x1
#define OP1_L16SI	0x9
#define OP1_L32AI	0xb

#define OP1_S32I	0x6
#define OP1_S16I	0x5
#define OP1_S32RI	0xf
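
/* With the little-endian field offsets above (INSN_OP0 = 0, INSN_T = 4,
 * INSN_OP1 = 12), the nibble extraction done with extui below corresponds
 * to this C sketch (illustrative only, not part of the kernel):
 *
 *	unsigned int op0 = (insn >> INSN_OP0) & 0xf;	// major opcode
 *	unsigned int t   = (insn >> INSN_T) & 0xf;	// target/source reg
 *	unsigned int op1 = (insn >> INSN_OP1) & 0xf;	// sub-opcode
 *
 * Bit OP1_SI_BIT of op1 is set for the stores handled here (S16I = 0x5,
 * S32I = 0x6) and clear for the supported loads, which is what the
 * _bbci.l test below relies on.
 */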

/*
 * Entry condition:
 *
 *   a0:	trashed, original value saved on stack (PT_AREG0)
 *   a1:	a1
 *   a2:	new stack pointer, original in DEPC
 *   a3:	a3
 *   depc:	a2, original value saved on stack (PT_DEPC)
 *   excsave_1:	dispatch table
 *
 *   PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
 *	     <  VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
 */

	.literal_position
ENTRY(fast_unaligned)

	/* Note: We don't expect the address to be aligned on a word
	 *       boundary. After all, the processor generated that exception
	 *       and it would be a hardware fault.
	 */

	/* Save some working registers */

	s32i	a4, a2, PT_AREG4
	s32i	a5, a2, PT_AREG5
	s32i	a6, a2, PT_AREG6
	s32i	a7, a2, PT_AREG7
	s32i	a8, a2, PT_AREG8

	rsr	a0, depc
	s32i	a0, a2, PT_AREG2
	s32i	a3, a2, PT_AREG3

	rsr	a3, excsave1
	movi	a4, fast_unaligned_fixup
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Keep value of SAR in a0 */

	rsr	a0, sar
	rsr	a8, excvaddr		# load unaligned memory address

	/* Now, identify one of the following load/store instructions.
	 *
	 * The only possible danger of a double exception on the
	 * following l32i instructions is kernel code in vmalloc
	 * memory. The processor was just executing at the EPC_1
	 * address, and indeed, already fetched the instruction.  That
	 * guarantees a TLB mapping, which hasn't been replaced by
	 * this unaligned exception handler that uses only static TLB
	 * mappings. However, high-level interrupt handlers might
	 * modify TLB entries, so for the generic case, we register a
	 * TABLE_FIXUP handler here, too.
	 */

	/* a3...a8 saved on stack, a2 = SP */

	/* Extract the instruction that caused the unaligned access. */

	rsr	a7, epc1	# load exception address
	movi	a3, ~3
	and	a3, a3, a7	# mask lower bits

	l32i	a4, a3, 0	# load 2 words
	l32i	a5, a3, 4

	__ssa8	a7
	__src_b	a4, a4, a5	# a4 has the instruction
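
	/* The faulting instruction may itself cross a word boundary, so
	 * two aligned words were fetched; with SAR set from EPC_1 by
	 * __ssa8, the __src_b funnel shift leaves the instruction's
	 * bytes in a4.
	 */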

	/* Analyze the instruction (load or store?). */

	extui	a5, a4, INSN_OP0, 4	# get insn.op0 nibble

#if XCHAL_HAVE_DENSITY
	_beqi	a5, OP0_L32I_N, .Lload	# L32I.N, jump
	addi	a6, a5, -OP0_S32I_N
	_beqz	a6, .Lstore		# S32I.N, do a store
#endif
	/* 'store indicator bit' not set, jump */
	_bbci.l	a4, OP1_SI_BIT + INSN_OP1, .Lload

	/* Store: Jump to table entry to get the value in the source register.*/

.Lstore:movi	a5, .Lstore_table	# table
	extui	a6, a4, INSN_T, 4	# get source register
	addx8	a5, a6, a5
	jx	a5			# jump into table
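
	/* Each entry in .Lstore_table (and .Lload_table) below is padded
	 * to 8 bytes (.align 8), so addx8 (a5 + 8 * regnum) lands exactly
	 * on the entry for the register encoded in the t field.
	 */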

	/* Load: Load memory address. */

.Lload: movi	a3, ~3
	and	a3, a3, a8		# align memory address

	__ssa8	a8
#ifdef UNALIGNED_USER_EXCEPTION
	addi	a3, a3, 8
	l32e	a5, a3, -8
	l32e	a6, a3, -4
#else
	l32i	a5, a3, 0
	l32i	a6, a3, 4
#endif
	__src_b	a3, a5, a6		# a3 has the data word

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# increment PC (assume 16-bit insn)

	extui	a5, a4, INSN_OP0, 4
	_beqi	a5, OP0_L32I_N, 1f	# l32i.n: jump

	addi	a7, a7, 1
#else
	addi	a7, a7, 3
#endif

	extui	a5, a4, INSN_OP1, 4
	_beqi	a5, OP1_L32I, 1f	# l32i: jump

	extui	a3, a3, 0, 16		# extract lower 16 bits
	_beqi	a5, OP1_L16UI, 1f
	addi	a5, a5, -OP1_L16SI
	_bnez	a5, .Linvalid_instruction_load

	/* sign extend value */

	slli	a3, a3, 16
	srai	a3, a3, 16
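					# e.g. 0x8123 -> 0xffff8123,
					# while 0x7123 stays 0x00007123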

	/* Set target register. */

1:
	extui	a4, a4, INSN_T, 4	# extract target register
	movi	a5, .Lload_table
	addx8	a4, a4, a5
	jx	a4			# jump to entry for target register

	.align	8
.Lload_table:
	s32i	a3, a2, PT_AREG0;	_j .Lexit;	.align 8
	mov	a1, a3;			_j .Lexit;	.align 8 # fishy??
	s32i	a3, a2, PT_AREG2;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG3;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG4;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG5;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG6;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG7;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG8;	_j .Lexit;	.align 8
	mov	a9, a3		;	_j .Lexit;	.align 8
	mov	a10, a3		;	_j .Lexit;	.align 8
	mov	a11, a3		;	_j .Lexit;	.align 8
	mov	a12, a3		;	_j .Lexit;	.align 8
	mov	a13, a3		;	_j .Lexit;	.align 8
	mov	a14, a3		;	_j .Lexit;	.align 8
	mov	a15, a3		;	_j .Lexit;	.align 8

.Lstore_table:
	l32i	a3, a2, PT_AREG0;	_j 1f;	.align 8
	mov	a3, a1;			_j 1f;	.align 8	# fishy??
	l32i	a3, a2, PT_AREG2;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG3;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG4;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG5;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG6;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG7;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG8;	_j 1f;	.align 8
	mov	a3, a9		;	_j 1f;	.align 8
	mov	a3, a10		;	_j 1f;	.align 8
	mov	a3, a11		;	_j 1f;	.align 8
	mov	a3, a12		;	_j 1f;	.align 8
	mov	a3, a13		;	_j 1f;	.align 8
	mov	a3, a14		;	_j 1f;	.align 8
	mov	a3, a15		;	_j 1f;	.align 8

	/* We cannot handle this exception. */

	.extern _kernel_exception
.Linvalid_instruction_load:
.Linvalid_instruction_store:

	movi	a4, 0
	rsr	a3, excsave1
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	wsr	a0, sar
	mov	a1, a2

	rsr	a0, ps
	bbsi.l  a0, PS_UM_BIT, 2f     # jump if user mode

	movi	a0, _kernel_exception
	jx	a0

2:	movi	a0, _user_exception
	jx	a0

1: 	# a7: instruction pointer, a4: instruction, a3: value

	movi	a6, 0			# mask: ffffffff:00000000

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# incr. PC, assume 16-bit instruction

	extui	a5, a4, INSN_OP0, 4	# extract OP0
	addi	a5, a5, -OP0_S32I_N
	_beqz	a5, 1f			# s32i.n: jump

	addi	a7, a7, 1		# increment PC, 32-bit instruction
#else
	addi	a7, a7, 3		# increment PC, 32-bit instruction
#endif

	extui	a5, a4, INSN_OP1, 4	# extract OP1
	_beqi	a5, OP1_S32I, 1f	# jump if 32 bit store
	_bnei	a5, OP1_S16I, .Linvalid_instruction_store

	movi	a5, -1
	__extl	a3, a3			# get 16-bit value
	__exth	a6, a5			# get 16-bit mask ffffffff:ffff0000

	/* Get memory address */

1:
	movi	a4, ~3
	and	a4, a4, a8		# align memory address

	/* Insert value into memory */

	movi	a5, -1			# mask: ffffffff:XXXX0000
#ifdef UNALIGNED_USER_EXCEPTION
	addi	a4, a4, 8
#endif

	__ssa8r a8
	__src_b	a8, a5, a6		# lo-mask  F..F0..0 (BE) 0..0F..F (LE)
	__src_b	a6, a6, a5		# hi-mask  0..0F..F (BE) F..F0..0 (LE)
#ifdef UNALIGNED_USER_EXCEPTION
	l32e	a5, a4, -8
#else
	l32i	a5, a4, 0		# load lower address word
#endif
	and	a5, a5, a8		# mask
	__sh	a8, a3 			# shift value
	or	a5, a5, a8		# or with original value
#ifdef UNALIGNED_USER_EXCEPTION
	s32e	a5, a4, -8
	l32e	a8, a4, -4
#else
	s32i	a5, a4, 0		# store
	l32i	a8, a4, 4		# same for upper address word
#endif
	__sl	a5, a3
	and	a6, a8, a6
	or	a6, a6, a5
#ifdef UNALIGNED_USER_EXCEPTION
	s32e	a6, a4, -4
#else
	s32i	a6, a4, 4
#endif
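
	/* For the little-endian case, the masked read-modify-write
	 * sequence above behaves like this C sketch (illustrative only;
	 * the helper and its width_mask parameter are hypothetical:
	 * 0xffff for S16I, 0xffffffff for S32I).  Note both words are
	 * always written, even when the access does not actually cross
	 * the word boundary:
	 *
	 *	void store_unaligned_le(uint8_t *addr, uint32_t value,
	 *				uint32_t width_mask)
	 *	{
	 *		uint32_t *p = (uint32_t *)((uintptr_t)addr & ~3u);
	 *		unsigned int off = (uintptr_t)addr & 3;
	 *		uint64_t keep = ~((uint64_t)width_mask << (8 * off));
	 *		uint64_t ins = ((uint64_t)(value & width_mask)) << (8 * off);
	 *		uint64_t both = ((uint64_t)p[1] << 32) | p[0];
	 *
	 *		both = (both & keep) | ins;
	 *		p[0] = (uint32_t)both;
	 *		p[1] = (uint32_t)(both >> 32);
	 *	}
	 */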

.Lexit:
#if XCHAL_HAVE_LOOPS
	rsr	a4, lend		# check if we reached LEND
	bne	a7, a4, 1f
	rsr	a4, lcount		# and LCOUNT != 0
	beqz	a4, 1f
	addi	a4, a4, -1		# decrement LCOUNT and set
	rsr	a7, lbeg		# set PC to LBEGIN
	wsr	a4, lcount
#endif

1:	wsr	a7, epc1		# skip emulated instruction

	/* Update icount if we're single-stepping in userspace. */
	rsr	a4, icountlevel
	beqz	a4, 1f
	bgeui	a4, LOCKLEVEL + 1, 1f
	rsr	a4, icount
	addi	a4, a4, 1
	wsr	a4, icount
1:
	movi	a4, 0
	rsr	a3, excsave1
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Restore working registers */

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	l32i	a3, a2, PT_AREG3

	/* restore SAR and return */

	wsr	a0, sar
	l32i	a0, a2, PT_AREG0
	l32i	a2, a2, PT_AREG2
	rfe

ENDPROC(fast_unaligned)

ENTRY(fast_unaligned_fixup)

	l32i	a2, a3, EXC_TABLE_DOUBLE_SAVE
	wsr	a3, excsave1

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	l32i	a0, a2, PT_AREG2
	xsr	a0, depc			# restore depc and a0
	wsr	a0, sar

	rsr	a0, exccause
	s32i	a0, a2, PT_DEPC			# mark as a regular exception

	rsr	a0, ps
	bbsi.l  a0, PS_UM_BIT, 1f		# jump if user mode

	rsr	a0, exccause
	addx4	a0, a0, a3              	# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_KERNEL   # load handler
	l32i	a3, a2, PT_AREG3
	jx	a0
1:
	rsr	a0, exccause
	addx4	a0, a0, a3              	# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_USER     # load handler
	l32i	a3, a2, PT_AREG3
	jx	a0

ENDPROC(fast_unaligned_fixup)

#endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */