v4.17
  1/*
  2 * Simple sanity test for emulate_step load/store instructions.
  3 *
  4 * Copyright IBM Corp. 2016
  5 *
  6 * This program is free software;  you can redistribute it and/or modify
  7 * it under the terms of the GNU General Public License as published by
  8 * the Free Software Foundation; either version 2 of the License, or
  9 * (at your option) any later version.
 10 */
 11
 12#define pr_fmt(fmt) "emulate_step_test: " fmt
 13
 14#include <linux/ptrace.h>
 15#include <asm/sstep.h>
 16#include <asm/ppc-opcode.h>
 17
 18#define IMM_L(i)		((uintptr_t)(i) & 0xffff)
 19
 20/*
 21 * Defined with the TEST_ prefix so they do not conflict with other
 22 * definitions.
 23 */
 24#define TEST_LD(r, base, i)	(PPC_INST_LD | ___PPC_RT(r) |		\
 25					___PPC_RA(base) | IMM_L(i))
 26#define TEST_LWZ(r, base, i)	(PPC_INST_LWZ | ___PPC_RT(r) |		\
 27					___PPC_RA(base) | IMM_L(i))
 28#define TEST_LWZX(t, a, b)	(PPC_INST_LWZX | ___PPC_RT(t) |		\
 29					___PPC_RA(a) | ___PPC_RB(b))
 30#define TEST_STD(r, base, i)	(PPC_INST_STD | ___PPC_RS(r) |		\
 31					___PPC_RA(base) | ((i) & 0xfffc))
 32#define TEST_LDARX(t, a, b, eh)	(PPC_INST_LDARX | ___PPC_RT(t) |	\
 33					___PPC_RA(a) | ___PPC_RB(b) |	\
 34					__PPC_EH(eh))
 35#define TEST_STDCX(s, a, b)	(PPC_INST_STDCX | ___PPC_RS(s) |	\
 36					___PPC_RA(a) | ___PPC_RB(b))
 37#define TEST_LFSX(t, a, b)	(PPC_INST_LFSX | ___PPC_RT(t) |		\
 38					___PPC_RA(a) | ___PPC_RB(b))
 39#define TEST_STFSX(s, a, b)	(PPC_INST_STFSX | ___PPC_RS(s) |	\
 40					___PPC_RA(a) | ___PPC_RB(b))
 41#define TEST_LFDX(t, a, b)	(PPC_INST_LFDX | ___PPC_RT(t) |		\
 42					___PPC_RA(a) | ___PPC_RB(b))
 43#define TEST_STFDX(s, a, b)	(PPC_INST_STFDX | ___PPC_RS(s) |	\
 44					___PPC_RA(a) | ___PPC_RB(b))
 45#define TEST_LVX(t, a, b)	(PPC_INST_LVX | ___PPC_RT(t) |		\
 46					___PPC_RA(a) | ___PPC_RB(b))
 47#define TEST_STVX(s, a, b)	(PPC_INST_STVX | ___PPC_RS(s) |		\
 48					___PPC_RA(a) | ___PPC_RB(b))
 49#define TEST_LXVD2X(s, a, b)	(PPC_INST_LXVD2X | VSX_XX1((s), R##a, R##b))
 50#define TEST_STXVD2X(s, a, b)	(PPC_INST_STXVD2X | VSX_XX1((s), R##a, R##b))
 51
 52
 53static void __init init_pt_regs(struct pt_regs *regs)
 54{
 55	static unsigned long msr;
 56	static bool msr_cached;
 57
 58	memset(regs, 0, sizeof(struct pt_regs));
 59
 60	if (likely(msr_cached)) {
 61		regs->msr = msr;
 62		return;
 63	}
 64
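	/*
	 * Read the live MSR and enable FP/VEC/VSX in the register image so
	 * the FP, Altivec and VSX load/store tests see those facilities as
	 * available.
	 */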
 65	asm volatile("mfmsr %0" : "=r"(regs->msr));
 66
 67	regs->msr |= MSR_FP;
 68	regs->msr |= MSR_VEC;
 69	regs->msr |= MSR_VSX;
 70
 71	msr = regs->msr;
 72	msr_cached = true;
 73}
 74
 75static void __init show_result(char *ins, char *result)
 76{
 77	pr_info("%-14s : %s\n", ins, result);
 78}
 79
 80static void __init test_ld(void)
 81{
 82	struct pt_regs regs;
 83	unsigned long a = 0x23;
 84	int stepped = -1;
 85
 86	init_pt_regs(&regs);
 87	regs.gpr[3] = (unsigned long) &a;
 88
 89	/* ld r5, 0(r3) */
 90	stepped = emulate_step(&regs, TEST_LD(5, 3, 0));
 91
 92	if (stepped == 1 && regs.gpr[5] == a)
 93		show_result("ld", "PASS");
 94	else
 95		show_result("ld", "FAIL");
 96}
 97
 98static void __init test_lwz(void)
 99{
100	struct pt_regs regs;
101	unsigned int a = 0x4545;
102	int stepped = -1;
103
104	init_pt_regs(&regs);
105	regs.gpr[3] = (unsigned long) &a;
106
107	/* lwz r5, 0(r3) */
108	stepped = emulate_step(&regs, TEST_LWZ(5, 3, 0));
109
110	if (stepped == 1 && regs.gpr[5] == a)
111		show_result("lwz", "PASS");
112	else
113		show_result("lwz", "FAIL");
114}
115
116static void __init test_lwzx(void)
117{
118	struct pt_regs regs;
119	unsigned int a[3] = {0x0, 0x0, 0x1234};
120	int stepped = -1;
121
122	init_pt_regs(&regs);
123	regs.gpr[3] = (unsigned long) a;
124	regs.gpr[4] = 8;
125	regs.gpr[5] = 0x8765;
126
127	/* lwzx r5, r3, r4 */
128	stepped = emulate_step(&regs, TEST_LWZX(5, 3, 4));
129	if (stepped == 1 && regs.gpr[5] == a[2])
130		show_result("lwzx", "PASS");
131	else
132		show_result("lwzx", "FAIL");
133}
134
135static void __init test_std(void)
136{
137	struct pt_regs regs;
138	unsigned long a = 0x1234;
139	int stepped = -1;
140
141	init_pt_regs(&regs);
142	regs.gpr[3] = (unsigned long) &a;
143	regs.gpr[5] = 0x5678;
144
145	/* std r5, 0(r3) */
146	stepped = emulate_step(&regs, TEST_STD(5, 3, 0));
147	if (stepped == 1 && regs.gpr[5] == a)
148		show_result("std", "PASS");
149	else
150		show_result("std", "FAIL");
151}
152
153static void __init test_ldarx_stdcx(void)
154{
155	struct pt_regs regs;
156	unsigned long a = 0x1234;
157	int stepped = -1;
158	unsigned long cr0_eq = 0x1 << 29; /* eq bit of CR0 */
159
160	init_pt_regs(&regs);
161	asm volatile("mfcr %0" : "=r"(regs.ccr));
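	/* Seed the CR image from the live CR; the emulated stdcx. below updates CR0 within this image. */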
162
163
164	/*** ldarx ***/
165
166	regs.gpr[3] = (unsigned long) &a;
167	regs.gpr[4] = 0;
168	regs.gpr[5] = 0x5678;
169
170	/* ldarx r5, r3, r4, 0 */
171	stepped = emulate_step(&regs, TEST_LDARX(5, 3, 4, 0));
172
173	/*
174	 * Don't touch 'a' here. Touching 'a' can trigger a load/store
175	 * of 'a', which results in failure of the subsequent stdcx.
176	 * Instead, compare against a hardcoded value.
177	 */
178	if (stepped <= 0 || regs.gpr[5] != 0x1234) {
179		show_result("ldarx / stdcx.", "FAIL (ldarx)");
180		return;
181	}
182
183
184	/*** stdcx. ***/
185
186	regs.gpr[5] = 0x9ABC;
187
188	/* stdcx. r5, r3, r4 */
189	stepped = emulate_step(&regs, TEST_STDCX(5, 3, 4));
190
191	/*
192	 * Two possible scenarios indicate successful emulation
193	 * of stdcx.:
194	 *  1. The reservation is active and the store is performed. In this
195	 *     case the cr0.eq bit will be set to 1.
196	 *  2. The reservation is not active and the store is not performed.
197	 *     In this case the cr0.eq bit will be set to 0.
198	 */
199	if (stepped == 1 && ((regs.gpr[5] == a && (regs.ccr & cr0_eq))
200			|| (regs.gpr[5] != a && !(regs.ccr & cr0_eq))))
201		show_result("ldarx / stdcx.", "PASS");
202	else
203		show_result("ldarx / stdcx.", "FAIL (stdcx.)");
204}
205
206#ifdef CONFIG_PPC_FPU
207static void __init test_lfsx_stfsx(void)
208{
209	struct pt_regs regs;
210	union {
211		float a;
212		int b;
213	} c;
214	int cached_b;
215	int stepped = -1;
216
217	init_pt_regs(&regs);
218
219
220	/*** lfsx ***/
221
222	c.a = 123.45;
223	cached_b = c.b;
224
225	regs.gpr[3] = (unsigned long) &c.a;
226	regs.gpr[4] = 0;
227
228	/* lfsx frt10, r3, r4 */
229	stepped = emulate_step(&regs, TEST_LFSX(10, 3, 4));
230
231	if (stepped == 1)
232		show_result("lfsx", "PASS");
233	else
234		show_result("lfsx", "FAIL");
235
236
237	/*** stfsx ***/
238
239	c.a = 678.91;
240
241	/* stfsx frs10, r3, r4 */
242	stepped = emulate_step(&regs, TEST_STFSX(10, 3, 4));
243
244	if (stepped == 1 && c.b == cached_b)
245		show_result("stfsx", "PASS");
246	else
247		show_result("stfsx", "FAIL");
248}
249
250static void __init test_lfdx_stfdx(void)
251{
252	struct pt_regs regs;
253	union {
254		double a;
255		long b;
256	} c;
257	long cached_b;
258	int stepped = -1;
259
260	init_pt_regs(&regs);
261
262
263	/*** lfdx ***/
264
265	c.a = 123456.78;
266	cached_b = c.b;
267
268	regs.gpr[3] = (unsigned long) &c.a;
269	regs.gpr[4] = 0;
270
271	/* lfdx frt10, r3, r4 */
272	stepped = emulate_step(&regs, TEST_LFDX(10, 3, 4));
273
274	if (stepped == 1)
275		show_result("lfdx", "PASS");
276	else
277		show_result("lfdx", "FAIL");
278
279
280	/*** stfdx ***/
281
282	c.a = 987654.32;
283
284	/* stfdx frs10, r3, r4 */
285	stepped = emulate_step(&regs, TEST_STFDX(10, 3, 4));
286
287	if (stepped == 1 && c.b == cached_b)
288		show_result("stfdx", "PASS");
289	else
290		show_result("stfdx", "FAIL");
291}
292#else
293static void __init test_lfsx_stfsx(void)
294{
295	show_result("lfsx", "SKIP (CONFIG_PPC_FPU is not set)");
296	show_result("stfsx", "SKIP (CONFIG_PPC_FPU is not set)");
297}
298
299static void __init test_lfdx_stfdx(void)
300{
301	show_result("lfdx", "SKIP (CONFIG_PPC_FPU is not set)");
302	show_result("stfdx", "SKIP (CONFIG_PPC_FPU is not set)");
303}
304#endif /* CONFIG_PPC_FPU */
305
306#ifdef CONFIG_ALTIVEC
307static void __init test_lvx_stvx(void)
308{
309	struct pt_regs regs;
310	union {
311		vector128 a;
312		u32 b[4];
313	} c;
314	u32 cached_b[4];
315	int stepped = -1;
316
317	init_pt_regs(&regs);
318
319
320	/*** lvx ***/
321
322	cached_b[0] = c.b[0] = 923745;
323	cached_b[1] = c.b[1] = 2139478;
324	cached_b[2] = c.b[2] = 9012;
325	cached_b[3] = c.b[3] = 982134;
326
327	regs.gpr[3] = (unsigned long) &c.a;
328	regs.gpr[4] = 0;
329
330	/* lvx vrt10, r3, r4 */
331	stepped = emulate_step(&regs, TEST_LVX(10, 3, 4));
332
333	if (stepped == 1)
334		show_result("lvx", "PASS");
335	else
336		show_result("lvx", "FAIL");
337
338
339	/*** stvx ***/
340
341	c.b[0] = 4987513;
342	c.b[1] = 84313948;
343	c.b[2] = 71;
344	c.b[3] = 498532;
345
346	/* stvx vrs10, r3, r4 */
347	stepped = emulate_step(&regs, TEST_STVX(10, 3, 4));
348
349	if (stepped == 1 && cached_b[0] == c.b[0] && cached_b[1] == c.b[1] &&
350	    cached_b[2] == c.b[2] && cached_b[3] == c.b[3])
351		show_result("stvx", "PASS");
352	else
353		show_result("stvx", "FAIL");
354}
355#else
356static void __init test_lvx_stvx(void)
357{
358	show_result("lvx", "SKIP (CONFIG_ALTIVEC is not set)");
359	show_result("stvx", "SKIP (CONFIG_ALTIVEC is not set)");
360}
361#endif /* CONFIG_ALTIVEC */
362
363#ifdef CONFIG_VSX
364static void __init test_lxvd2x_stxvd2x(void)
365{
366	struct pt_regs regs;
367	union {
368		vector128 a;
369		u32 b[4];
370	} c;
371	u32 cached_b[4];
372	int stepped = -1;
373
374	init_pt_regs(&regs);
375
376
377	/*** lxvd2x ***/
378
379	cached_b[0] = c.b[0] = 18233;
380	cached_b[1] = c.b[1] = 34863571;
381	cached_b[2] = c.b[2] = 834;
382	cached_b[3] = c.b[3] = 6138911;
383
384	regs.gpr[3] = (unsigned long) &c.a;
385	regs.gpr[4] = 0;
386
387	/* lxvd2x vsr39, r3, r4 */
388	stepped = emulate_step(&regs, TEST_LXVD2X(39, 3, 4));
389
390	if (stepped == 1)
391		show_result("lxvd2x", "PASS");
392	else
393		show_result("lxvd2x", "FAIL");
394
395
396	/*** stxvd2x ***/
397
398	c.b[0] = 21379463;
399	c.b[1] = 87;
400	c.b[2] = 374234;
401	c.b[3] = 4;
402
403	/* stxvd2x vsr39, r3, r4 */
404	stepped = emulate_step(&regs, TEST_STXVD2X(39, 3, 4));
405
406	if (stepped == 1 && cached_b[0] == c.b[0] && cached_b[1] == c.b[1] &&
407	    cached_b[2] == c.b[2] && cached_b[3] == c.b[3])
408		show_result("stxvd2x", "PASS");
409	else
410		show_result("stxvd2x", "FAIL");
411}
412#else
413static void __init test_lxvd2x_stxvd2x(void)
414{
415	show_result("lxvd2x", "SKIP (CONFIG_VSX is not set)");
416	show_result("stxvd2x", "SKIP (CONFIG_VSX is not set)");
417}
418#endif /* CONFIG_VSX */
419
420static int __init test_emulate_step(void)
421{
422	test_ld();
423	test_lwz();
424	test_lwzx();
425	test_std();
426	test_ldarx_stdcx();
427	test_lfsx_stfsx();
428	test_lfdx_stfdx();
429	test_lvx_stvx();
430	test_lxvd2x_stxvd2x();
431
432	return 0;
433}
434late_initcall(test_emulate_step);
v5.4
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Simple sanity tests for instruction emulation infrastructure.
  4 *
  5 * Copyright IBM Corp. 2016
  6 */
  7
  8#define pr_fmt(fmt) "emulate_step_test: " fmt
  9
 10#include <linux/ptrace.h>
 11#include <asm/sstep.h>
 12#include <asm/ppc-opcode.h>
 13#include <asm/code-patching.h>
 14
 15#define IMM_L(i)		((uintptr_t)(i) & 0xffff)
 16
 17/*
 18 * Defined with the TEST_ prefix so they do not conflict with other
 19 * definitions.
 20 */
 21#define TEST_LD(r, base, i)	(PPC_INST_LD | ___PPC_RT(r) |		\
 22					___PPC_RA(base) | IMM_L(i))
 23#define TEST_LWZ(r, base, i)	(PPC_INST_LWZ | ___PPC_RT(r) |		\
 24					___PPC_RA(base) | IMM_L(i))
 25#define TEST_LWZX(t, a, b)	(PPC_INST_LWZX | ___PPC_RT(t) |		\
 26					___PPC_RA(a) | ___PPC_RB(b))
 27#define TEST_STD(r, base, i)	(PPC_INST_STD | ___PPC_RS(r) |		\
 28					___PPC_RA(base) | ((i) & 0xfffc))
 29#define TEST_LDARX(t, a, b, eh)	(PPC_INST_LDARX | ___PPC_RT(t) |	\
 30					___PPC_RA(a) | ___PPC_RB(b) |	\
 31					__PPC_EH(eh))
 32#define TEST_STDCX(s, a, b)	(PPC_INST_STDCX | ___PPC_RS(s) |	\
 33					___PPC_RA(a) | ___PPC_RB(b))
 34#define TEST_LFSX(t, a, b)	(PPC_INST_LFSX | ___PPC_RT(t) |		\
 35					___PPC_RA(a) | ___PPC_RB(b))
 36#define TEST_STFSX(s, a, b)	(PPC_INST_STFSX | ___PPC_RS(s) |	\
 37					___PPC_RA(a) | ___PPC_RB(b))
 38#define TEST_LFDX(t, a, b)	(PPC_INST_LFDX | ___PPC_RT(t) |		\
 39					___PPC_RA(a) | ___PPC_RB(b))
 40#define TEST_STFDX(s, a, b)	(PPC_INST_STFDX | ___PPC_RS(s) |	\
 41					___PPC_RA(a) | ___PPC_RB(b))
 42#define TEST_LVX(t, a, b)	(PPC_INST_LVX | ___PPC_RT(t) |		\
 43					___PPC_RA(a) | ___PPC_RB(b))
 44#define TEST_STVX(s, a, b)	(PPC_INST_STVX | ___PPC_RS(s) |		\
 45					___PPC_RA(a) | ___PPC_RB(b))
 46#define TEST_LXVD2X(s, a, b)	(PPC_INST_LXVD2X | VSX_XX1((s), R##a, R##b))
 47#define TEST_STXVD2X(s, a, b)	(PPC_INST_STXVD2X | VSX_XX1((s), R##a, R##b))
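/*
 * The trailing "| 0x1" in the _DOT variants below sets the Rc bit,
 * i.e. the CR0-recording forms add. and addc.
 */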
 48#define TEST_ADD(t, a, b)	(PPC_INST_ADD | ___PPC_RT(t) |		\
 49					___PPC_RA(a) | ___PPC_RB(b))
 50#define TEST_ADD_DOT(t, a, b)	(PPC_INST_ADD | ___PPC_RT(t) |		\
 51					___PPC_RA(a) | ___PPC_RB(b) | 0x1)
 52#define TEST_ADDC(t, a, b)	(PPC_INST_ADDC | ___PPC_RT(t) |		\
 53					___PPC_RA(a) | ___PPC_RB(b))
 54#define TEST_ADDC_DOT(t, a, b)	(PPC_INST_ADDC | ___PPC_RT(t) |		\
 55					___PPC_RA(a) | ___PPC_RB(b) | 0x1)
 56
 57#define MAX_SUBTESTS	16
 58
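/*
 * Subtest flags: when set, run_tests_compute() does not compare the
 * corresponding GPR, XER or CR value between the emulated result and
 * the natively executed result.
 */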
 59#define IGNORE_GPR(n)	(0x1UL << (n))
 60#define IGNORE_XER	(0x1UL << 32)
 61#define IGNORE_CCR	(0x1UL << 33)
 62
 63static void __init init_pt_regs(struct pt_regs *regs)
 64{
 65	static unsigned long msr;
 66	static bool msr_cached;
 67
 68	memset(regs, 0, sizeof(struct pt_regs));
 69
 70	if (likely(msr_cached)) {
 71		regs->msr = msr;
 72		return;
 73	}
 74
 75	asm volatile("mfmsr %0" : "=r"(regs->msr));
 76
 77	regs->msr |= MSR_FP;
 78	regs->msr |= MSR_VEC;
 79	regs->msr |= MSR_VSX;
 80
 81	msr = regs->msr;
 82	msr_cached = true;
 83}
 84
 85static void __init show_result(char *mnemonic, char *result)
 86{
 87	pr_info("%-14s : %s\n", mnemonic, result);
 88}
 89
 90static void __init show_result_with_descr(char *mnemonic, char *descr,
 91					  char *result)
 92{
 93	pr_info("%-14s : %-50s %s\n", mnemonic, descr, result);
 94}
 95
 96static void __init test_ld(void)
 97{
 98	struct pt_regs regs;
 99	unsigned long a = 0x23;
100	int stepped = -1;
101
102	init_pt_regs(&regs);
103	regs.gpr[3] = (unsigned long) &a;
104
105	/* ld r5, 0(r3) */
106	stepped = emulate_step(&regs, TEST_LD(5, 3, 0));
107
108	if (stepped == 1 && regs.gpr[5] == a)
109		show_result("ld", "PASS");
110	else
111		show_result("ld", "FAIL");
112}
113
114static void __init test_lwz(void)
115{
116	struct pt_regs regs;
117	unsigned int a = 0x4545;
118	int stepped = -1;
119
120	init_pt_regs(&regs);
121	regs.gpr[3] = (unsigned long) &a;
122
123	/* lwz r5, 0(r3) */
124	stepped = emulate_step(&regs, TEST_LWZ(5, 3, 0));
125
126	if (stepped == 1 && regs.gpr[5] == a)
127		show_result("lwz", "PASS");
128	else
129		show_result("lwz", "FAIL");
130}
131
132static void __init test_lwzx(void)
133{
134	struct pt_regs regs;
135	unsigned int a[3] = {0x0, 0x0, 0x1234};
136	int stepped = -1;
137
138	init_pt_regs(&regs);
139	regs.gpr[3] = (unsigned long) a;
140	regs.gpr[4] = 8;
141	regs.gpr[5] = 0x8765;
142
143	/* lwzx r5, r3, r4 */
144	stepped = emulate_step(&regs, TEST_LWZX(5, 3, 4));
145	if (stepped == 1 && regs.gpr[5] == a[2])
146		show_result("lwzx", "PASS");
147	else
148		show_result("lwzx", "FAIL");
149}
150
151static void __init test_std(void)
152{
153	struct pt_regs regs;
154	unsigned long a = 0x1234;
155	int stepped = -1;
156
157	init_pt_regs(&regs);
158	regs.gpr[3] = (unsigned long) &a;
159	regs.gpr[5] = 0x5678;
160
161	/* std r5, 0(r3) */
162	stepped = emulate_step(&regs, TEST_STD(5, 3, 0));
163	if (stepped == 1 && regs.gpr[5] == a)
164		show_result("std", "PASS");
165	else
166		show_result("std", "FAIL");
167}
168
169static void __init test_ldarx_stdcx(void)
170{
171	struct pt_regs regs;
172	unsigned long a = 0x1234;
173	int stepped = -1;
174	unsigned long cr0_eq = 0x1 << 29; /* eq bit of CR0 */
175
176	init_pt_regs(&regs);
177	asm volatile("mfcr %0" : "=r"(regs.ccr));
178
179
180	/*** ldarx ***/
181
182	regs.gpr[3] = (unsigned long) &a;
183	regs.gpr[4] = 0;
184	regs.gpr[5] = 0x5678;
185
186	/* ldarx r5, r3, r4, 0 */
187	stepped = emulate_step(&regs, TEST_LDARX(5, 3, 4, 0));
188
189	/*
190	 * Don't touch 'a' here. Touching 'a' can trigger a load/store
191	 * of 'a', which results in failure of the subsequent stdcx.
192	 * Instead, compare against a hardcoded value.
193	 */
194	if (stepped <= 0 || regs.gpr[5] != 0x1234) {
195		show_result("ldarx / stdcx.", "FAIL (ldarx)");
196		return;
197	}
198
199
200	/*** stdcx. ***/
201
202	regs.gpr[5] = 0x9ABC;
203
204	/* stdcx. r5, r3, r4 */
205	stepped = emulate_step(&regs, TEST_STDCX(5, 3, 4));
206
207	/*
208	 * Two possible scenarios indicate successful emulation
209	 * of stdcx.:
210	 *  1. The reservation is active and the store is performed. In this
211	 *     case the cr0.eq bit will be set to 1.
212	 *  2. The reservation is not active and the store is not performed.
213	 *     In this case the cr0.eq bit will be set to 0.
214	 */
215	if (stepped == 1 && ((regs.gpr[5] == a && (regs.ccr & cr0_eq))
216			|| (regs.gpr[5] != a && !(regs.ccr & cr0_eq))))
217		show_result("ldarx / stdcx.", "PASS");
218	else
219		show_result("ldarx / stdcx.", "FAIL (stdcx.)");
220}
221
222#ifdef CONFIG_PPC_FPU
223static void __init test_lfsx_stfsx(void)
224{
225	struct pt_regs regs;
226	union {
227		float a;
228		int b;
229	} c;
230	int cached_b;
231	int stepped = -1;
232
233	init_pt_regs(&regs);
234
235
236	/*** lfsx ***/
237
238	c.a = 123.45;
239	cached_b = c.b;
240
241	regs.gpr[3] = (unsigned long) &c.a;
242	regs.gpr[4] = 0;
243
244	/* lfsx frt10, r3, r4 */
245	stepped = emulate_step(&regs, TEST_LFSX(10, 3, 4));
246
247	if (stepped == 1)
248		show_result("lfsx", "PASS");
249	else
250		show_result("lfsx", "FAIL");
251
252
253	/*** stfsx ***/
254
255	c.a = 678.91;
256
257	/* stfsx frs10, r3, r4 */
258	stepped = emulate_step(&regs, TEST_STFSX(10, 3, 4));
259
260	if (stepped == 1 && c.b == cached_b)
261		show_result("stfsx", "PASS");
262	else
263		show_result("stfsx", "FAIL");
264}
265
266static void __init test_lfdx_stfdx(void)
267{
268	struct pt_regs regs;
269	union {
270		double a;
271		long b;
272	} c;
273	long cached_b;
274	int stepped = -1;
275
276	init_pt_regs(&regs);
277
278
279	/*** lfdx ***/
280
281	c.a = 123456.78;
282	cached_b = c.b;
283
284	regs.gpr[3] = (unsigned long) &c.a;
285	regs.gpr[4] = 0;
286
287	/* lfdx frt10, r3, r4 */
288	stepped = emulate_step(&regs, TEST_LFDX(10, 3, 4));
289
290	if (stepped == 1)
291		show_result("lfdx", "PASS");
292	else
293		show_result("lfdx", "FAIL");
294
295
296	/*** stfdx ***/
297
298	c.a = 987654.32;
299
300	/* stfdx frs10, r3, r4 */
301	stepped = emulate_step(&regs, TEST_STFDX(10, 3, 4));
302
303	if (stepped == 1 && c.b == cached_b)
304		show_result("stfdx", "PASS");
305	else
306		show_result("stfdx", "FAIL");
307}
308#else
309static void __init test_lfsx_stfsx(void)
310{
311	show_result("lfsx", "SKIP (CONFIG_PPC_FPU is not set)");
312	show_result("stfsx", "SKIP (CONFIG_PPC_FPU is not set)");
313}
314
315static void __init test_lfdx_stfdx(void)
316{
317	show_result("lfdx", "SKIP (CONFIG_PPC_FPU is not set)");
318	show_result("stfdx", "SKIP (CONFIG_PPC_FPU is not set)");
319}
320#endif /* CONFIG_PPC_FPU */
321
322#ifdef CONFIG_ALTIVEC
323static void __init test_lvx_stvx(void)
324{
325	struct pt_regs regs;
326	union {
327		vector128 a;
328		u32 b[4];
329	} c;
330	u32 cached_b[4];
331	int stepped = -1;
332
333	init_pt_regs(&regs);
334
335
336	/*** lvx ***/
337
338	cached_b[0] = c.b[0] = 923745;
339	cached_b[1] = c.b[1] = 2139478;
340	cached_b[2] = c.b[2] = 9012;
341	cached_b[3] = c.b[3] = 982134;
342
343	regs.gpr[3] = (unsigned long) &c.a;
344	regs.gpr[4] = 0;
345
346	/* lvx vrt10, r3, r4 */
347	stepped = emulate_step(&regs, TEST_LVX(10, 3, 4));
348
349	if (stepped == 1)
350		show_result("lvx", "PASS");
351	else
352		show_result("lvx", "FAIL");
353
354
355	/*** stvx ***/
356
357	c.b[0] = 4987513;
358	c.b[1] = 84313948;
359	c.b[2] = 71;
360	c.b[3] = 498532;
361
362	/* stvx vrs10, r3, r4 */
363	stepped = emulate_step(&regs, TEST_STVX(10, 3, 4));
364
365	if (stepped == 1 && cached_b[0] == c.b[0] && cached_b[1] == c.b[1] &&
366	    cached_b[2] == c.b[2] && cached_b[3] == c.b[3])
367		show_result("stvx", "PASS");
368	else
369		show_result("stvx", "FAIL");
370}
371#else
372static void __init test_lvx_stvx(void)
373{
374	show_result("lvx", "SKIP (CONFIG_ALTIVEC is not set)");
375	show_result("stvx", "SKIP (CONFIG_ALTIVEC is not set)");
376}
377#endif /* CONFIG_ALTIVEC */
378
379#ifdef CONFIG_VSX
380static void __init test_lxvd2x_stxvd2x(void)
381{
382	struct pt_regs regs;
383	union {
384		vector128 a;
385		u32 b[4];
386	} c;
387	u32 cached_b[4];
388	int stepped = -1;
389
390	init_pt_regs(&regs);
391
392
393	/*** lxvd2x ***/
394
395	cached_b[0] = c.b[0] = 18233;
396	cached_b[1] = c.b[1] = 34863571;
397	cached_b[2] = c.b[2] = 834;
398	cached_b[3] = c.b[3] = 6138911;
399
400	regs.gpr[3] = (unsigned long) &c.a;
401	regs.gpr[4] = 0;
402
403	/* lxvd2x vsr39, r3, r4 */
404	stepped = emulate_step(&regs, TEST_LXVD2X(39, 3, 4));
405
406	if (stepped == 1 && cpu_has_feature(CPU_FTR_VSX)) {
407		show_result("lxvd2x", "PASS");
408	} else {
409		if (!cpu_has_feature(CPU_FTR_VSX))
410			show_result("lxvd2x", "PASS (!CPU_FTR_VSX)");
411		else
412			show_result("lxvd2x", "FAIL");
413	}
414
415
416	/*** stxvd2x ***/
417
418	c.b[0] = 21379463;
419	c.b[1] = 87;
420	c.b[2] = 374234;
421	c.b[3] = 4;
422
423	/* stxvd2x vsr39, r3, r4 */
424	stepped = emulate_step(&regs, TEST_STXVD2X(39, 3, 4));
425
426	if (stepped == 1 && cached_b[0] == c.b[0] && cached_b[1] == c.b[1] &&
427	    cached_b[2] == c.b[2] && cached_b[3] == c.b[3] &&
428	    cpu_has_feature(CPU_FTR_VSX)) {
429		show_result("stxvd2x", "PASS");
430	} else {
431		if (!cpu_has_feature(CPU_FTR_VSX))
432			show_result("stxvd2x", "PASS (!CPU_FTR_VSX)");
433		else
434			show_result("stxvd2x", "FAIL");
435	}
436}
437#else
438static void __init test_lxvd2x_stxvd2x(void)
439{
440	show_result("lxvd2x", "SKIP (CONFIG_VSX is not set)");
441	show_result("stxvd2x", "SKIP (CONFIG_VSX is not set)");
442}
443#endif /* CONFIG_VSX */
444
445static void __init run_tests_load_store(void)
446{
447	test_ld();
448	test_lwz();
449	test_lwzx();
450	test_std();
451	test_ldarx_stdcx();
452	test_lfsx_stfsx();
453	test_lfdx_stfdx();
454	test_lvx_stvx();
455	test_lxvd2x_stxvd2x();
456}
457
458struct compute_test {
459	char *mnemonic;
460	struct {
461		char *descr;
462		unsigned long flags;
463		unsigned int instr;
464		struct pt_regs regs;
465	} subtests[MAX_SUBTESTS + 1];
466};
467
468static struct compute_test compute_tests[] = {
469	{
470		.mnemonic = "nop",
471		.subtests = {
472			{
473				.descr = "R0 = LONG_MAX",
474				.instr = PPC_INST_NOP,
475				.regs = {
476					.gpr[0] = LONG_MAX,
477				}
478			}
479		}
480	},
481	{
482		.mnemonic = "add",
483		.subtests = {
484			{
485				.descr = "RA = LONG_MIN, RB = LONG_MIN",
486				.instr = TEST_ADD(20, 21, 22),
487				.regs = {
488					.gpr[21] = LONG_MIN,
489					.gpr[22] = LONG_MIN,
490				}
491			},
492			{
493				.descr = "RA = LONG_MIN, RB = LONG_MAX",
494				.instr = TEST_ADD(20, 21, 22),
495				.regs = {
496					.gpr[21] = LONG_MIN,
497					.gpr[22] = LONG_MAX,
498				}
499			},
500			{
501				.descr = "RA = LONG_MAX, RB = LONG_MAX",
502				.instr = TEST_ADD(20, 21, 22),
503				.regs = {
504					.gpr[21] = LONG_MAX,
505					.gpr[22] = LONG_MAX,
506				}
507			},
508			{
509				.descr = "RA = ULONG_MAX, RB = ULONG_MAX",
510				.instr = TEST_ADD(20, 21, 22),
511				.regs = {
512					.gpr[21] = ULONG_MAX,
513					.gpr[22] = ULONG_MAX,
514				}
515			},
516			{
517				.descr = "RA = ULONG_MAX, RB = 0x1",
518				.instr = TEST_ADD(20, 21, 22),
519				.regs = {
520					.gpr[21] = ULONG_MAX,
521					.gpr[22] = 0x1,
522				}
523			},
524			{
525				.descr = "RA = INT_MIN, RB = INT_MIN",
526				.instr = TEST_ADD(20, 21, 22),
527				.regs = {
528					.gpr[21] = INT_MIN,
529					.gpr[22] = INT_MIN,
530				}
531			},
532			{
533				.descr = "RA = INT_MIN, RB = INT_MAX",
534				.instr = TEST_ADD(20, 21, 22),
535				.regs = {
536					.gpr[21] = INT_MIN,
537					.gpr[22] = INT_MAX,
538				}
539			},
540			{
541				.descr = "RA = INT_MAX, RB = INT_MAX",
542				.instr = TEST_ADD(20, 21, 22),
543				.regs = {
544					.gpr[21] = INT_MAX,
545					.gpr[22] = INT_MAX,
546				}
547			},
548			{
549				.descr = "RA = UINT_MAX, RB = UINT_MAX",
550				.instr = TEST_ADD(20, 21, 22),
551				.regs = {
552					.gpr[21] = UINT_MAX,
553					.gpr[22] = UINT_MAX,
554				}
555			},
556			{
557				.descr = "RA = UINT_MAX, RB = 0x1",
558				.instr = TEST_ADD(20, 21, 22),
559				.regs = {
560					.gpr[21] = UINT_MAX,
561					.gpr[22] = 0x1,
562				}
563			}
564		}
565	},
566	{
567		.mnemonic = "add.",
568		.subtests = {
569			{
570				.descr = "RA = LONG_MIN, RB = LONG_MIN",
571				.flags = IGNORE_CCR,
572				.instr = TEST_ADD_DOT(20, 21, 22),
573				.regs = {
574					.gpr[21] = LONG_MIN,
575					.gpr[22] = LONG_MIN,
576				}
577			},
578			{
579				.descr = "RA = LONG_MIN, RB = LONG_MAX",
580				.instr = TEST_ADD_DOT(20, 21, 22),
581				.regs = {
582					.gpr[21] = LONG_MIN,
583					.gpr[22] = LONG_MAX,
584				}
585			},
586			{
587				.descr = "RA = LONG_MAX, RB = LONG_MAX",
588				.flags = IGNORE_CCR,
589				.instr = TEST_ADD_DOT(20, 21, 22),
590				.regs = {
591					.gpr[21] = LONG_MAX,
592					.gpr[22] = LONG_MAX,
593				}
594			},
595			{
596				.descr = "RA = ULONG_MAX, RB = ULONG_MAX",
597				.instr = TEST_ADD_DOT(20, 21, 22),
598				.regs = {
599					.gpr[21] = ULONG_MAX,
600					.gpr[22] = ULONG_MAX,
601				}
602			},
603			{
604				.descr = "RA = ULONG_MAX, RB = 0x1",
605				.instr = TEST_ADD_DOT(20, 21, 22),
606				.regs = {
607					.gpr[21] = ULONG_MAX,
608					.gpr[22] = 0x1,
609				}
610			},
611			{
612				.descr = "RA = INT_MIN, RB = INT_MIN",
613				.instr = TEST_ADD_DOT(20, 21, 22),
614				.regs = {
615					.gpr[21] = INT_MIN,
616					.gpr[22] = INT_MIN,
617				}
618			},
619			{
620				.descr = "RA = INT_MIN, RB = INT_MAX",
621				.instr = TEST_ADD_DOT(20, 21, 22),
622				.regs = {
623					.gpr[21] = INT_MIN,
624					.gpr[22] = INT_MAX,
625				}
626			},
627			{
628				.descr = "RA = INT_MAX, RB = INT_MAX",
629				.instr = TEST_ADD_DOT(20, 21, 22),
630				.regs = {
631					.gpr[21] = INT_MAX,
632					.gpr[22] = INT_MAX,
633				}
634			},
635			{
636				.descr = "RA = UINT_MAX, RB = UINT_MAX",
637				.instr = TEST_ADD_DOT(20, 21, 22),
638				.regs = {
639					.gpr[21] = UINT_MAX,
640					.gpr[22] = UINT_MAX,
641				}
642			},
643			{
644				.descr = "RA = UINT_MAX, RB = 0x1",
645				.instr = TEST_ADD_DOT(20, 21, 22),
646				.regs = {
647					.gpr[21] = UINT_MAX,
648					.gpr[22] = 0x1,
649				}
650			}
651		}
652	},
653	{
654		.mnemonic = "addc",
655		.subtests = {
656			{
657				.descr = "RA = LONG_MIN, RB = LONG_MIN",
658				.instr = TEST_ADDC(20, 21, 22),
659				.regs = {
660					.gpr[21] = LONG_MIN,
661					.gpr[22] = LONG_MIN,
662				}
663			},
664			{
665				.descr = "RA = LONG_MIN, RB = LONG_MAX",
666				.instr = TEST_ADDC(20, 21, 22),
667				.regs = {
668					.gpr[21] = LONG_MIN,
669					.gpr[22] = LONG_MAX,
670				}
671			},
672			{
673				.descr = "RA = LONG_MAX, RB = LONG_MAX",
674				.instr = TEST_ADDC(20, 21, 22),
675				.regs = {
676					.gpr[21] = LONG_MAX,
677					.gpr[22] = LONG_MAX,
678				}
679			},
680			{
681				.descr = "RA = ULONG_MAX, RB = ULONG_MAX",
682				.instr = TEST_ADDC(20, 21, 22),
683				.regs = {
684					.gpr[21] = ULONG_MAX,
685					.gpr[22] = ULONG_MAX,
686				}
687			},
688			{
689				.descr = "RA = ULONG_MAX, RB = 0x1",
690				.instr = TEST_ADDC(20, 21, 22),
691				.regs = {
692					.gpr[21] = ULONG_MAX,
693					.gpr[22] = 0x1,
694				}
695			},
696			{
697				.descr = "RA = INT_MIN, RB = INT_MIN",
698				.instr = TEST_ADDC(20, 21, 22),
699				.regs = {
700					.gpr[21] = INT_MIN,
701					.gpr[22] = INT_MIN,
702				}
703			},
704			{
705				.descr = "RA = INT_MIN, RB = INT_MAX",
706				.instr = TEST_ADDC(20, 21, 22),
707				.regs = {
708					.gpr[21] = INT_MIN,
709					.gpr[22] = INT_MAX,
710				}
711			},
712			{
713				.descr = "RA = INT_MAX, RB = INT_MAX",
714				.instr = TEST_ADDC(20, 21, 22),
715				.regs = {
716					.gpr[21] = INT_MAX,
717					.gpr[22] = INT_MAX,
718				}
719			},
720			{
721				.descr = "RA = UINT_MAX, RB = UINT_MAX",
722				.instr = TEST_ADDC(20, 21, 22),
723				.regs = {
724					.gpr[21] = UINT_MAX,
725					.gpr[22] = UINT_MAX,
726				}
727			},
728			{
729				.descr = "RA = UINT_MAX, RB = 0x1",
730				.instr = TEST_ADDC(20, 21, 22),
731				.regs = {
732					.gpr[21] = UINT_MAX,
733					.gpr[22] = 0x1,
734				}
735			},
736			{
737				.descr = "RA = LONG_MIN | INT_MIN, RB = LONG_MIN | INT_MIN",
738				.instr = TEST_ADDC(20, 21, 22),
739				.regs = {
740					.gpr[21] = LONG_MIN | (uint)INT_MIN,
741					.gpr[22] = LONG_MIN | (uint)INT_MIN,
742				}
743			}
744		}
745	},
746	{
747		.mnemonic = "addc.",
748		.subtests = {
749			{
750				.descr = "RA = LONG_MIN, RB = LONG_MIN",
751				.flags = IGNORE_CCR,
752				.instr = TEST_ADDC_DOT(20, 21, 22),
753				.regs = {
754					.gpr[21] = LONG_MIN,
755					.gpr[22] = LONG_MIN,
756				}
757			},
758			{
759				.descr = "RA = LONG_MIN, RB = LONG_MAX",
760				.instr = TEST_ADDC_DOT(20, 21, 22),
761				.regs = {
762					.gpr[21] = LONG_MIN,
763					.gpr[22] = LONG_MAX,
764				}
765			},
766			{
767				.descr = "RA = LONG_MAX, RB = LONG_MAX",
768				.flags = IGNORE_CCR,
769				.instr = TEST_ADDC_DOT(20, 21, 22),
770				.regs = {
771					.gpr[21] = LONG_MAX,
772					.gpr[22] = LONG_MAX,
773				}
774			},
775			{
776				.descr = "RA = ULONG_MAX, RB = ULONG_MAX",
777				.instr = TEST_ADDC_DOT(20, 21, 22),
778				.regs = {
779					.gpr[21] = ULONG_MAX,
780					.gpr[22] = ULONG_MAX,
781				}
782			},
783			{
784				.descr = "RA = ULONG_MAX, RB = 0x1",
785				.instr = TEST_ADDC_DOT(20, 21, 22),
786				.regs = {
787					.gpr[21] = ULONG_MAX,
788					.gpr[22] = 0x1,
789				}
790			},
791			{
792				.descr = "RA = INT_MIN, RB = INT_MIN",
793				.instr = TEST_ADDC_DOT(20, 21, 22),
794				.regs = {
795					.gpr[21] = INT_MIN,
796					.gpr[22] = INT_MIN,
797				}
798			},
799			{
800				.descr = "RA = INT_MIN, RB = INT_MAX",
801				.instr = TEST_ADDC_DOT(20, 21, 22),
802				.regs = {
803					.gpr[21] = INT_MIN,
804					.gpr[22] = INT_MAX,
805				}
806			},
807			{
808				.descr = "RA = INT_MAX, RB = INT_MAX",
809				.instr = TEST_ADDC_DOT(20, 21, 22),
810				.regs = {
811					.gpr[21] = INT_MAX,
812					.gpr[22] = INT_MAX,
813				}
814			},
815			{
816				.descr = "RA = UINT_MAX, RB = UINT_MAX",
817				.instr = TEST_ADDC_DOT(20, 21, 22),
818				.regs = {
819					.gpr[21] = UINT_MAX,
820					.gpr[22] = UINT_MAX,
821				}
822			},
823			{
824				.descr = "RA = UINT_MAX, RB = 0x1",
825				.instr = TEST_ADDC_DOT(20, 21, 22),
826				.regs = {
827					.gpr[21] = UINT_MAX,
828					.gpr[22] = 0x1,
829				}
830			},
831			{
832				.descr = "RA = LONG_MIN | INT_MIN, RB = LONG_MIN | INT_MIN",
833				.instr = TEST_ADDC_DOT(20, 21, 22),
834				.regs = {
835					.gpr[21] = LONG_MIN | (uint)INT_MIN,
836					.gpr[22] = LONG_MIN | (uint)INT_MIN,
837				}
838			}
839		}
840	}
841};
842
843static int __init emulate_compute_instr(struct pt_regs *regs,
844					unsigned int instr)
845{
846	struct instruction_op op;
847
848	if (!regs || !instr)
849		return -EINVAL;
850
851	if (analyse_instr(&op, regs, instr) != 1 ||
852	    GETTYPE(op.type) != COMPUTE) {
853		pr_info("emulation failed, instruction = 0x%08x\n", instr);
854		return -EFAULT;
855	}
856
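	/* analyse_instr() has computed the result into 'op'; apply it to the register image. */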
857	emulate_update_regs(regs, &op);
858	return 0;
859}
860
861static int __init execute_compute_instr(struct pt_regs *regs,
862					unsigned int instr)
863{
864	extern int exec_instr(struct pt_regs *regs);
865	extern s32 patch__exec_instr;
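	/*
	 * exec_instr() is an assembly helper (defined alongside this file):
	 * it loads the register state from @regs, runs the instruction
	 * patched in at patch__exec_instr, and writes the resulting state
	 * back into @regs.
	 */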
866
867	if (!regs || !instr)
868		return -EINVAL;
869
870	/* Patch the NOP with the actual instruction */
871	patch_instruction_site(&patch__exec_instr, instr);
872	if (exec_instr(regs)) {
873		pr_info("execution failed, instruction = 0x%08x\n", instr);
874		return -EFAULT;
875	}
876
877	return 0;
878}
879
880#define gpr_mismatch(gprn, exp, got)	\
881	pr_info("GPR%u mismatch, exp = 0x%016lx, got = 0x%016lx\n",	\
882		gprn, exp, got)
883
884#define reg_mismatch(name, exp, got)	\
885	pr_info("%s mismatch, exp = 0x%016lx, got = 0x%016lx\n",	\
886		name, exp, got)
887
888static void __init run_tests_compute(void)
889{
890	unsigned long flags;
891	struct compute_test *test;
892	struct pt_regs *regs, exp, got;
893	unsigned int i, j, k, instr;
894	bool ignore_gpr, ignore_xer, ignore_ccr, passed;
895
896	for (i = 0; i < ARRAY_SIZE(compute_tests); i++) {
897		test = &compute_tests[i];
898
899		for (j = 0; j < MAX_SUBTESTS && test->subtests[j].descr; j++) {
900			instr = test->subtests[j].instr;
901			flags = test->subtests[j].flags;
902			regs = &test->subtests[j].regs;
903			ignore_xer = flags & IGNORE_XER;
904			ignore_ccr = flags & IGNORE_CCR;
905			passed = true;
906
907			memcpy(&exp, regs, sizeof(struct pt_regs));
908			memcpy(&got, regs, sizeof(struct pt_regs));
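			/*
			 * 'got' is run through the emulator and 'exp' is
			 * executed natively via exec_instr(); both start from
			 * the same subtest register state and are compared
			 * below.
			 */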
909
910			/*
911			 * Set a compatible MSR value explicitly to ensure
912			 * that XER and CR bits are updated appropriately
913			 */
914			exp.msr = MSR_KERNEL;
915			got.msr = MSR_KERNEL;
916
917			if (emulate_compute_instr(&got, instr) ||
918			    execute_compute_instr(&exp, instr)) {
919				passed = false;
920				goto print;
921			}
922
923			/* Verify GPR values */
924			for (k = 0; k < 32; k++) {
925				ignore_gpr = flags & IGNORE_GPR(k);
926				if (!ignore_gpr && exp.gpr[k] != got.gpr[k]) {
927					passed = false;
928					gpr_mismatch(k, exp.gpr[k], got.gpr[k]);
929				}
930			}
931
932			/* Verify LR value */
933			if (exp.link != got.link) {
934				passed = false;
935				reg_mismatch("LR", exp.link, got.link);
936			}
937
938			/* Verify XER value */
939			if (!ignore_xer && exp.xer != got.xer) {
940				passed = false;
941				reg_mismatch("XER", exp.xer, got.xer);
942			}
943
944			/* Verify CR value */
945			if (!ignore_ccr && exp.ccr != got.ccr) {
946				passed = false;
947				reg_mismatch("CR", exp.ccr, got.ccr);
948			}
949
950print:
951			show_result_with_descr(test->mnemonic,
952					       test->subtests[j].descr,
953					       passed ? "PASS" : "FAIL");
954		}
955	}
956}
957
958static int __init test_emulate_step(void)
959{
960	printk(KERN_INFO "Running instruction emulation self-tests ...\n");
961	run_tests_load_store();
962	run_tests_compute();
963
964	return 0;
965}
966late_initcall(test_emulate_step);