v4.17: arch/x86/include/asm/alternative.h
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_X86_ALTERNATIVE_H
  3#define _ASM_X86_ALTERNATIVE_H
  4
  5#ifndef __ASSEMBLY__
  6
  7#include <linux/types.h>
  8#include <linux/stddef.h>
  9#include <linux/stringify.h>
 10#include <asm/asm.h>
 11
 12/*
 13 * Alternative inline assembly for SMP.
 14 *
 15 * The LOCK_PREFIX macro defined here replaces the LOCK and
 16 * LOCK_PREFIX macros used everywhere in the source tree.
 17 *
 18 * SMP alternatives use the same data structures as the other
 19 * alternatives and the X86_FEATURE_UP flag to indicate the case of a
 20 * UP system running an SMP kernel.  The existing apply_alternatives()
 21 * works fine for patching a SMP kernel for UP.
 22 *
 23 * The SMP alternative tables can be kept after boot and contain both
 24 * UP and SMP versions of the instructions to allow switching back to
 25 * SMP at runtime, when hotplugging in a new CPU, which is especially
 26 * useful in virtualized environments.
 27 *
 28 * The very common lock prefix is handled as a special case in a
 29 * separate table which is a pure address list without replacement ptr
 30 * and size information.  That keeps the table sizes small.
 31 */
 32
 33#ifdef CONFIG_SMP
 34#define LOCK_PREFIX_HERE \
 35		".pushsection .smp_locks,\"a\"\n"	\
 36		".balign 4\n"				\
 37		".long 671f - .\n" /* offset */		\
 38		".popsection\n"				\
 39		"671:"
 40
 41#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "
 42
 43#else /* ! CONFIG_SMP */
 44#define LOCK_PREFIX_HERE ""
 45#define LOCK_PREFIX ""
 46#endif
 47
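
As an editorial aside, here is a minimal sketch of how LOCK_PREFIX is typically consumed, modeled on the kernel's atomic helpers (the wrapper name is illustrative): string concatenation places the lock byte, plus its .smp_locks entry, in front of the instruction on SMP builds, and emits nothing extra on UP builds.

/* Illustrative only: atomically increment a counter in memory. */
static inline void example_atomic_inc(int *counter)
{
	/* LOCK_PREFIX is "lock; " (plus a .smp_locks entry) on SMP, "" on UP. */
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (*counter));
}
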
 48struct alt_instr {
 49	s32 instr_offset;	/* original instruction */
 50	s32 repl_offset;	/* offset to replacement instruction */
 51	u16 cpuid;		/* cpuid bit set for replacement */
 52	u8  instrlen;		/* length of original instruction */
 53	u8  replacementlen;	/* length of new instruction */
 54	u8  padlen;		/* length of build-time padding */
 55} __packed;
 56
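
Both s32 fields hold offsets relative to their own location, so the table needs no relocation. A minimal sketch of how the patcher resolves them back into pointers, modeled on apply_alternatives() in arch/x86/kernel/alternative.c:

/* Illustrative only: recover the addresses one descriptor refers to. */
static void example_resolve(struct alt_instr *a)
{
	u8 *instr       = (u8 *)&a->instr_offset + a->instr_offset;
	u8 *replacement = (u8 *)&a->repl_offset  + a->repl_offset;

	/*
	 * If the CPU has the a->cpuid feature bit, apply_alternatives()
	 * copies a->replacementlen bytes from @replacement over @instr and
	 * NOP-fills the rest of the a->instrlen byte window.
	 */
	(void)instr;
	(void)replacement;
}
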
 57/*
 58 * Debug flag that can be tested to see whether alternative
 59 * instructions were patched in already:
 60 */
 61extern int alternatives_patched;
 62
 63extern void alternative_instructions(void);
 64extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
 65
 66struct module;
 67
 68#ifdef CONFIG_SMP
 69extern void alternatives_smp_module_add(struct module *mod, char *name,
 70					void *locks, void *locks_end,
 71					void *text, void *text_end);
 72extern void alternatives_smp_module_del(struct module *mod);
 73extern void alternatives_enable_smp(void);
 74extern int alternatives_text_reserved(void *start, void *end);
 75extern bool skip_smp_alternatives;
 76#else
 77static inline void alternatives_smp_module_add(struct module *mod, char *name,
 78					       void *locks, void *locks_end,
 79					       void *text, void *text_end) {}
 80static inline void alternatives_smp_module_del(struct module *mod) {}
 81static inline void alternatives_enable_smp(void) {}
 82static inline int alternatives_text_reserved(void *start, void *end)
 83{
 84	return 0;
 85}
 86#endif	/* CONFIG_SMP */
 87
 88#define b_replacement(num)	"664"#num
 89#define e_replacement(num)	"665"#num
 90
 91#define alt_end_marker		"663"
 92#define alt_slen		"662b-661b"
 93#define alt_pad_len		alt_end_marker"b-662b"
 94#define alt_total_slen		alt_end_marker"b-661b"
 95#define alt_rlen(num)		e_replacement(num)"f-"b_replacement(num)"f"
 96
 97#define __OLDINSTR(oldinstr, num)					\
 98	"661:\n\t" oldinstr "\n662:\n"					\
 99	".skip -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * "		\
100		"((" alt_rlen(num) ")-(" alt_slen ")),0x90\n"
101
102#define OLDINSTR(oldinstr, num)						\
103	__OLDINSTR(oldinstr, num)					\
104	alt_end_marker ":\n"
105
106/*
107 * gas compatible max based on the idea from:
108 * http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
109 *
110 * The additional "-" is needed because gas uses a "true" value of -1.
111 */
112#define alt_max_short(a, b)	"((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"
113
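
The same branchless max can be sanity-checked in C; the only difference is that gas evaluates a "true" comparison to -1 rather than 1, which is why the assembler string above carries the extra negation. The OLDINSTR .skip expressions use the same style of arithmetic to emit 0x90 (NOP) padding only when the replacement is longer than the original. A quick illustrative check:

/* C counterpart of alt_max_short(): select the larger value without a branch. */
static inline int example_alt_max(int a, int b)
{
	return a ^ ((a ^ b) & -(a < b));	/* (a < b) is 0 or 1 in C */
}

/* example_alt_max(3, 5) == 5, example_alt_max(5, 3) == 5 */
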
114/*
115 * Pad the original instruction with additional NOPs if the longer of the
116 * two replacement alternatives exceeds its length.
117 */
118#define OLDINSTR_2(oldinstr, num1, num2) \
119	"661:\n\t" oldinstr "\n662:\n"								\
120	".skip -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * "	\
121		"(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")), 0x90\n"	\
122	alt_end_marker ":\n"
123
124#define ALTINSTR_ENTRY(feature, num)					      \
125	" .long 661b - .\n"				/* label           */ \
126	" .long " b_replacement(num)"f - .\n"		/* new instruction */ \
127	" .word " __stringify(feature) "\n"		/* feature bit     */ \
128	" .byte " alt_total_slen "\n"			/* source len      */ \
129	" .byte " alt_rlen(num) "\n"			/* replacement len */ \
130	" .byte " alt_pad_len "\n"			/* pad len */
131
132#define ALTINSTR_REPLACEMENT(newinstr, feature, num)	/* replacement */     \
133	b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n\t"
134
135/* alternative assembly primitive: */
136#define ALTERNATIVE(oldinstr, newinstr, feature)			\
137	OLDINSTR(oldinstr, 1)						\
138	".pushsection .altinstructions,\"a\"\n"				\
139	ALTINSTR_ENTRY(feature, 1)					\
140	".popsection\n"							\
141	".pushsection .altinstr_replacement, \"ax\"\n"			\
142	ALTINSTR_REPLACEMENT(newinstr, feature, 1)			\
143	".popsection\n"
144
145#define ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2)\
146	OLDINSTR_2(oldinstr, 1, 2)					\
147	".pushsection .altinstructions,\"a\"\n"				\
148	ALTINSTR_ENTRY(feature1, 1)					\
149	ALTINSTR_ENTRY(feature2, 2)					\
150	".popsection\n"							\
151	".pushsection .altinstr_replacement, \"ax\"\n"			\
152	ALTINSTR_REPLACEMENT(newinstr1, feature1, 1)			\
153	ALTINSTR_REPLACEMENT(newinstr2, feature2, 2)			\
154	".popsection\n"
155
156/*
157 * Alternative instructions for different CPU types or capabilities.
158 *
159 * This allows the use of optimized instructions even on generic binary
160 * kernels.
161 *
162 * The length of oldinstr must be greater than or equal to the length of
163 * newinstr; it can be padded with NOPs as needed.
164 *
165 * For non-barrier-like inlines, please define new variants
166 * without volatile and the memory clobber.
167 */
168#define alternative(oldinstr, newinstr, feature)			\
169	asm volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
170
171#define alternative_2(oldinstr, newinstr1, feature1, newinstr2, feature2) \
172	asm volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2) ::: "memory")
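
As a usage sketch: this is roughly how the 32-bit kernel's memory barrier has historically been built on top of alternative(); X86_FEATURE_XMM2 is the SSE2 feature bit from <asm/cpufeatures.h>.

/*
 * Baseline: a locked add to the stack, which is a full barrier on any x86
 * CPU.  On CPUs with SSE2, boot-time patching replaces it with "mfence"
 * and NOP-fills the leftover bytes.
 */
#define example_mb() \
	alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
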
173
174/*
175 * Alternative inline assembly with input.
176 *
177 * Peculiarities:
178 * No memory clobber here.
179 * Argument numbers start with 1.
180 * It is best to use fixed-size constraints (like "r", referenced as (%1)).
181 * If you use variable-sized constraints like "m" or "g" in the
182 * replacement, make sure to pad to the worst-case length.
183 * An unused argument 0 is kept to preserve API compatibility.
184 */
185#define alternative_input(oldinstr, newinstr, feature, input...)	\
186	asm volatile (ALTERNATIVE(oldinstr, newinstr, feature)		\
187		: : "i" (0), ## input)
188
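
A usage sketch modeled on the kernel's prefetch() helper (the exact constraint form has varied across kernel versions). Note that, as stated above, operand numbering starts at %1 because the dummy "i" (0) occupies operand 0.

/* Prefetch with a non-temporal hint when SSE is available, else do nothing. */
static inline void example_prefetch(const void *x)
{
	alternative_input("", "prefetchnta (%1)", X86_FEATURE_XMM,
			  "r" (x));
}
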
189/*
190 * This is similar to alternative_input, but it has two features and
191 * corresponding instructions.
192 *
193 * If CPU has feature2, newinstr2 is used.
194 * Otherwise, if CPU has feature1, newinstr1 is used.
195 * Otherwise, oldinstr is used.
196 */
197#define alternative_input_2(oldinstr, newinstr1, feature1, newinstr2,	     \
198			   feature2, input...)				     \
199	asm volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1,	     \
200		newinstr2, feature2)					     \
201		: : "i" (0), ## input)
202
203/* Like alternative_input, but with a single output argument */
204#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
205	asm volatile (ALTERNATIVE(oldinstr, newinstr, feature)		\
206		: output : "i" (0), ## input)
207
208/* Like alternative_io, but for replacing a direct call with another one. */
209#define alternative_call(oldfunc, newfunc, feature, output, input...)	\
210	asm volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
211		: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
212
213/*
214 * Like alternative_call, but there are two features and respective functions.
215 * If CPU has feature2, function2 is used.
216 * Otherwise, if CPU has feature1, function1 is used.
217 * Otherwise, old function is used.
218 */
219#define alternative_call_2(oldfunc, newfunc1, feature1, newfunc2, feature2,   \
220			   output, input...)				      \
221	asm volatile (ALTERNATIVE_2("call %P[old]", "call %P[new1]", feature1,\
222		"call %P[new2]", feature2)				      \
223		: output, ASM_CALL_CONSTRAINT				      \
224		: [old] "i" (oldfunc), [new1] "i" (newfunc1),		      \
225		  [new2] "i" (newfunc2), ## input)
226
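
A usage sketch modeled on the x86-64 clear_page() in arch/x86/include/asm/page_64.h, which selects between three clearing routines at boot; note how the clobber list is smuggled in through the variadic input arguments.

extern void clear_page_orig(void *page);
extern void clear_page_rep(void *page);
extern void clear_page_erms(void *page);

static inline void example_clear_page(void *page)
{
	/* "=D"/"0" keep the argument in %rdi across whichever call is patched in. */
	alternative_call_2(clear_page_orig,
			   clear_page_rep, X86_FEATURE_REP_GOOD,
			   clear_page_erms, X86_FEATURE_ERMS,
			   "=D" (page),
			   "0" (page)
			   : "cc", "memory", "rax", "rcx");
}
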
227/*
228 * Use this macro if you need more than one output parameter
229 * in alternative_io().
230 */
231#define ASM_OUTPUT2(a...) a
232
233/*
234 * Use this macro if you need clobbers but no inputs in
235 * alternative_{input,io,call}().
236 */
237#define ASM_NO_INPUT_CLOBBER(clbr...) "i" (0) : clbr
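
A combined sketch of the two helpers above (the instruction pair is merely a plausible illustration, not taken from this header): ASM_OUTPUT2() stops the comma between the two outputs from splitting the macro arguments, and ASM_NO_INPUT_CLOBBER() supplies the dummy input plus a clobber list when there are no real inputs.

/* Illustrative only: RDTSCP also writes %ecx, so declare it clobbered. */
static inline unsigned long long example_read_tsc(void)
{
	unsigned int lo, hi;

	alternative_io("rdtsc", "rdtscp", X86_FEATURE_RDTSCP,
		       ASM_OUTPUT2("=a" (lo), "=d" (hi)),
		       ASM_NO_INPUT_CLOBBER("rcx"));

	return ((unsigned long long)hi << 32) | lo;
}
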
238
239#endif /* __ASSEMBLY__ */
240
241#endif /* _ASM_X86_ALTERNATIVE_H */
v6.2: arch/x86/include/asm/alternative.h
  1/* SPDX-License-Identifier: GPL-2.0 */
  2#ifndef _ASM_X86_ALTERNATIVE_H
  3#define _ASM_X86_ALTERNATIVE_H
  4
  5#include <linux/types.h>
  6#include <linux/stringify.h>
  7#include <asm/asm.h>
  8
  9#define ALTINSTR_FLAG_INV	(1 << 15)
 10#define ALT_NOT(feat)		((feat) | ALTINSTR_FLAG_INV)
 11
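
ALT_NOT() inverts the feature test: the replacement is patched in when the CPU does *not* have the bit (the paravirt code uses this with X86_FEATURE_XENPV, for example). A purely illustrative sketch using the alternative() helper defined further below:

/* Illustrative only: emit a PAUSE on bare metal, nothing when running as a guest. */
static inline void example_relax_on_bare_metal(void)
{
	alternative("", "pause", ALT_NOT(X86_FEATURE_HYPERVISOR));
}
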
 12#ifndef __ASSEMBLY__
 13
 14#include <linux/stddef.h>
 15
 16/*
 17 * Alternative inline assembly for SMP.
 18 *
 19 * The LOCK_PREFIX macro defined here replaces the LOCK and
 20 * LOCK_PREFIX macros used everywhere in the source tree.
 21 *
 22 * SMP alternatives use the same data structures as the other
 23 * alternatives and the X86_FEATURE_UP flag to indicate the case of a
 24 * UP system running an SMP kernel.  The existing apply_alternatives()
 25 * works fine for patching a SMP kernel for UP.
 26 *
 27 * The SMP alternative tables can be kept after boot and contain both
 28 * UP and SMP versions of the instructions to allow switching back to
 29 * SMP at runtime, when hotplugging in a new CPU, which is especially
 30 * useful in virtualized environments.
 31 *
 32 * The very common lock prefix is handled as a special case in a
 33 * separate table which is a pure address list without replacement ptr
 34 * and size information.  That keeps the table sizes small.
 35 */
 36
 37#ifdef CONFIG_SMP
 38#define LOCK_PREFIX_HERE \
 39		".pushsection .smp_locks,\"a\"\n"	\
 40		".balign 4\n"				\
 41		".long 671f - .\n" /* offset */		\
 42		".popsection\n"				\
 43		"671:"
 44
 45#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "
 46
 47#else /* ! CONFIG_SMP */
 48#define LOCK_PREFIX_HERE ""
 49#define LOCK_PREFIX ""
 50#endif
 51
 52/*
 53 * objtool annotation to ignore the alternatives and only consider the original
 54 * instruction(s).
 55 */
 56#define ANNOTATE_IGNORE_ALTERNATIVE				\
 57	"999:\n\t"						\
 58	".pushsection .discard.ignore_alts\n\t"			\
 59	".long 999b - .\n\t"					\
 60	".popsection\n\t"
 61
 62struct alt_instr {
 63	s32 instr_offset;	/* original instruction */
 64	s32 repl_offset;	/* offset to replacement instruction */
 65	u16 cpuid;		/* cpuid bit set for replacement */
 66	u8  instrlen;		/* length of original instruction */
 67	u8  replacementlen;	/* length of new instruction */
 68} __packed;
 69
 70/*
 71 * Debug flag that can be tested to see whether alternative
 72 * instructions were patched in already:
 73 */
 74extern int alternatives_patched;
 75
 76extern void alternative_instructions(void);
 77extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
 78extern void apply_retpolines(s32 *start, s32 *end);
 79extern void apply_returns(s32 *start, s32 *end);
 80extern void apply_ibt_endbr(s32 *start, s32 *end);
 81extern void apply_fineibt(s32 *start_retpoline, s32 *end_retpoline,
 82			  s32 *start_cfi, s32 *end_cfi);
 83
 84struct module;
 85struct paravirt_patch_site;
 86
 87struct callthunk_sites {
 88	s32				*call_start, *call_end;
 89	struct paravirt_patch_site	*pv_start, *pv_end;
 90};
 91
 92#ifdef CONFIG_CALL_THUNKS
 93extern void callthunks_patch_builtin_calls(void);
 94extern void callthunks_patch_module_calls(struct callthunk_sites *sites,
 95					  struct module *mod);
 96extern void *callthunks_translate_call_dest(void *dest);
 97extern bool is_callthunk(void *addr);
 98extern int x86_call_depth_emit_accounting(u8 **pprog, void *func);
 99#else
100static __always_inline void callthunks_patch_builtin_calls(void) {}
101static __always_inline void
102callthunks_patch_module_calls(struct callthunk_sites *sites,
103			      struct module *mod) {}
104static __always_inline void *callthunks_translate_call_dest(void *dest)
105{
106	return dest;
107}
108static __always_inline bool is_callthunk(void *addr)
109{
110	return false;
111}
112static __always_inline int x86_call_depth_emit_accounting(u8 **pprog,
113							  void *func)
114{
115	return 0;
116}
117#endif
118
119#ifdef CONFIG_SMP
120extern void alternatives_smp_module_add(struct module *mod, char *name,
121					void *locks, void *locks_end,
122					void *text, void *text_end);
123extern void alternatives_smp_module_del(struct module *mod);
124extern void alternatives_enable_smp(void);
125extern int alternatives_text_reserved(void *start, void *end);
126extern bool skip_smp_alternatives;
127#else
128static inline void alternatives_smp_module_add(struct module *mod, char *name,
129					       void *locks, void *locks_end,
130					       void *text, void *text_end) {}
131static inline void alternatives_smp_module_del(struct module *mod) {}
132static inline void alternatives_enable_smp(void) {}
133static inline int alternatives_text_reserved(void *start, void *end)
134{
135	return 0;
136}
137#endif	/* CONFIG_SMP */
138
139#define b_replacement(num)	"664"#num
140#define e_replacement(num)	"665"#num
141
142#define alt_end_marker		"663"
143#define alt_slen		"662b-661b"
144#define alt_total_slen		alt_end_marker"b-661b"
145#define alt_rlen(num)		e_replacement(num)"f-"b_replacement(num)"f"
146
147#define OLDINSTR(oldinstr, num)						\
148	"# ALT: oldnstr\n"						\
149	"661:\n\t" oldinstr "\n662:\n"					\
150	"# ALT: padding\n"						\
151	".skip -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * "		\
152		"((" alt_rlen(num) ")-(" alt_slen ")),0x90\n"		\
153	alt_end_marker ":\n"
154
155/*
156 * gas compatible max based on the idea from:
157 * http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
158 *
159 * The additional "-" is needed because gas uses a "true" value of -1.
160 */
161#define alt_max_short(a, b)	"((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"
162
163/*
164 * Pad the original instruction with additional NOPs if the longer of the
165 * two replacement alternatives exceeds its length.
166 */
167#define OLDINSTR_2(oldinstr, num1, num2) \
168	"# ALT: oldinstr2\n"									\
169	"661:\n\t" oldinstr "\n662:\n"								\
170	"# ALT: padding2\n"									\
171	".skip -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * "	\
172		"(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")), 0x90\n"	\
173	alt_end_marker ":\n"
174
175#define OLDINSTR_3(oldinsn, n1, n2, n3)								\
176	"# ALT: oldinstr3\n"									\
177	"661:\n\t" oldinsn "\n662:\n"								\
178	"# ALT: padding3\n"									\
179	".skip -((" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3))	\
180		" - (" alt_slen ")) > 0) * "							\
181		"(" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3))	\
182		" - (" alt_slen ")), 0x90\n"							\
183	alt_end_marker ":\n"
184
185#define ALTINSTR_ENTRY(feature, num)					      \
186	" .long 661b - .\n"				/* label           */ \
187	" .long " b_replacement(num)"f - .\n"		/* new instruction */ \
188	" .word " __stringify(feature) "\n"		/* feature bit     */ \
189	" .byte " alt_total_slen "\n"			/* source len      */ \
190	" .byte " alt_rlen(num) "\n"			/* replacement len */
191
192#define ALTINSTR_REPLACEMENT(newinstr, num)		/* replacement */	\
193	"# ALT: replacement " #num "\n"						\
194	b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n"
195
196/* alternative assembly primitive: */
197#define ALTERNATIVE(oldinstr, newinstr, feature)			\
198	OLDINSTR(oldinstr, 1)						\
199	".pushsection .altinstructions,\"a\"\n"				\
200	ALTINSTR_ENTRY(feature, 1)					\
201	".popsection\n"							\
202	".pushsection .altinstr_replacement, \"ax\"\n"			\
203	ALTINSTR_REPLACEMENT(newinstr, 1)				\
204	".popsection\n"
205
206#define ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2)\
207	OLDINSTR_2(oldinstr, 1, 2)					\
208	".pushsection .altinstructions,\"a\"\n"				\
209	ALTINSTR_ENTRY(feature1, 1)					\
210	ALTINSTR_ENTRY(feature2, 2)					\
211	".popsection\n"							\
212	".pushsection .altinstr_replacement, \"ax\"\n"			\
213	ALTINSTR_REPLACEMENT(newinstr1, 1)				\
214	ALTINSTR_REPLACEMENT(newinstr2, 2)				\
215	".popsection\n"
216
217/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
218#define ALTERNATIVE_TERNARY(oldinstr, feature, newinstr_yes, newinstr_no) \
219	ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS,	\
220		      newinstr_yes, feature)
221
222#define ALTERNATIVE_3(oldinsn, newinsn1, feat1, newinsn2, feat2, newinsn3, feat3) \
223	OLDINSTR_3(oldinsn, 1, 2, 3)						\
224	".pushsection .altinstructions,\"a\"\n"					\
225	ALTINSTR_ENTRY(feat1, 1)						\
226	ALTINSTR_ENTRY(feat2, 2)						\
227	ALTINSTR_ENTRY(feat3, 3)						\
228	".popsection\n"								\
229	".pushsection .altinstr_replacement, \"ax\"\n"				\
230	ALTINSTR_REPLACEMENT(newinsn1, 1)					\
231	ALTINSTR_REPLACEMENT(newinsn2, 2)					\
232	ALTINSTR_REPLACEMENT(newinsn3, 3)					\
233	".popsection\n"
234
235/*
236 * Alternative instructions for different CPU types or capabilities.
237 *
238 * This allows the use of optimized instructions even on generic binary
239 * kernels.
240 *
241 * The length of oldinstr must be greater than or equal to the length of
242 * newinstr; it can be padded with NOPs as needed.
243 *
244 * For non-barrier-like inlines, please define new variants
245 * without volatile and the memory clobber.
246 */
247#define alternative(oldinstr, newinstr, feature)			\
248	asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
249
250#define alternative_2(oldinstr, newinstr1, feature1, newinstr2, feature2) \
251	asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2) ::: "memory")
252
253#define alternative_ternary(oldinstr, feature, newinstr_yes, newinstr_no) \
254	asm_inline volatile(ALTERNATIVE_TERNARY(oldinstr, feature, newinstr_yes, newinstr_no) ::: "memory")
255
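
Because ALTERNATIVE_TERNARY is built from ALTERNATIVE_2 with X86_FEATURE_ALWAYS, the "no" replacement is always patched in first and the "yes" replacement then overrides it if @feature is set, so the original instruction never survives. A purely illustrative sketch (the barrier instructions and feature bit are just an example of the pattern, not code from this header):

static inline void example_full_barrier(void)
{
	alternative_ternary("nop",			/* always replaced */
			    X86_FEATURE_XMM2,
			    "mfence",			/* feature set */
			    "lock; addl $0,-4(%%rsp)");	/* feature clear */
}
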
256/*
257 * Alternative inline assembly with input.
258 *
259 * Peculiarities:
260 * No memory clobber here.
261 * Argument numbers start with 1.
262 * An unused argument 0 is kept to preserve API compatibility.
263 */
264#define alternative_input(oldinstr, newinstr, feature, input...)	\
265	asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature)	\
266		: : "i" (0), ## input)
267
268/*
269 * This is similar to alternative_input, but it has two features and
270 * corresponding instructions.
271 *
272 * If CPU has feature2, newinstr2 is used.
273 * Otherwise, if CPU has feature1, newinstr1 is used.
274 * Otherwise, oldinstr is used.
275 */
276#define alternative_input_2(oldinstr, newinstr1, feature1, newinstr2,	     \
277			   feature2, input...)				     \
278	asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1,     \
279		newinstr2, feature2)					     \
280		: : "i" (0), ## input)
281
282/* Like alternative_input, but with a single output argument */
283#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
284	asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature)	\
285		: output : "i" (0), ## input)
286
287/* Like alternative_io, but for replacing a direct call with another one. */
288#define alternative_call(oldfunc, newfunc, feature, output, input...)	\
289	asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
290		: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
291
292/*
293 * Like alternative_call, but there are two features and respective functions.
294 * If CPU has feature2, function2 is used.
295 * Otherwise, if CPU has feature1, function1 is used.
296 * Otherwise, old function is used.
297 */
298#define alternative_call_2(oldfunc, newfunc1, feature1, newfunc2, feature2,   \
299			   output, input...)				      \
300	asm_inline volatile (ALTERNATIVE_2("call %P[old]", "call %P[new1]", feature1,\
301		"call %P[new2]", feature2)				      \
302		: output, ASM_CALL_CONSTRAINT				      \
303		: [old] "i" (oldfunc), [new1] "i" (newfunc1),		      \
304		  [new2] "i" (newfunc2), ## input)
305
306/*
307 * Use this macro if you need more than one output parameter
308 * in alternative_io().
309 */
310#define ASM_OUTPUT2(a...) a
311
312/*
313 * Use this macro if you need clobbers but no inputs in
314 * alternative_{input,io,call}().
315 */
316#define ASM_NO_INPUT_CLOBBER(clbr...) "i" (0) : clbr
317
318#else /* __ASSEMBLY__ */
319
320#ifdef CONFIG_SMP
321	.macro LOCK_PREFIX
322672:	lock
323	.pushsection .smp_locks,"a"
324	.balign 4
325	.long 672b - .
326	.popsection
327	.endm
328#else
329	.macro LOCK_PREFIX
330	.endm
331#endif
332
333/*
334 * objtool annotation to ignore the alternatives and only consider the original
335 * instruction(s).
336 */
337.macro ANNOTATE_IGNORE_ALTERNATIVE
338	.Lannotate_\@:
339	.pushsection .discard.ignore_alts
340	.long .Lannotate_\@ - .
341	.popsection
342.endm
343
344/*
345 * Issue one struct alt_instr descriptor entry (need to put it into
346 * the section .altinstructions, see below). This entry contains
347 * enough information for the alternatives patching code to patch an
348 * instruction. See apply_alternatives().
349 */
350.macro altinstruction_entry orig alt feature orig_len alt_len
351	.long \orig - .
352	.long \alt - .
353	.word \feature
354	.byte \orig_len
355	.byte \alt_len
356.endm
357
358/*
359 * Define an alternative between two instructions. If @feature is
360 * present, early code in apply_alternatives() replaces @oldinstr with
361 * @newinstr. ".skip" directive takes care of proper instruction padding
362 * in case @newinstr is longer than @oldinstr.
363 */
364.macro ALTERNATIVE oldinstr, newinstr, feature
365140:
366	\oldinstr
367141:
368	.skip -(((144f-143f)-(141b-140b)) > 0) * ((144f-143f)-(141b-140b)),0x90
369142:
370
371	.pushsection .altinstructions,"a"
372	altinstruction_entry 140b,143f,\feature,142b-140b,144f-143f
373	.popsection
374
375	.pushsection .altinstr_replacement,"ax"
376143:
377	\newinstr
378144:
379	.popsection
380.endm
381
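
A usage sketch in assembly, modeled on the kernel entry code's page-table-isolation macros: the whole body is skipped with a patched-in jump unless X86_FEATURE_PTI was detected at boot (the macro name and the CR3 manipulation details here are illustrative).

.macro EXAMPLE_SWITCH_CR3 scratch_reg:req
	/* Without PTI, the jump survives and the body below is never run. */
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
	movq	%cr3, \scratch_reg
	/* ... adjust page-table bits in \scratch_reg here ... */
	movq	\scratch_reg, %cr3
.Lend_\@:
.endm
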
382#define old_len			141b-140b
383#define new_len1		144f-143f
384#define new_len2		145f-144f
385#define new_len3		146f-145f
386
387/*
388 * gas compatible max based on the idea from:
389 * http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
390 *
391 * The additional "-" is needed because gas uses a "true" value of -1.
392 */
393#define alt_max_2(a, b)		((a) ^ (((a) ^ (b)) & -(-((a) < (b)))))
394#define alt_max_3(a, b, c)	(alt_max_2(alt_max_2(a, b), c))
395
396
397/*
398 * Same as the ALTERNATIVE macro above, but for two alternatives. If the CPU
399 * has @feature1, it replaces @oldinstr with @newinstr1. If the CPU has
400 * @feature2, it replaces @oldinstr with @newinstr2.
401 */
402.macro ALTERNATIVE_2 oldinstr, newinstr1, feature1, newinstr2, feature2
403140:
404	\oldinstr
405141:
406	.skip -((alt_max_2(new_len1, new_len2) - (old_len)) > 0) * \
407		(alt_max_2(new_len1, new_len2) - (old_len)),0x90
408142:
409
410	.pushsection .altinstructions,"a"
411	altinstruction_entry 140b,143f,\feature1,142b-140b,144f-143f
412	altinstruction_entry 140b,144f,\feature2,142b-140b,145f-144f
413	.popsection
414
415	.pushsection .altinstr_replacement,"ax"
416143:
417	\newinstr1
418144:
419	\newinstr2
420145:
421	.popsection
422.endm
423
424.macro ALTERNATIVE_3 oldinstr, newinstr1, feature1, newinstr2, feature2, newinstr3, feature3
425140:
426	\oldinstr
427141:
428	.skip -((alt_max_3(new_len1, new_len2, new_len3) - (old_len)) > 0) * \
429		(alt_max_3(new_len1, new_len2, new_len3) - (old_len)),0x90
430142:
431
432	.pushsection .altinstructions,"a"
433	altinstruction_entry 140b,143f,\feature1,142b-140b,144f-143f
434	altinstruction_entry 140b,144f,\feature2,142b-140b,145f-144f
435	altinstruction_entry 140b,145f,\feature3,142b-140b,146f-145f
436	.popsection
437
438	.pushsection .altinstr_replacement,"ax"
439143:
440	\newinstr1
441144:
442	\newinstr2
443145:
444	\newinstr3
445146:
446	.popsection
447.endm
448
449/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
450#define ALTERNATIVE_TERNARY(oldinstr, feature, newinstr_yes, newinstr_no) \
451	ALTERNATIVE_2 oldinstr, newinstr_no, X86_FEATURE_ALWAYS,	\
452	newinstr_yes, feature
453
454#endif /* __ASSEMBLY__ */
455
456#endif /* _ASM_X86_ALTERNATIVE_H */