arch/powerpc/include/asm/switch_to.h (v6.9.4)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
#ifndef _ASM_POWERPC_SWITCH_TO_H
#define _ASM_POWERPC_SWITCH_TO_H

#include <linux/sched.h>
#include <asm/reg.h>

struct thread_struct;
struct task_struct;
struct pt_regs;

extern struct task_struct *__switch_to(struct task_struct *,
	struct task_struct *);
#define switch_to(prev, next, last)	((last) = __switch_to((prev), (next)))

extern struct task_struct *_switch(struct thread_struct *prev,
				   struct thread_struct *next);

extern void switch_booke_debug_regs(struct debug_reg *new_debug);

extern int emulate_altivec(struct pt_regs *);

#ifdef CONFIG_PPC_BOOK3S_64
void restore_math(struct pt_regs *regs);
#else
static inline void restore_math(struct pt_regs *regs)
{
}
#endif

void restore_tm_state(struct pt_regs *regs);

extern void flush_all_to_thread(struct task_struct *);
extern void giveup_all(struct task_struct *);

#ifdef CONFIG_PPC_FPU
extern void enable_kernel_fp(void);
extern void flush_fp_to_thread(struct task_struct *);
extern void giveup_fpu(struct task_struct *);
extern void save_fpu(struct task_struct *);
static inline void disable_kernel_fp(void)
{
	msr_check_and_clear(MSR_FP);
}
#else
static inline void save_fpu(struct task_struct *t) { }
static inline void flush_fp_to_thread(struct task_struct *t) { }
static inline void enable_kernel_fp(void)
{
	BUILD_BUG();
}
#endif

#ifdef CONFIG_ALTIVEC
extern void enable_kernel_altivec(void);
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void save_altivec(struct task_struct *);
static inline void disable_kernel_altivec(void)
{
	msr_check_and_clear(MSR_VEC);
}
#else
static inline void save_altivec(struct task_struct *t) { }
static inline void __giveup_altivec(struct task_struct *t) { }
static inline void enable_kernel_altivec(void)
{
	BUILD_BUG();
}

static inline void disable_kernel_altivec(void)
{
	BUILD_BUG();
}
#endif

#ifdef CONFIG_VSX
extern void enable_kernel_vsx(void);
extern void flush_vsx_to_thread(struct task_struct *);
static inline void disable_kernel_vsx(void)
{
	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
}
#else
static inline void enable_kernel_vsx(void)
{
	BUILD_BUG();
}

static inline void disable_kernel_vsx(void)
{
	BUILD_BUG();
}
#endif

#ifdef CONFIG_SPE
extern void enable_kernel_spe(void);
extern void flush_spe_to_thread(struct task_struct *);
extern void giveup_spe(struct task_struct *);
extern void __giveup_spe(struct task_struct *);
static inline void disable_kernel_spe(void)
{
	msr_check_and_clear(MSR_SPE);
}
#else
static inline void __giveup_spe(struct task_struct *t) { }
#endif

static inline void clear_task_ebb(struct task_struct *t)
{
#ifdef CONFIG_PPC_BOOK3S_64
	/* EBB perf events are not inherited, so clear all EBB state. */
	t->thread.ebbrr = 0;
	t->thread.ebbhr = 0;
	t->thread.bescr = 0;
	t->thread.mmcr2 = 0;
	t->thread.mmcr0 = 0;
	t->thread.siar = 0;
	t->thread.sdar = 0;
	t->thread.sier = 0;
	t->thread.used_ebb = 0;
#endif
}

void kvmppc_save_user_regs(void);
void kvmppc_save_current_sprs(void);

extern int set_thread_tidr(struct task_struct *t);

#endif /* _ASM_POWERPC_SWITCH_TO_H */
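
The declarations above form the powerpc context-switch interface; the enable_kernel_*()/disable_kernel_*() pairs are what in-kernel users of FP, Altivec or VSX call around a short burst of register use, since kernel code cannot touch those units freely. The following sketch is not part of the header: do_vsx_block() is a hypothetical worker standing in for real VSX instructions, and the surrounding shape is loosely modelled on the pattern used by in-tree powerpc crypto code.

#include <linux/types.h>
#include <linux/preempt.h>
#include <asm/switch_to.h>

/* Hypothetical worker that actually issues VSX instructions. */
void do_vsx_block(void *dst, const void *src, size_t len);

static void vsx_copy_transform(void *dst, const void *src, size_t len)
{
	/*
	 * Preemption must stay off while the unit is enabled for kernel
	 * use, because the kernel's transient FP/VSX register state is
	 * not preserved across a context switch.
	 */
	preempt_disable();
	enable_kernel_vsx();

	do_vsx_block(dst, src, len);

	disable_kernel_vsx();	/* clears MSR_FP|MSR_VEC|MSR_VSX */
	preempt_enable();
}

Note that when CONFIG_VSX is not set, enable_kernel_vsx() is the BUILD_BUG() stub shown above, so a caller like this must itself only be built in VSX-enabled configurations.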