/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _KSTACK_H
#define _KSTACK_H

#include <linux/thread_info.h>
#include <linux/sched.h>
#include <asm/ptrace.h>
#include <asm/irq.h>

/* SP must be STACK_BIAS adjusted already. */
static inline bool kstack_valid(struct thread_info *tp, unsigned long sp)
{
	unsigned long base = (unsigned long) tp;

	/* Stack pointer must be 16-byte aligned. */
	if (sp & (16UL - 1))
		return false;

	if (sp >= (base + sizeof(struct thread_info)) &&
	    sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
		return true;

	if (hardirq_stack[tp->cpu]) {
		base = (unsigned long) hardirq_stack[tp->cpu];
		if (sp >= base &&
		    sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
			return true;
		base = (unsigned long) softirq_stack[tp->cpu];
		if (sp >= base &&
		    sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
			return true;
	}
	return false;
}
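
/*
 * Illustrative sketch, not part of the original header: a frame walker
 * would typically validate each frame pointer with kstack_valid() before
 * dereferencing it, roughly like this ("ksp", "tsk" and "pc" below are
 * placeholders for the caller's state):
 *
 *	struct thread_info *tp = task_thread_info(tsk);
 *	unsigned long fp = ksp + STACK_BIAS;
 *
 *	do {
 *		struct sparc_stackf *sf;
 *
 *		if (!kstack_valid(tp, fp))
 *			break;
 *		sf = (struct sparc_stackf *) fp;
 *		pc = sf->callers_pc;
 *		fp = (unsigned long) sf->fp + STACK_BIAS;
 *	} while (fp);
 */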

/* Does "regs" point to a valid pt_regs trap frame? */
static inline bool kstack_is_trap_frame(struct thread_info *tp, struct pt_regs *regs)
{
	unsigned long base = (unsigned long) tp;
	unsigned long addr = (unsigned long) regs;

	if (addr >= base &&
	    addr <= (base + THREAD_SIZE - sizeof(*regs)))
		goto check_magic;

	if (hardirq_stack[tp->cpu]) {
		base = (unsigned long) hardirq_stack[tp->cpu];
		if (addr >= base &&
		    addr <= (base + THREAD_SIZE - sizeof(*regs)))
			goto check_magic;
		base = (unsigned long) softirq_stack[tp->cpu];
		if (addr >= base &&
		    addr <= (base + THREAD_SIZE - sizeof(*regs)))
			goto check_magic;
	}
	return false;

check_magic:
	/* The low bits of ->magic hold the trap type; mask them off. */
	if ((regs->magic & ~0x1ff) == PT_REGS_MAGIC)
		return true;
	return false;
}
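
/*
 * Illustrative sketch, not part of the original header: while unwinding,
 * a pt_regs trap frame saved just above a struct sparc_stackf can be
 * detected with kstack_is_trap_frame() so the trapped PC is reported
 * instead of the ordinary return address, e.g.:
 *
 *	struct sparc_stackf *sf = (struct sparc_stackf *) fp;
 *	struct pt_regs *regs = (struct pt_regs *) (sf + 1);
 *
 *	if (kstack_is_trap_frame(tp, regs))
 *		pc = regs->tpc;
 *	else
 *		pc = sf->callers_pc;
 */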

static inline __attribute__((always_inline)) void *set_hardirq_stack(void)
{
	void *orig_sp, *sp = hardirq_stack[smp_processor_id()];

	__asm__ __volatile__("mov %%sp, %0" : "=r" (orig_sp));
	/* Only switch if we are not already running on the hardirq stack. */
	if (orig_sp < sp ||
	    orig_sp > (sp + THREAD_SIZE)) {
		sp += THREAD_SIZE - 192 - STACK_BIAS;
		__asm__ __volatile__("mov %0, %%sp" : : "r" (sp));
	}

	return orig_sp;
}
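
/*
 * Illustrative sketch, not part of the original header: set_hardirq_stack()
 * above and restore_hardirq_stack() below are meant to bracket interrupt
 * dispatch, roughly:
 *
 *	void *orig_sp = set_hardirq_stack();
 *
 *	... run the interrupt handlers ...
 *
 *	restore_hardirq_stack(orig_sp);
 */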

static inline __attribute__((always_inline)) void restore_hardirq_stack(void *orig_sp)
{
	__asm__ __volatile__("mov %0, %%sp" : : "r" (orig_sp));
}

#endif /* _KSTACK_H */