/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __SPARC_MMAN_H__
#define __SPARC_MMAN_H__

#include <uapi/asm/mman.h>

#ifndef __ASSEMBLY__
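/* arch_mmap_check() lets the architecture veto an mmap() request before
 * the core VM does any work. The SPARC implementation lives in the arch
 * mmap code (not in this header) and rejects address/length combinations
 * that fall outside the task's usable virtual address space.
 */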
#define arch_mmap_check(addr,len,flags) sparc_mmap_check(addr,len)
int sparc_mmap_check(unsigned long addr, unsigned long len);

#ifdef CONFIG_SPARC64
#include <asm/adi_64.h>

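/* IPI callback: when ADI is first enabled on an mm, this runs on every
 * CPU in that mm's cpumask so that threads of the mm which are currently
 * on-CPU get TSTATE_MCDE set right away instead of waiting for their
 * next context switch.
 */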
static inline void ipi_set_tstate_mcde(void *arg)
{
        struct mm_struct *mm = arg;

        /* Set TSTATE_MCDE for the current task if it is running with the
         * address map that ADI has been enabled on. Tasks that are not
         * running will pick it up automatically at their next context
         * switch.
         */
        if (current->mm == mm) {
                struct pt_regs *regs;

                regs = task_pt_regs(current);
                regs->tstate |= TSTATE_MCDE;
        }
}

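/* arch_calc_vm_prot_bits() translates mmap()/mprotect() PROT_* bits into
 * extra VM_* flags. On an ADI-capable cpu, PROT_ADI becomes VM_SPARC_ADI;
 * the first time ADI is enabled for an mm, TSTATE_MCDE is also set for
 * every running thread of that mm (see ipi_set_tstate_mcde() above).
 */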
#define arch_calc_vm_prot_bits(prot, pkey) sparc_calc_vm_prot_bits(prot)
static inline unsigned long sparc_calc_vm_prot_bits(unsigned long prot)
{
        if (adi_capable() && (prot & PROT_ADI)) {
                struct pt_regs *regs;

                if (!current->mm->context.adi) {
                        regs = task_pt_regs(current);
                        regs->tstate |= TSTATE_MCDE;
                        current->mm->context.adi = true;
                        on_each_cpu_mask(mm_cpumask(current->mm),
                                         ipi_set_tstate_mcde, current->mm, 0);
                }
                return VM_SPARC_ADI;
        } else {
                return 0;
        }
}

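/* arch_vm_get_page_prot() folds architecture-specific VM flags into the
 * page protection: a VMA with VM_SPARC_ADI gets the _PAGE_MCD_4V PTE bit
 * so its pages are mapped with memory corruption detection enabled.
 */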
#define arch_vm_get_page_prot(vm_flags) sparc_vm_get_page_prot(vm_flags)
static inline pgprot_t sparc_vm_get_page_prot(unsigned long vm_flags)
{
        return (vm_flags & VM_SPARC_ADI) ? __pgprot(_PAGE_MCD_4V) : __pgprot(0);
}

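/* arch_validate_prot() is the hook mprotect() uses to accept or reject
 * PROT_* bits. Only the flags listed below (including PROT_ADI) are
 * allowed; returning 0 for anything else makes the syscall fail with
 * -EINVAL.
 */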
#define arch_validate_prot(prot, addr) sparc_validate_prot(prot, addr)
static inline int sparc_validate_prot(unsigned long prot, unsigned long addr)
{
        if (prot & ~(PROT_READ | PROT_WRITE | PROT_EXEC | PROT_SEM | PROT_ADI))
                return 0;
        return 1;
}

#define arch_validate_flags(vm_flags) arch_validate_flags(vm_flags)
/* arch_validate_flags() - Ensure the combination of flags is valid for
 * the VMA.
 */
static inline bool arch_validate_flags(unsigned long vm_flags)
{
        /* If ADI is being enabled on this VMA, check that the platform
         * is ADI capable and that the VMA is suitable for ADI.
         */
        if (vm_flags & VM_SPARC_ADI) {
                if (!adi_capable())
                        return false;

                /* ADI cannot be enabled on PFN-mapped pages */
                if (vm_flags & (VM_PFNMAP | VM_MIXEDMAP))
                        return false;

                /* Mergeable pages can become unmergeable once ADI is
                 * enabled on them, even if their data is identical,
                 * because pages with identical data may still carry
                 * different ADI tags. Disallow ADI on mergeable pages.
                 */
                if (vm_flags & VM_MERGEABLE)
                        return false;
        }
        return true;
}
#endif /* CONFIG_SPARC64 */

#endif /* __ASSEMBLY__ */
#endif /* __SPARC_MMAN_H__ */
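/*
 * Illustrative sketch (not part of the kernel sources): roughly how a
 * user-space program might request ADI on a mapping. This assumes an
 * ADI-capable cpu and that PROT_ADI is visible through <asm/mman.h>.
 * The mprotect() call is what flows through sparc_validate_prot(),
 * sparc_calc_vm_prot_bits() and arch_validate_flags() above; assigning
 * ADI version tags to the memory is a separate step not shown here.
 *
 *        #include <sys/mman.h>
 *        #include <asm/mman.h>
 *
 *        size_t len = 2 * 1024 * 1024;
 *        void *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
 *                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
 *
 *        if (buf != MAP_FAILED)
 *                mprotect(buf, len, PROT_READ | PROT_WRITE | PROT_ADI);
 */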