Loading...
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4 */
5#ifndef __ASM_TLB_H
6#define __ASM_TLB_H
7
8#include <linux/mm_types.h>
9#include <asm/cpu-features.h>
10#include <asm/loongarch.h>
11
12/*
13 * TLB Invalidate Flush
14 */
/*
 * Emit the LoongArch "tlbclr" instruction: invalidates the TLB lines
 * selected by current CSR state (see the LoongArch ISA manual, vol.1,
 * for the exact matching rules).
 */
static inline void tlbclr(void)
{
	__asm__ __volatile__("tlbclr");
}
19
/*
 * Emit the LoongArch "tlbflush" instruction: flushes TLB lines as
 * defined by the ISA (scope controlled by CSR state -- see ISA manual).
 */
static inline void tlbflush(void)
{
	__asm__ __volatile__("tlbflush");
}
24
25/*
26 * TLB R/W operations.
27 */
/*
 * Emit "tlbsrch": probe the TLB for an entry matching the current
 * CSR.TLBEHI contents; the result index lands in CSR.TLBIDX
 * (per the LoongArch ISA manual).
 */
static inline void tlb_probe(void)
{
	__asm__ __volatile__("tlbsrch");
}
32
/*
 * Emit "tlbrd": read the TLB entry selected by CSR.TLBIDX into the
 * TLB-related CSRs (per the LoongArch ISA manual).
 */
static inline void tlb_read(void)
{
	__asm__ __volatile__("tlbrd");
}
37
/*
 * Emit "tlbwr": write the TLB entry selected by CSR.TLBIDX from the
 * TLB-related CSRs (per the LoongArch ISA manual).
 */
static inline void tlb_write_indexed(void)
{
	__asm__ __volatile__("tlbwr");
}
42
/*
 * Emit "tlbfill": write the CSR-held entry into a hardware-chosen
 * ("random") TLB slot (per the LoongArch ISA manual).
 */
static inline void tlb_write_random(void)
{
	__asm__ __volatile__("tlbfill");
}
47
/*
 * Operation codes for the "invtlb" instruction (its 5-bit op field).
 * Ops 0x0-0x6 act on the host/current TLB; ops 0x9-0x16 are for guest
 * (virtualization) use and additionally match a GID.
 */
enum invtlb_ops {
	/* Invalidate all TLB entries */
	INVTLB_ALL = 0x0,
	/* Invalidate all entries of the current TLB */
	INVTLB_CURRENT_ALL = 0x1,
	/* Invalidate all global==1 lines in the current TLB */
	INVTLB_CURRENT_GTRUE = 0x2,
	/* Invalidate all global==0 lines in the current TLB */
	INVTLB_CURRENT_GFALSE = 0x3,
	/* Invalidate global==0 lines with matching ASID in the current TLB */
	INVTLB_GFALSE_AND_ASID = 0x4,
	/* Invalidate the line matching addr with global==0 and matching ASID */
	INVTLB_ADDR_GFALSE_AND_ASID = 0x5,
	/* Invalidate the line matching addr with global==1 or matching ASID */
	INVTLB_ADDR_GTRUE_OR_ASID = 0x6,
	/* Invalidate guest-TLB lines with matching GID */
	INVGTLB_GID = 0x9,
	/* Invalidate guest-TLB lines with global==1 and matching GID */
	INVGTLB_GID_GTRUE = 0xa,
	/* Invalidate guest-TLB lines with global==0 and matching GID */
	INVGTLB_GID_GFALSE = 0xb,
	/* Invalidate guest-TLB lines with global==0, matching GID and ASID */
	INVGTLB_GID_GFALSE_ASID = 0xc,
	/* Invalidate guest-TLB lines with global==0, matching GID, ASID and addr */
	INVGTLB_GID_GFALSE_ASID_ADDR = 0xd,
	/* Invalidate guest-TLB lines with global==1, matching GID, ASID and addr */
	INVGTLB_GID_GTRUE_ASID_ADDR = 0xe,
	/* Invalidate all gva->gpa guest-TLB lines, any GID */
	INVGTLB_ALLGID_GVA_TO_GPA = 0x10,
	/* Invalidate all gpa->hpa TLB lines, any GID */
	INVTLB_ALLGID_GPA_TO_HPA = 0x11,
	/* Invalidate all TLB lines of every GID (gva->gpa and gpa->hpa) */
	INVTLB_ALLGID = 0x12,
	/* Invalidate gva->gpa guest-TLB lines with matching GID */
	INVGTLB_GID_GVA_TO_GPA = 0x13,
	/* Invalidate gpa->hpa TLB lines with matching GID */
	INVTLB_GID_GPA_TO_HPA = 0x14,
	/* Invalidate all TLB lines with matching GID (gva->gpa and gpa->hpa) */
	INVTLB_GID_ALL = 0x15,
	/* Invalidate gpa->hpa TLB lines with matching GID and addr */
	INVTLB_GID_ADDR = 0x16,
};
90
91/*
92 * invtlb op info addr
93 * (0x1 << 26) | (0x24 << 20) | (0x13 << 15) |
94 * (addr << 10) | (info << 5) | op
95 */
96static inline void invtlb(u32 op, u32 info, u64 addr)
97{
98 __asm__ __volatile__(
99 "parse_r addr,%0\n\t"
100 "parse_r info,%1\n\t"
101 ".word ((0x6498000) | (addr << 10) | (info << 5) | %2)\n\t"
102 :
103 : "r"(addr), "r"(info), "i"(op)
104 :
105 );
106}
107
108static inline void invtlb_addr(u32 op, u32 info, u64 addr)
109{
110 __asm__ __volatile__(
111 "parse_r addr,%0\n\t"
112 ".word ((0x6498000) | (addr << 10) | (0 << 5) | %1)\n\t"
113 :
114 : "r"(addr), "i"(op)
115 :
116 );
117}
118
119static inline void invtlb_info(u32 op, u32 info, u64 addr)
120{
121 __asm__ __volatile__(
122 "parse_r info,%0\n\t"
123 ".word ((0x6498000) | (0 << 10) | (info << 5) | %1)\n\t"
124 :
125 : "r"(info), "i"(op)
126 :
127 );
128}
129
130static inline void invtlb_all(u32 op, u32 info, u64 addr)
131{
132 __asm__ __volatile__(
133 ".word ((0x6498000) | (0 << 10) | (0 << 5) | %0)\n\t"
134 :
135 : "i"(op)
136 :
137 );
138}
139
140#define __tlb_remove_tlb_entry(tlb, ptep, address) do { } while (0)
141
142static void tlb_flush(struct mmu_gather *tlb);
143
144#define tlb_flush tlb_flush
145#include <asm-generic/tlb.h>
146
/*
 * mmu_gather flush hook for <asm-generic/tlb.h> (bound via the
 * "#define tlb_flush tlb_flush" above).
 *
 * Builds a throwaway vma on the stack because flush_tlb_range() takes a
 * vma rather than a bare mm.  Only vm_mm and vm_flags are initialized --
 * this assumes flush_tlb_range() reads nothing else from the vma
 * (NOTE(review): confirm against the flush_tlb_range() implementation).
 */
static inline void tlb_flush(struct mmu_gather *tlb)
{
	struct vm_area_struct vma;

	vma.vm_mm = tlb->mm;
	vma.vm_flags = 0;
	/* Full address-space teardown: flush the whole mm in one go. */
	if (tlb->fullmm) {
		flush_tlb_mm(tlb->mm);
		return;
	}

	/* Otherwise flush just the gathered [start, end) range. */
	flush_tlb_range(&vma, tlb->start, tlb->end);
}
160
161extern void handle_tlb_load(void);
162extern void handle_tlb_store(void);
163extern void handle_tlb_modify(void);
164extern void handle_tlb_refill(void);
165extern void handle_tlb_protect(void);
166
167extern void dump_tlb_all(void);
168extern void dump_tlb_regs(void);
169
170#endif /* __ASM_TLB_H */
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4 */
5#ifndef __ASM_TLB_H
6#define __ASM_TLB_H
7
8#include <linux/mm_types.h>
9#include <asm/cpu-features.h>
10#include <asm/loongarch.h>
11
12/*
13 * TLB Invalidate Flush
14 */
/*
 * Emit the LoongArch "tlbclr" instruction: invalidates the TLB lines
 * selected by current CSR state (see the LoongArch ISA manual, vol.1,
 * for the exact matching rules).
 */
static inline void tlbclr(void)
{
	__asm__ __volatile__("tlbclr");
}
19
/*
 * Emit the LoongArch "tlbflush" instruction: flushes TLB lines as
 * defined by the ISA (scope controlled by CSR state -- see ISA manual).
 */
static inline void tlbflush(void)
{
	__asm__ __volatile__("tlbflush");
}
24
25/*
26 * TLB R/W operations.
27 */
/*
 * Emit "tlbsrch": probe the TLB for an entry matching the current
 * CSR.TLBEHI contents; the result index lands in CSR.TLBIDX
 * (per the LoongArch ISA manual).
 */
static inline void tlb_probe(void)
{
	__asm__ __volatile__("tlbsrch");
}
32
/*
 * Emit "tlbrd": read the TLB entry selected by CSR.TLBIDX into the
 * TLB-related CSRs (per the LoongArch ISA manual).
 */
static inline void tlb_read(void)
{
	__asm__ __volatile__("tlbrd");
}
37
/*
 * Emit "tlbwr": write the TLB entry selected by CSR.TLBIDX from the
 * TLB-related CSRs (per the LoongArch ISA manual).
 */
static inline void tlb_write_indexed(void)
{
	__asm__ __volatile__("tlbwr");
}
42
/*
 * Emit "tlbfill": write the CSR-held entry into a hardware-chosen
 * ("random") TLB slot (per the LoongArch ISA manual).
 */
static inline void tlb_write_random(void)
{
	__asm__ __volatile__("tlbfill");
}
47
/*
 * Operation codes for the "invtlb" instruction (its 5-bit op field).
 * Ops 0x0-0x6 act on the host/current TLB; ops 0x9-0x16 are for guest
 * (virtualization) use and additionally match a GID.
 */
enum invtlb_ops {
	/* Invalidate all TLB entries */
	INVTLB_ALL = 0x0,
	/* Invalidate all entries of the current TLB */
	INVTLB_CURRENT_ALL = 0x1,
	/* Invalidate all global==1 lines in the current TLB */
	INVTLB_CURRENT_GTRUE = 0x2,
	/* Invalidate all global==0 lines in the current TLB */
	INVTLB_CURRENT_GFALSE = 0x3,
	/* Invalidate global==0 lines with matching ASID in the current TLB */
	INVTLB_GFALSE_AND_ASID = 0x4,
	/* Invalidate the line matching addr with global==0 and matching ASID */
	INVTLB_ADDR_GFALSE_AND_ASID = 0x5,
	/* Invalidate the line matching addr with global==1 or matching ASID */
	INVTLB_ADDR_GTRUE_OR_ASID = 0x6,
	/* Invalidate guest-TLB lines with matching GID */
	INVGTLB_GID = 0x9,
	/* Invalidate guest-TLB lines with global==1 and matching GID */
	INVGTLB_GID_GTRUE = 0xa,
	/* Invalidate guest-TLB lines with global==0 and matching GID */
	INVGTLB_GID_GFALSE = 0xb,
	/* Invalidate guest-TLB lines with global==0, matching GID and ASID */
	INVGTLB_GID_GFALSE_ASID = 0xc,
	/* Invalidate guest-TLB lines with global==0, matching GID, ASID and addr */
	INVGTLB_GID_GFALSE_ASID_ADDR = 0xd,
	/* Invalidate guest-TLB lines with global==1, matching GID, ASID and addr */
	INVGTLB_GID_GTRUE_ASID_ADDR = 0xe,
	/* Invalidate all gva->gpa guest-TLB lines, any GID */
	INVGTLB_ALLGID_GVA_TO_GPA = 0x10,
	/* Invalidate all gpa->hpa TLB lines, any GID */
	INVTLB_ALLGID_GPA_TO_HPA = 0x11,
	/* Invalidate all TLB lines of every GID (gva->gpa and gpa->hpa) */
	INVTLB_ALLGID = 0x12,
	/* Invalidate gva->gpa guest-TLB lines with matching GID */
	INVGTLB_GID_GVA_TO_GPA = 0x13,
	/* Invalidate gpa->hpa TLB lines with matching GID */
	INVTLB_GID_GPA_TO_HPA = 0x14,
	/* Invalidate all TLB lines with matching GID (gva->gpa and gpa->hpa) */
	INVTLB_GID_ALL = 0x15,
	/* Invalidate gpa->hpa TLB lines with matching GID and addr */
	INVTLB_GID_ADDR = 0x16,
};
90
/*
 * invtlb op, info, addr -- invalidate TLB entries selected by "op"
 * (see enum invtlb_ops above).
 *
 * __always_inline because "op" feeds an "i" (immediate) constraint and
 * must remain a compile-time constant after inlining.  The "memory"
 * clobber prevents the compiler from caching or reordering memory
 * accesses across the invalidation.
 */
static __always_inline void invtlb(u32 op, u32 info, u64 addr)
{
	__asm__ __volatile__(
		"invtlb %0, %1, %2\n\t"
		:
		: "i"(op), "r"(info), "r"(addr)
		: "memory"
		);
}
100
/*
 * invtlb variant with the info operand hard-wired to $zero.  The
 * BUILD_BUG_ON enforces that callers really pass a constant 0 for
 * "info", since any other value would be silently dropped.
 */
static __always_inline void invtlb_addr(u32 op, u32 info, u64 addr)
{
	BUILD_BUG_ON(!__builtin_constant_p(info) || info != 0);
	__asm__ __volatile__(
		"invtlb %0, $zero, %1\n\t"
		:
		: "i"(op), "r"(addr)
		: "memory"
		);
}
111
/*
 * invtlb variant with the addr operand hard-wired to $zero.  The
 * BUILD_BUG_ON enforces that callers really pass a constant 0 for
 * "addr", since any other value would be silently dropped.
 */
static __always_inline void invtlb_info(u32 op, u32 info, u64 addr)
{
	BUILD_BUG_ON(!__builtin_constant_p(addr) || addr != 0);
	__asm__ __volatile__(
		"invtlb %0, %1, $zero\n\t"
		:
		: "i"(op), "r"(info)
		: "memory"
		);
}
122
/*
 * invtlb variant with both info and addr hard-wired to $zero.  The
 * BUILD_BUG_ONs enforce that callers pass constant 0 for both, since
 * any other values would be silently dropped.
 */
static __always_inline void invtlb_all(u32 op, u32 info, u64 addr)
{
	BUILD_BUG_ON(!__builtin_constant_p(info) || info != 0);
	BUILD_BUG_ON(!__builtin_constant_p(addr) || addr != 0);
	__asm__ __volatile__(
		"invtlb %0, $zero, $zero\n\t"
		:
		: "i"(op)
		: "memory"
		);
}
134
135#define __tlb_remove_tlb_entry(tlb, ptep, address) do { } while (0)
136
137static void tlb_flush(struct mmu_gather *tlb);
138
139#define tlb_flush tlb_flush
140#include <asm-generic/tlb.h>
141
/*
 * mmu_gather flush hook for <asm-generic/tlb.h> (bound via the
 * "#define tlb_flush tlb_flush" above).
 *
 * Builds a throwaway vma on the stack because flush_tlb_range() takes a
 * vma rather than a bare mm.  vm_flags goes through vm_flags_init()
 * rather than direct assignment (vm_flags is write-protected in current
 * kernels).  Only vm_mm and vm_flags are initialized -- this assumes
 * flush_tlb_range() reads nothing else from the vma (NOTE(review):
 * confirm against the flush_tlb_range() implementation).
 */
static inline void tlb_flush(struct mmu_gather *tlb)
{
	struct vm_area_struct vma;

	vma.vm_mm = tlb->mm;
	vm_flags_init(&vma, 0);
	/* Full address-space teardown: flush the whole mm in one go. */
	if (tlb->fullmm) {
		flush_tlb_mm(tlb->mm);
		return;
	}

	/* Otherwise flush just the gathered [start, end) range. */
	flush_tlb_range(&vma, tlb->start, tlb->end);
}
155
156extern void handle_tlb_load(void);
157extern void handle_tlb_store(void);
158extern void handle_tlb_modify(void);
159extern void handle_tlb_refill(void);
160extern void handle_tlb_protect(void);
161extern void handle_tlb_load_ptw(void);
162extern void handle_tlb_store_ptw(void);
163extern void handle_tlb_modify_ptw(void);
164
165extern void dump_tlb_all(void);
166extern void dump_tlb_regs(void);
167
168#endif /* __ASM_TLB_H */