/*
 * include/asm-sh/tlb_64.h
 *
 * Copyright (C) 2003 Paul Mundt
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#ifndef __ASM_SH_TLB_64_H
#define __ASM_SH_TLB_64_H

/* ITLB defines */
#define ITLB_FIXED	0x00000000	/* First fixed ITLB, see head.S */
#define ITLB_LAST_VAR_UNRESTRICTED	0x000003F0	/* Last ITLB */

/* DTLB defines */
#define DTLB_FIXED	0x00800000	/* First fixed DTLB, see head.S */
#define DTLB_LAST_VAR_UNRESTRICTED	0x008003F0	/* Last DTLB */

#ifndef __ASSEMBLY__

/**
 * for_each_dtlb_entry - Iterate over free (non-wired) DTLB entries
 *
 * @tlb: TLB entry
 */
#define for_each_dtlb_entry(tlb)		\
	for (tlb  = cpu_data->dtlb.first;	\
	     tlb <= cpu_data->dtlb.last;	\
	     tlb += cpu_data->dtlb.step)

/**
 * for_each_itlb_entry - Iterate over free (non-wired) ITLB entries
 *
 * @tlb: TLB entry
 */
#define for_each_itlb_entry(tlb)		\
	for (tlb  = cpu_data->itlb.first;	\
	     tlb <= cpu_data->itlb.last;	\
	     tlb += cpu_data->itlb.step)
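
/*
 * Usage sketch (illustrative, not part of the original header): both
 * iterators expect @tlb to be an unsigned long long lvalue supplied by the
 * caller, and step it through the configuration-space addresses of the
 * non-wired slots described by cpu_data. "free_slots" below is just a
 * placeholder name.
 *
 *	unsigned long long tlb;
 *	unsigned int free_slots = 0;
 *
 *	for_each_dtlb_entry(tlb)
 *		free_slots++;
 */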

/**
 * __flush_tlb_slot - Flushes TLB slot @slot.
 *
 * @slot: Address of TLB slot.
 */
static inline void __flush_tlb_slot(unsigned long long slot)
{
	__asm__ __volatile__ ("putcfg %0, 0, r63\n" : : "r" (slot));
}
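
/*
 * Sketch only, assuming the common pattern for these helpers: combining the
 * iterators with __flush_tlb_slot() yields a "flush every non-wired slot"
 * loop, roughly what a flush-all path such as local_flush_tlb_all() would do.
 *
 *	unsigned long long tlb;
 *
 *	for_each_dtlb_entry(tlb)
 *		__flush_tlb_slot(tlb);
 *	for_each_itlb_entry(tlb)
 *		__flush_tlb_slot(tlb);
 */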

#ifdef CONFIG_MMU
/* arch/sh64/mm/tlb.c */
int sh64_tlb_init(void);
unsigned long long sh64_next_free_dtlb_entry(void);
unsigned long long sh64_get_wired_dtlb_entry(void);
int sh64_put_wired_dtlb_entry(unsigned long long entry);
void sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr,
			 unsigned long asid, unsigned long paddr);
void sh64_teardown_tlb_slot(unsigned long long config_addr);
#else
#define sh64_tlb_init()					do { } while (0)
#define sh64_next_free_dtlb_entry()			(0)
#define sh64_get_wired_dtlb_entry()			(0)
#define sh64_put_wired_dtlb_entry(entry)		do { } while (0)
#define sh64_setup_tlb_slot(conf, virt, asid, phys)	do { } while (0)
#define sh64_teardown_tlb_slot(addr)			do { } while (0)
#endif /* CONFIG_MMU */
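
/*
 * Sketch of the expected lifecycle of a wired DTLB slot under CONFIG_MMU;
 * eaddr, asid and paddr are placeholders the caller must provide, and
 * error handling is omitted.
 *
 *	unsigned long long slot;
 *
 *	slot = sh64_get_wired_dtlb_entry();
 *	sh64_setup_tlb_slot(slot, eaddr, asid, paddr);
 *	(... use the wired mapping ...)
 *	sh64_teardown_tlb_slot(slot);
 *	sh64_put_wired_dtlb_entry(slot);
 */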
#endif /* __ASSEMBLY__ */
#endif /* __ASM_SH_TLB_64_H */