v3.5.6
/*
 * SMP support for R-Mobile / SH-Mobile
 *
 * Copyright (C) 2010  Magnus Damm
 * Copyright (C) 2010  Takashi Yoshii
 *
 * Based on vexpress, Copyright (c) 2003 ARM Limited, All Rights Reserved
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/memory.h>

	__CPUINIT

/* Cache invalidation nicked from arch/arm/mach-imx/head-v7.S, thanks!
 *
 * The secondary kernel init calls v7_flush_dcache_all before it enables
 * the L1; however, the L1 comes out of reset in an undefined state, so
 * the clean + invalidate performed by v7_flush_dcache_all causes a bunch
 * of cache lines with uninitialized data and uninitialized tags to get
 * written out to memory, which does really unpleasant things to the main
 * processor.  We fix this by performing an invalidate, rather than a
 * clean + invalidate, before jumping into the kernel.
 *
 * This function is cloned from arch/arm/mach-tegra/headsmp.S, and needs
 * to be called for both secondary cores startup and primary core resume
 * procedures.  Ideally, it should be moved into arch/arm/mm/cache-v7.S.
 */
ENTRY(v7_invalidate_l1)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate I cache
	mcr	p15, 2, r0, c0, c0, 0	@ select L1 data cache in CSSELR
	mrc	p15, 1, r0, c0, c0, 0	@ read CCSIDR

	ldr	r1, =0x7fff
	and	r2, r1, r0, lsr #13

	ldr	r1, =0x3ff

	and	r3, r1, r0, lsr #3	@ NumWays - 1
	add	r2, r2, #1		@ NumSets

	and	r0, r0, #0x7
	add	r0, r0, #4	@ SetShift

	clz	r1, r3		@ WayShift
	add	r4, r3, #1	@ NumWays
1:	sub	r2, r2, #1	@ NumSets--
	mov	r3, r4		@ Temp = NumWays
2:	subs	r3, r3, #1	@ Temp--
	mov	r5, r3, lsl r1
	mov	r6, r2, lsl r0
	orr	r5, r5, r6	@ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
	mcr	p15, 0, r5, c7, c6, 2	@ DCISW: invalidate data cache line by set/way
	bgt	2b
	cmp	r2, #0
	bgt	1b
	dsb
	isb
	mov	pc, lr
ENDPROC(v7_invalidate_l1)
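
For reference, the set/way loop above can be read as the following C sketch. It decodes the Cache Size ID Register (CCSIDR) fields the same way the assembly does; read_ccsidr() and dc_isw() are hypothetical helpers standing in for the CP15 accesses, and the shift handling assumes a multi-way cache, just as the assembly does.

#include <stdint.h>

extern uint32_t read_ccsidr(void);	/* hypothetical: mrc p15, 1, <Rt>, c0, c0, 0 */
extern void dc_isw(uint32_t setway);	/* hypothetical: mcr p15, 0, <Rt>, c7, c6, 2 */

static void invalidate_l1_by_set_way(void)
{
	uint32_t ccsidr = read_ccsidr();

	uint32_t num_sets  = ((ccsidr >> 13) & 0x7fff) + 1;	/* NumSets */
	uint32_t num_ways  = ((ccsidr >> 3) & 0x3ff) + 1;	/* NumWays */
	uint32_t set_shift = (ccsidr & 0x7) + 4;		/* log2(line size in bytes) */
	uint32_t way_shift = __builtin_clz(num_ways - 1);	/* way index lives in the top bits */

	for (uint32_t set = 0; set < num_sets; set++)
		for (uint32_t way = 0; way < num_ways; way++)
			dc_isw((way << way_shift) | (set << set_shift));

	/* the assembly follows this with dsb + isb barriers */
}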

ENTRY(shmobile_invalidate_start)
	bl	v7_invalidate_l1
	b	secondary_startup
ENDPROC(shmobile_invalidate_start)

/*
 * Reset vector for secondary CPUs.
 * This will be mapped at address 0 by SBAR register.
 * We need _long_ jump to the physical address.
 */
	.align  12
ENTRY(shmobile_secondary_vector)
	ldr     pc, 1f
1:	.long   shmobile_invalidate_start - PAGE_OFFSET + PLAT_PHYS_OFFSET
ENDPROC(shmobile_secondary_vector)
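
The literal in the vector is, in effect, a link-time virt_to_phys(): the secondary CPU starts with the MMU off, so it must branch to the physical address of shmobile_invalidate_start. A standalone illustration of the arithmetic, with placeholder values standing in for PAGE_OFFSET, PLAT_PHYS_OFFSET, and the symbol address (the real values come from the kernel configuration and link):

#include <stdio.h>

#define PAGE_OFFSET		0xc0000000UL	/* assumed kernel virtual base */
#define PLAT_PHYS_OFFSET	0x40000000UL	/* assumed RAM physical base */

int main(void)
{
	unsigned long virt = 0xc0008200UL;	/* hypothetical address of shmobile_invalidate_start */
	unsigned long phys = virt - PAGE_OFFSET + PLAT_PHYS_OFFSET;

	printf("virt %#lx -> phys %#lx\n", virt, phys);
	return 0;
}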
 
v5.4
/* SPDX-License-Identifier: GPL-2.0
 *
 * SMP support for R-Mobile / SH-Mobile
 *
 * Copyright (C) 2010  Magnus Damm
 * Copyright (C) 2010  Takashi Yoshii
 *
 * Based on vexpress, Copyright (c) 2003 ARM Limited, All Rights Reserved
 */
#include <linux/init.h>
#include <linux/linkage.h>
#include <linux/threads.h>
#include <asm/assembler.h>
#include <asm/memory.h>

#define SCTLR_MMU	0x01
#define BOOTROM_ADDRESS	0xE6340000
#define RWTCSRA_ADDRESS 0xE6020004
#define RWTCSRA_WOVF	0x10

/*
 * Reset vector for secondary CPUs.
 * This will be mapped at address 0 by SBAR register.
 * We need _long_ jump to the physical address.
 */
	.arm
	.align  12
ENTRY(shmobile_boot_vector)
	ldr     r1, 1f
	bx	r1

ENDPROC(shmobile_boot_vector)

	.align	2
	.globl	shmobile_boot_fn
shmobile_boot_fn:
1:	.space	4
	.globl	shmobile_boot_size
shmobile_boot_size:
	.long	. - shmobile_boot_vector
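
shmobile_boot_fn is left as 4 bytes of .space because platform code fills it in at run time, and shmobile_boot_size lets that code copy the whole trampoline. A rough sketch of how a platform might install it (not the actual platsmp code; icram_base and next_stage_phys are assumed to be supplied by the caller):

#include <linux/io.h>

extern void shmobile_boot_vector(void);
extern unsigned long shmobile_boot_fn;
extern unsigned long shmobile_boot_size;

static void install_boot_vector(void __iomem *icram_base,
				unsigned long next_stage_phys)
{
	/* patch the trampoline's function slot with the next stage's physical address */
	shmobile_boot_fn = next_stage_phys;

	/* copy the trampoline into the on-chip RAM that SBAR maps at the reset address */
	memcpy_toio(icram_base, (const void *)shmobile_boot_vector,
		    shmobile_boot_size);
}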

#ifdef CONFIG_ARCH_RCAR_GEN2
/*
 * Reset vector for R-Car Gen2 and RZ/G1 secondary CPUs.
 * This will be mapped at address 0 by SBAR register.
 */
ENTRY(shmobile_boot_vector_gen2)
	mrc	p15, 0, r0, c0, c0, 5		@ r0 = MPIDR
	ldr	r1, shmobile_boot_cpu_gen2
	cmp	r0, r1
	bne	shmobile_smp_continue_gen2

	mrc	p15, 0, r1, c1, c0, 0		@ r1 = SCTLR
	and	r0, r1, #SCTLR_MMU
	cmp	r0, #SCTLR_MMU
	beq	shmobile_smp_continue_gen2

	ldr	r0, rwtcsra
	mov	r1, #0
	ldrb	r1, [r0]
	and	r0, r1, #RWTCSRA_WOVF
	cmp	r0, #RWTCSRA_WOVF
	bne	shmobile_smp_continue_gen2

	ldr	r0, bootrom
	bx	r0

shmobile_smp_continue_gen2:
	ldr     r1, shmobile_boot_fn_gen2
	bx	r1

ENDPROC(shmobile_boot_vector_gen2)

	.align	4
rwtcsra:
	.word	RWTCSRA_ADDRESS
bootrom:
	.word	BOOTROM_ADDRESS
	.globl	shmobile_boot_cpu_gen2
shmobile_boot_cpu_gen2:
	.word	0x00000000

	.align	2
	.globl	shmobile_boot_fn_gen2
shmobile_boot_fn_gen2:
	.space	4
	.globl	shmobile_boot_size_gen2
shmobile_boot_size_gen2:
	.long	. - shmobile_boot_vector_gen2
#endif /* CONFIG_ARCH_RCAR_GEN2 */
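
In C terms, the Gen2 vector above takes the Boot ROM path only when the boot CPU comes out of a watchdog reset; otherwise it falls through to the normal boot function installed by platform code. The sketch below is illustrative only: read_mpidr(), read_sctlr(), and jump_to() are hypothetical stand-ins for the MRC/LDRB/BX instructions, and the constants mirror the #defines at the top of the file.

#include <stdint.h>

#define SCTLR_MMU	0x01		/* same values as the #defines above */
#define BOOTROM_ADDRESS	0xE6340000
#define RWTCSRA_ADDRESS	0xE6020004
#define RWTCSRA_WOVF	0x10

extern uint32_t read_mpidr(void);	/* hypothetical: mrc p15, 0, <Rt>, c0, c0, 5 */
extern uint32_t read_sctlr(void);	/* hypothetical: mrc p15, 0, <Rt>, c1, c0, 0 */
extern void jump_to(uint32_t addr);	/* hypothetical: bx to the given address */
extern uint32_t shmobile_boot_cpu_gen2;
extern uint32_t shmobile_boot_fn_gen2;

static void boot_vector_gen2(void)
{
	if (read_mpidr() == shmobile_boot_cpu_gen2 &&			/* boot CPU? */
	    !(read_sctlr() & SCTLR_MMU) &&				/* MMU still off? */
	    (*(volatile uint8_t *)(uintptr_t)RWTCSRA_ADDRESS & RWTCSRA_WOVF))
		jump_to(BOOTROM_ADDRESS);	/* watchdog overflow: let the Boot ROM handle the reset */
	else
		jump_to(shmobile_boot_fn_gen2);	/* normal path installed by platform code */
}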

/*
 * Per-CPU SMP boot function/argument selection code based on MPIDR
 */

ENTRY(shmobile_smp_boot)
	mrc	p15, 0, r1, c0, c0, 5		@ r1 = MPIDR
	and	r0, r1, #0xffffff		@ MPIDR_HWID_BITMASK
						@ r0 = cpu_logical_map() value
	mov	r1, #0				@ r1 = CPU index
	adr	r2, 1f
	ldmia	r2, {r5, r6, r7}
	add	r5, r5, r2			@ array of per-cpu mpidr values
	add	r6, r6, r2			@ array of per-cpu functions
	add	r7, r7, r2			@ array of per-cpu arguments

shmobile_smp_boot_find_mpidr:
	ldr	r8, [r5, r1, lsl #2]
	cmp	r8, r0
	bne	shmobile_smp_boot_next

	ldr	r9, [r6, r1, lsl #2]
	cmp	r9, #0
	bne	shmobile_smp_boot_found

shmobile_smp_boot_next:
	add	r1, r1, #1
	cmp	r1, #NR_CPUS
	blo	shmobile_smp_boot_find_mpidr

	b	shmobile_smp_sleep

shmobile_smp_boot_found:
	ldr	r0, [r7, r1, lsl #2]
	ret	r9
ENDPROC(shmobile_smp_boot)

ENTRY(shmobile_smp_sleep)
	wfi
	b	shmobile_smp_boot
ENDPROC(shmobile_smp_sleep)

	.align	2
1:	.long	shmobile_smp_mpidr - .
	.long	shmobile_smp_fn - 1b
	.long	shmobile_smp_arg - 1b

	.bss
	.globl	shmobile_smp_mpidr
shmobile_smp_mpidr:
	.space	NR_CPUS * 4
	.globl	shmobile_smp_fn
shmobile_smp_fn:
	.space	NR_CPUS * 4
	.globl	shmobile_smp_arg
shmobile_smp_arg:
	.space	NR_CPUS * 4
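
The selection logic above is roughly the following C, operating on the three NR_CPUS-sized arrays in the .bss section just defined; read_mpidr() and cpu_wfi() are hypothetical stand-ins for the MRC and WFI instructions, and the function-pointer call approximates the tail call the assembly makes with `ret r9`.

#include <linux/threads.h>

extern unsigned long shmobile_smp_mpidr[NR_CPUS];
extern unsigned long shmobile_smp_fn[NR_CPUS];
extern unsigned long shmobile_smp_arg[NR_CPUS];

extern unsigned long read_mpidr(void);	/* hypothetical: mrc p15, 0, <Rt>, c0, c0, 5 */
extern void cpu_wfi(void);		/* hypothetical: wfi */

static void shmobile_smp_boot_c(void)
{
	unsigned long hwid = read_mpidr() & 0xffffff;	/* MPIDR_HWID_BITMASK */
	unsigned int cpu;

	for (;;) {
		/* find this CPU's slot and check that a boot function has been registered */
		for (cpu = 0; cpu < NR_CPUS; cpu++) {
			if (shmobile_smp_mpidr[cpu] == hwid && shmobile_smp_fn[cpu]) {
				void (*fn)(unsigned long) =
					(void (*)(unsigned long))shmobile_smp_fn[cpu];
				fn(shmobile_smp_arg[cpu]);	/* does not return */
			}
		}
		cpu_wfi();	/* nothing registered yet: sleep, then scan again */
	}
}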