Skip to content

Commit 95731b8

Browse files
ardbiesheuvel authored and Russell King committed
ARM: 9059/1: cache-v7: get rid of mini-stack
Now that we have reduced the number of registers that we need to preserve when calling v7_invalidate_l1 from the boot code, we can use scratch registers to preserve the remaining ones, and get rid of the mini stack entirely. This works around any issues regarding cache behavior in relation to the uncached accesses to this memory, which is hard to get right in the general case (i.e., both bare metal and under virtualization) While at it, switch v7_invalidate_l1 to using ip as a scratch register instead of r4. This makes the function AAPCS compliant, and removes the need to stash r4 in ip across the call. Acked-by: Nicolas Pitre <[email protected]> Signed-off-by: Ard Biesheuvel <[email protected]> Signed-off-by: Russell King <[email protected]>
1 parent f9e7a99 commit 95731b8

File tree

3 files changed

+23
-41
lines changed

3 files changed

+23
-41
lines changed

arch/arm/include/asm/memory.h

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -150,21 +150,6 @@ extern unsigned long vectors_base;
150150
*/
151151
#define PLAT_PHYS_OFFSET UL(CONFIG_PHYS_OFFSET)
152152

153-
#ifdef CONFIG_XIP_KERNEL
154-
/*
155-
* When referencing data in RAM from the XIP region in a relative manner
156-
* with the MMU off, we need the relative offset between the two physical
157-
* addresses. The macro below achieves this, which is:
158-
* __pa(v_data) - __xip_pa(v_text)
159-
*/
160-
#define PHYS_RELATIVE(v_data, v_text) \
161-
(((v_data) - PAGE_OFFSET + PLAT_PHYS_OFFSET) - \
162-
((v_text) - XIP_VIRT_ADDR(CONFIG_XIP_PHYS_ADDR) + \
163-
CONFIG_XIP_PHYS_ADDR))
164-
#else
165-
#define PHYS_RELATIVE(v_data, v_text) ((v_data) - (v_text))
166-
#endif
167-
168153
#ifndef __ASSEMBLY__
169154

170155
/*

arch/arm/mm/cache-v7.S

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -53,12 +53,12 @@ ENTRY(v7_invalidate_l1)
5353
and r2, r0, #0x7
5454
add r2, r2, #4 @ SetShift
5555

56-
1: movw r4, #0x7fff
57-
and r0, r4, r0, lsr #13 @ 'NumSets' in CCSIDR[27:13]
56+
1: movw ip, #0x7fff
57+
and r0, ip, r0, lsr #13 @ 'NumSets' in CCSIDR[27:13]
5858

59-
2: mov r4, r0, lsl r2 @ NumSet << SetShift
60-
orr r4, r4, r3 @ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
61-
mcr p15, 0, r4, c7, c6, 2
59+
2: mov ip, r0, lsl r2 @ NumSet << SetShift
60+
orr ip, ip, r3 @ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
61+
mcr p15, 0, ip, c7, c6, 2
6262
subs r0, r0, #1 @ Set--
6363
bpl 2b
6464
subs r3, r3, r1 @ Way--

arch/arm/mm/proc-v7.S

Lines changed: 18 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -256,6 +256,20 @@ ENDPROC(cpu_pj4b_do_resume)
256256

257257
#endif
258258

259+
@
260+
@ Invoke the v7_invalidate_l1() function, which adheres to the AAPCS
261+
@ rules, and so it may corrupt registers that we need to preserve.
262+
@
263+
.macro do_invalidate_l1
264+
mov r6, r1
265+
mov r7, r2
266+
mov r10, lr
267+
bl v7_invalidate_l1 @ corrupts {r0-r3, ip, lr}
268+
mov r1, r6
269+
mov r2, r7
270+
mov lr, r10
271+
.endm
272+
259273
/*
260274
* __v7_setup
261275
*
@@ -277,20 +291,17 @@ __v7_ca5mp_setup:
277291
__v7_ca9mp_setup:
278292
__v7_cr7mp_setup:
279293
__v7_cr8mp_setup:
294+
do_invalidate_l1
280295
mov r10, #(1 << 0) @ Cache/TLB ops broadcasting
281296
b 1f
282297
__v7_ca7mp_setup:
283298
__v7_ca12mp_setup:
284299
__v7_ca15mp_setup:
285300
__v7_b15mp_setup:
286301
__v7_ca17mp_setup:
302+
do_invalidate_l1
287303
mov r10, #0
288-
1: adr r0, __v7_setup_stack_ptr
289-
ldr r12, [r0]
290-
add r12, r12, r0 @ the local stack
291-
stmia r12, {r1-r6, lr} @ v7_invalidate_l1 touches r0-r6
292-
bl v7_invalidate_l1
293-
ldmia r12, {r1-r6, lr}
304+
1:
294305
#ifdef CONFIG_SMP
295306
orr r10, r10, #(1 << 6) @ Enable SMP/nAMP mode
296307
ALT_SMP(mrc p15, 0, r0, c1, c0, 1)
@@ -471,12 +482,7 @@ __v7_pj4b_setup:
471482
#endif /* CONFIG_CPU_PJ4B */
472483

473484
__v7_setup:
474-
adr r0, __v7_setup_stack_ptr
475-
ldr r12, [r0]
476-
add r12, r12, r0 @ the local stack
477-
stmia r12, {r1-r6, lr} @ v7_invalidate_l1 touches r0-r6
478-
bl v7_invalidate_l1
479-
ldmia r12, {r1-r6, lr}
485+
do_invalidate_l1
480486

481487
__v7_setup_cont:
482488
and r0, r9, #0xff000000 @ ARM?
@@ -548,17 +554,8 @@ __errata_finish:
548554
orr r0, r0, r6 @ set them
549555
THUMB( orr r0, r0, #1 << 30 ) @ Thumb exceptions
550556
ret lr @ return to head.S:__ret
551-
552-
.align 2
553-
__v7_setup_stack_ptr:
554-
.word PHYS_RELATIVE(__v7_setup_stack, .)
555557
ENDPROC(__v7_setup)
556558

557-
.bss
558-
.align 2
559-
__v7_setup_stack:
560-
.space 4 * 7 @ 7 registers
561-
562559
__INITDATA
563560

564561
.weak cpu_v7_bugs_init

0 commit comments

Comments (0)