/* SPDX-License-Identifier: GPL-2.0+ */
/*
* Copyright (C) 2020 Cortina-Access
*
*/
#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
#include <asm/armv8/mmu.h>
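
/*
 * lowlevel_init runs on every core early in boot: it enables SMP
 * coherency (CA7774), starts the platform timer (CA8277B), brings up
 * the GIC, and parks secondary cores until the master releases them
 * via the spin table.
 */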
.globl lowlevel_init
lowlevel_init:
	mov	x29, lr			/* Save LR */
#if defined(CONFIG_SOC_CA7774)
	/* Enable SMPEN in CPUECTLR */
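	/*
	 * s3_1_c15_c2_1 is the system-register encoding of CPUECTLR_EL1;
	 * bit 6 (SMPEN, mask 0x40) lets this core receive coherent
	 * requests from the rest of the cluster, and is skipped below if
	 * it is already set.
	 */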
	mrs	x0, s3_1_c15_c2_1
	tst	x0, #0x40
	b.ne	skip_smp_setup
	orr	x0, x0, #0x40
	msr	s3_1_c15_c2_1, x0
skip_smp_setup:
#endif
#if defined(CONFIG_SOC_CA8277B)
	/* Enable CPU Timer */
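	/*
	 * A 32-bit write of 1 to the first register of the timer block
	 * starts it; CFG_SYS_TIMER_BASE comes from the board
	 * configuration.
	 */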
	ldr	x0, =CFG_SYS_TIMER_BASE
	mov	x1, #1
	str	w1, [x0]
#endif
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
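	/*
	 * Only the master core initialises the GIC distributor; slaves
	 * branch to 1: below, and every core then sets up its own banked
	 * CPU interface (GICv2) or redistributor (GICv3).
	 */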
	branch_if_slave x0, 1f
#ifndef CONFIG_TARGET_VENUS
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
#endif
1:
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif
#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, 2f

	/*
	 * Slaves should wait for the master to finish clearing the spin
	 * table. This synchronisation prevents slaves from observing a
	 * stale spin-table value and jumping to the wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
	ldr	x0, =GICC_BASE
#endif
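	/*
	 * Sleep in WFI inside gic_wait_for_interrupt until the master
	 * wakes this core; for GICv2 the CPU interface base is passed
	 * in x0.
	 */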
	bl	gic_wait_for_interrupt
#endif
	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
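	/*
	 * armv8_switch_to_el2 drops to EL2 and jumps to the address in
	 * x4; x5 = ES_TO_AARCH64 selects an AArch64 execution state for
	 * the new exception level.
	 */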
	adr	x4, lowlevel_in_el2
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el2
lowlevel_in_el2:
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
	adr	x4, lowlevel_in_el1
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el1
lowlevel_in_el1:
#endif
#endif /* CONFIG_ARMV8_MULTIENTRY */
2:
	mov	lr, x29			/* Restore LR */
	ret