Sanitise includes across codebase
services/arm_arch_svc/arm_arch_svc_setup.c
/*
 * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata_report.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>

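/* Return the SMCCC version implemented by this firmware. */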
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

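/*
 * Handle SMCCC_ARCH_FEATURES: report whether the function identified by
 * 'arg' is implemented by this firmware and, for the CVE workaround
 * calls, whether the calling PE actually requires the mitigation.
 */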
static int32_t smccc_arch_features(u_register_t arg)
{
	switch (arg) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_OK;
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if the
		 * PE implements architectural Speculation Store Bypass Safe
		 * (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMCCC_ARCH_NOT_REQUIRED;
#endif
	}
#endif

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

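/*
 * Informative sketch of the expected discovery flow in a lower EL
 * (hypothetical pseudo-code, not part of this service):
 *
 *   if (SMC(SMCCC_VERSION) >= MAKE_SMCCC_VERSION(1, 1) &&
 *       SMC(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1) == 0)
 *           SMC(SMCCC_ARCH_WORKAROUND_1);
 *
 * where SMC() stands for whatever conduit the caller uses to reach EL3
 * and a zero feature-query result means the mitigation is needed.
 */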
/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/* Register Arm Architecture Service Calls as runtime service */
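/*
 * The handler below is registered for the Arm Architecture Calls owning
 * entity range (OEN_ARM_START to OEN_ARM_END), handles fast SMCs only,
 * and needs no init hook (NULL).
 */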
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);