/* Copyright (c) 2011-2016, The Linux Foundation. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer in the documentation and/or other materials provided
 *       with the distribution.
 *     * Neither the name of The Linux Foundation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdlib.h>
#include <string.h>
#include <err.h>
#include <asm.h>
#include <bits.h>
#include <arch/ops.h>
#include <rand.h>
#include <image_verify.h>
#include <dload_util.h>
#include <platform/iomap.h>
#include <board.h>
#include "scm.h"

#pragma GCC optimize ("O0")

/* From Linux Kernel asm/system.h */
#define __asmeq(x, y)  ".ifnc " x "," y " ; .err ; .endif\n\t"
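
/*
 * __asmeq() expands to an assembler ".ifnc ... ; .err" check: if the operand
 * the compiler assigned to an inline-asm constraint is not the register named
 * in the template, assembly fails.  The SMC stubs below use it to make sure
 * r0-r3 really carry the values placed in the register variables.
 */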

#ifndef offsetof
#  define offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
#endif

#define SCM_CLASS_REGISTER         (0x2 << 8)
#define SCM_MASK_IRQS              BIT(5)
#define SCM_ATOMIC(svc, cmd, n)    ((((((svc) & 0x3f) << 10)|((cmd) & 0x3ff)) << 12) | \
				   SCM_CLASS_REGISTER | \
				   SCM_MASK_IRQS | \
				   ((n) & 0xf))
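
/*
 * SCM_ATOMIC() packs the whole register-based ("atomic") command into one
 * word passed in r0: the service id lands in bits [27:22], the command id in
 * bits [21:12], SCM_CLASS_REGISTER sets bit 9, SCM_MASK_IRQS sets bit 5, and
 * the low nibble holds the argument count.
 */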

/* SCM interface as per ARM spec present? */
bool scm_arm_support;
static bool scm_initialized;

bool is_scm_armv8_support()
{
#if !NO_SCM_V8_SUPPORT
	if (!scm_initialized)
	{
		scm_init();
	}
#endif

	return scm_arm_support;
}

int is_scm_call_available(uint32_t svc_id, uint32_t cmd_id)
{
	int ret;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_INFO, IS_CALL_AVAIL_CMD);
	scm_arg.x1 = MAKE_SCM_ARGS(0x1);
	scm_arg.x2 = MAKE_SIP_SCM_CMD(svc_id, cmd_id);

	ret = scm_call2(&scm_arg, &scm_ret);

	if (!ret)
		return scm_ret.x1;

	return ret;
}
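
/*
 * The function above shows the calling pattern used for every ARMv8-style
 * call in this file: x0 carries the function id built by
 * MAKE_SIP_SCM_CMD(svc, cmd), x1 carries the MAKE_SCM_ARGS() descriptor
 * (apparently the argument count plus per-argument type flags, judging from
 * its uses below), and x2 onwards carry the arguments.  Results come back in
 * scm_ret.x1/x2.  The macros and the scmcall_arg/scmcall_ret layouts come
 * from scm.h.
 */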

static int scm_arm_support_available(uint32_t svc_id, uint32_t cmd_id)
{
	int ret;

	ret = is_scm_call_available(SCM_SVC_INFO, IS_CALL_AVAIL_CMD);

	if (ret > 0)
		scm_arm_support = true;

	return ret;
}

void scm_init()
{
	int ret;

	if (scm_initialized)
		return;

	ret = scm_arm_support_available(SCM_SVC_INFO, IS_CALL_AVAIL_CMD);

	if (ret < 0)
		dprintf(CRITICAL, "Failed to initialize SCM\n");

	scm_initialized = true;

#if DISABLE_DLOAD_MODE
	scm_disable_sdi();
#endif
}
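
/*
 * scm_init() is normally reached lazily through is_scm_armv8_support(): the
 * first caller probes SCM_SVC_INFO/IS_CALL_AVAIL_CMD over the ARMv8
 * convention, and the resulting scm_arm_support flag steers the routines
 * below between the legacy scm_call() path and the scm_call2() path.
 */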

/**
 * alloc_scm_command() - Allocate an SCM command
 * @cmd_size: size of the command buffer
 * @resp_size: size of the response buffer
 *
 * Allocate an SCM command, including enough room for the command
 * and response headers as well as the command and response buffers.
 *
 * Returns a valid &scm_command on success or %NULL if the allocation fails.
 */
static struct scm_command *alloc_scm_command(size_t cmd_size, size_t resp_size)
{
	struct scm_command *cmd;
	size_t len = sizeof(*cmd) + sizeof(struct scm_response) + cmd_size +
		     resp_size;

	cmd = memalign(CACHE_LINE, ROUNDUP(len, CACHE_LINE));
	if (cmd) {
		memset(cmd, 0, len);
		cmd->len = len;
		cmd->buf_offset = offsetof(struct scm_command, buf);
		cmd->resp_hdr_offset = cmd->buf_offset + cmd_size;
	}
	return cmd;
}
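
/*
 * The block returned above is a single cache-line-aligned allocation sized as
 *
 *	[struct scm_command][command payload][struct scm_response][response payload]
 *
 * with buf_offset and resp_hdr_offset recording where the command payload and
 * the response header begin; scm_command_to_response() and the
 * scm_get_*_buffer() helpers below simply walk these offsets.
 */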

/**
 * free_scm_command() - Free an SCM command
 * @cmd: command to free
 *
 * Free an SCM command.
 */
static inline void free_scm_command(struct scm_command *cmd)
{
	free(cmd);
}

/**
 * scm_command_to_response() - Get a pointer to a scm_response
 * @cmd: command
 *
 * Returns a pointer to a response for a command.
 */
static inline struct scm_response *scm_command_to_response(const struct
							    scm_command *cmd)
{
	return (void *)cmd + cmd->resp_hdr_offset;
}

/**
 * scm_get_command_buffer() - Get a pointer to a command buffer
 * @cmd: command
 *
 * Returns a pointer to the command buffer of a command.
 */
static inline void *scm_get_command_buffer(const struct scm_command *cmd)
{
	return (void *)cmd->buf;
}

/**
 * scm_get_response_buffer() - Get a pointer to a response buffer
 * @rsp: response
 *
 * Returns a pointer to a response buffer of a response.
 */
static inline void *scm_get_response_buffer(const struct scm_response *rsp)
{
	return (void *)rsp + rsp->buf_offset;
}

static uint32_t smc(uint32_t cmd_addr)
{
	uint32_t context_id;
	register uint32_t r0 __asm__("r0") = 1;
	register uint32_t r1 __asm__("r1") = (uint32_t)&context_id;
	register uint32_t r2 __asm__("r2") = cmd_addr;
	__asm__("1:smc	#0	@ switch to secure world\n"
		"cmp	r0, #1\n"
		"beq	1b\n"
		: "=r"(r0)
		: "r"(r0), "r"(r1), "r"(r2)
		: "r3", "cc");
	return r0;
}
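
/*
 * smc() is the legacy buffer-based entry into the secure world: r0 = 1
 * selects the command call, r1 points at a context id word, r2 holds the
 * address of the scm_command block, and the instruction is retried while the
 * secure world keeps returning 1 (conventionally "interrupted, try again").
 * The final r0 value is the SCM status code.
 */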

/**
 * scm_call_atomic() - Make an SCM call with one or no argument
 * @svc: service id
 * @cmd: command id
 * @arg1: argument
 */

static int scm_call_atomic(uint32_t svc, uint32_t cmd, uint32_t arg1)
{
	uint32_t context_id;
	register uint32_t r0 __asm__("r0") = SCM_ATOMIC(svc, cmd, 1);
	register uint32_t r1 __asm__("r1") = (uint32_t)&context_id;
	register uint32_t r2 __asm__("r2") = arg1;

	__asm__ volatile(
		__asmeq("%0", "r0")
		__asmeq("%1", "r0")
		__asmeq("%2", "r1")
		__asmeq("%3", "r2")
		"smc    #0  @ switch to secure world\n"
		: "=r" (r0)
		: "r" (r0), "r" (r1), "r" (r2)
		: "r3");
	return r0;
}

/**
 * scm_call_atomic2() - Send an atomic SCM command with two arguments
 * @svc_id: service identifier
 * @cmd_id: command identifier
 * @arg1: first argument
 * @arg2: second argument
 *
 * This shall only be used with commands that are guaranteed to be
 * uninterruptible, atomic and SMP safe.
 */
int scm_call_atomic2(uint32_t svc, uint32_t cmd, uint32_t arg1, uint32_t arg2)
{
	int context_id;
	register uint32_t r0 __asm__("r0") = SCM_ATOMIC(svc, cmd, 2);
	register uint32_t r1 __asm__("r1") = (uint32_t)&context_id;
	register uint32_t r2 __asm__("r2") = arg1;
	register uint32_t r3 __asm__("r3") = arg2;

	__asm__ volatile(
		__asmeq("%0", "r0")
		__asmeq("%1", "r0")
		__asmeq("%2", "r1")
		__asmeq("%3", "r2")
		__asmeq("%4", "r3")
		"smc	#0	@ switch to secure world\n"
		: "=r" (r0)
		: "r" (r0), "r" (r1), "r" (r2), "r" (r3));
	return r0;
}

/**
 * scm_call() - Send an SCM command
 * @svc_id: service identifier
 * @cmd_id: command identifier
 * @cmd_buf: command buffer
 * @cmd_len: length of the command buffer
 * @resp_buf: response buffer
 * @resp_len: length of the response buffer
 *
 * Sends a command to the SCM and waits for the command to finish processing.
 */
int
scm_call(uint32_t svc_id, uint32_t cmd_id, const void *cmd_buf,
	 size_t cmd_len, void *resp_buf, size_t resp_len)
{
	int ret;
	struct scm_command *cmd;
	struct scm_response *rsp;
	uint8_t *resp_ptr;

	cmd = alloc_scm_command(cmd_len, resp_len);
	if (!cmd)
		return ERR_NO_MEMORY;

	cmd->id = (svc_id << 10) | cmd_id;
	if (cmd_buf)
		memcpy(scm_get_command_buffer(cmd), cmd_buf, cmd_len);

	/* Flush command to main memory for TZ */
	arch_clean_invalidate_cache_range((addr_t) cmd, cmd->len);

	ret = smc((uint32_t) cmd);
	if (ret)
		goto out;

	if (resp_len) {
		rsp = scm_command_to_response(cmd);

		do
		{
			/* Need to invalidate before each check since TZ will update
			 * the response complete flag in main memory.
			 */
			arch_invalidate_cache_range((addr_t) rsp, sizeof(*rsp));
		} while (!rsp->is_complete);

		resp_ptr = scm_get_response_buffer(rsp);

		/* Invalidate any cached response data */
		arch_invalidate_cache_range((addr_t) resp_ptr, resp_len);

		if (resp_buf)
			memcpy(resp_buf, resp_ptr, resp_len);
	}
out:
	free_scm_command(cmd);
	return ret;
}
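
/*
 * Typical legacy-convention usage, as in restore_secure_cfg() below: callers
 * pack a request struct and pass it along with an optional response buffer,
 * e.g.
 *
 *	ret = scm_call(SVC_MEMORY_PROTECTION, IOMMU_SECURE_CFG,
 *		       &secure_cfg, sizeof(secure_cfg), NULL, 0);
 */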

int restore_secure_cfg(uint32_t id)
{
	int ret = 0;
	tz_secure_cfg secure_cfg;

	secure_cfg.id    = id;
	secure_cfg.spare = 0;
	scmcall_arg scm_arg = {0};

	if(!is_scm_armv8_support())
	{
		ret = scm_call(SVC_MEMORY_PROTECTION, IOMMU_SECURE_CFG, &secure_cfg, sizeof(secure_cfg),
			       NULL, 0);
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SVC_MEMORY_PROTECTION, IOMMU_SECURE_CFG);
		scm_arg.x1 = MAKE_SCM_ARGS(0x2);
		scm_arg.x2 = id;
		scm_arg.x3 = 0x0; /* Spare unused */

		ret = scm_call2(&scm_arg, NULL);
	}

	if (ret)
	{
		dprintf(CRITICAL, "Secure Config failed\n");
		ret = 1;
	}

	return ret;
}

/* SCM Encrypt Command */
int encrypt_scm(uint32_t ** img_ptr, uint32_t * img_len_ptr)
{
	int ret;
	img_req cmd;
	scmcall_arg scm_arg = {0};

	cmd.img_ptr     = (uint32*) img_ptr;
	cmd.img_len_ptr = img_len_ptr;

	/* Image data is operated upon by TZ, which accesses only the main memory.
	 * It must be flushed/invalidated before and after TZ call.
	 */
	arch_clean_invalidate_cache_range((addr_t) *img_ptr, *img_len_ptr);

	if (!is_scm_armv8_support())
	{
		ret = scm_call(SCM_SVC_SSD, SSD_ENCRYPT_ID, &cmd, sizeof(cmd), NULL, 0);
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_SSD, SSD_ENCRYPT_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_BUFFER_READWRITE, SMC_PARAM_TYPE_BUFFER_READWRITE);
		scm_arg.x2 = (uint32_t) cmd.img_ptr;
		scm_arg.x3 = (uint32_t) cmd.img_len_ptr;

		ret = scm_call2(&scm_arg, NULL);
	}

	/* Values at img_ptr and img_len_ptr are updated by TZ. Must be invalidated
	 * before we use them.
	 */
	arch_clean_invalidate_cache_range((addr_t) img_ptr, sizeof(img_ptr));
	arch_clean_invalidate_cache_range((addr_t) img_len_ptr, sizeof(img_len_ptr));

	/* Invalidate the updated image data */
	arch_clean_invalidate_cache_range((addr_t) *img_ptr, *img_len_ptr);

	return ret;
}

/* SCM Decrypt Command */
int decrypt_scm(uint32_t ** img_ptr, uint32_t * img_len_ptr)
{
	int ret;
	img_req cmd;

	if (is_scm_armv8_support())
	{
		dprintf(INFO, "%s: SCM call is not supported\n", __func__);
		return -1;
	}

	cmd.img_ptr     = (uint32*) img_ptr;
	cmd.img_len_ptr = img_len_ptr;

	/* Image data is operated upon by TZ, which accesses only the main memory.
	 * It must be flushed/invalidated before and after TZ call.
	 */
	arch_clean_invalidate_cache_range((addr_t) *img_ptr, *img_len_ptr);

	ret = scm_call(SCM_SVC_SSD, SSD_DECRYPT_ID, &cmd, sizeof(cmd), NULL, 0);

	/* Values at img_ptr and img_len_ptr are updated by TZ. Must be invalidated
	 * before we use them.
	 */
	arch_clean_invalidate_cache_range((addr_t) img_ptr, sizeof(img_ptr));
	arch_clean_invalidate_cache_range((addr_t) img_len_ptr, sizeof(img_len_ptr));

	/* Invalidate the updated image data */
	arch_clean_invalidate_cache_range((addr_t) *img_ptr, *img_len_ptr);

	return ret;
}
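
/*
 * decrypt_scm() only implements the legacy convention and bails out when the
 * ARMv8 interface is present; decrypt_scm_v2() below handles both conventions
 * by first probing the image with ssd_image_is_encrypted() and then issuing
 * SSD_DECRYPT_IMG_FRAG_ID.
 */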

static int ssd_image_is_encrypted(uint32_t ** img_ptr, uint32_t * img_len_ptr, uint32 * ctx_id)
{
	int              ret      = 0;
	ssd_parse_md_req parse_req;
	ssd_parse_md_rsp parse_rsp;
	int              prev_len = 0;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	/* Populate the meta-data ptr. Here md_len is the meta-data length.
	 * The code below follows a growing-length approach. First send
	 * min(img_len_ptr, SSD_HEADER_MIN_SIZE), say 128 bytes for example.
	 * If parse_rsp.status == PARSING_INCOMPLETE we send md_len = 256.
	 * If the subsequent status is PARSING_INCOMPLETE we send md_len = 512,
	 * 1024 bytes and so on until we get a valid response (rsp.status) from TZ.
	 */
	parse_req.md     = (uint32*)*img_ptr;
	parse_req.md_len = ((*img_len_ptr) >= SSD_HEADER_MIN_SIZE) ? SSD_HEADER_MIN_SIZE : (*img_len_ptr);

	arch_clean_invalidate_cache_range((addr_t) *img_ptr, parse_req.md_len);

	do
	{
		if (!is_scm_armv8_support())
		{
			ret = scm_call(SCM_SVC_SSD,
				       SSD_PARSE_MD_ID,
				       &parse_req,
				       sizeof(parse_req),
				       &parse_rsp,
				       sizeof(parse_rsp));
		}
		else
		{
			scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_SSD, SSD_PARSE_MD_ID);
			scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_VALUE, SMC_PARAM_TYPE_BUFFER_READWRITE);
			scm_arg.x2 = parse_req.md_len;
			scm_arg.x3 = (uint32_t) parse_req.md;
			scm_arg.atomic = true;

			ret = scm_call2(&scm_arg, &scm_ret);
			parse_rsp.status = scm_ret.x1;
		}

		if(!ret && (parse_rsp.status == SSD_PMD_PARSING_INCOMPLETE))
		{
			prev_len          = parse_req.md_len;

			parse_req.md_len *= MULTIPLICATION_FACTOR;

			arch_clean_invalidate_cache_range((addr_t) (*img_ptr + prev_len),
							  (parse_req.md_len - prev_len));

			continue;
		}
		else
			break;

	} while(true);

	if(!ret)
	{
		if(parse_rsp.status == SSD_PMD_ENCRYPTED)
		{
			*ctx_id      = parse_rsp.md_ctx_id;
			*img_len_ptr = *img_len_ptr - ((uint8_t*)parse_rsp.md_end_ptr - (uint8_t*)*img_ptr);
			*img_ptr     = (uint32_t*)parse_rsp.md_end_ptr;
		}

		ret = parse_rsp.status;
	}
	else
	{
		dprintf(CRITICAL, "ssd_image_is_encrypted call failed");

		ASSERT(ret == 0);
	}

	return ret;
}

int decrypt_scm_v2(uint32_t ** img_ptr, uint32_t * img_len_ptr)
{
	int                      ret    = 0;
	uint32                   ctx_id = 0;
	ssd_decrypt_img_frag_req decrypt_req;
	ssd_decrypt_img_frag_rsp decrypt_rsp;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	ret = ssd_image_is_encrypted(img_ptr, img_len_ptr, &ctx_id);
	switch(ret)
	{
	case SSD_PMD_ENCRYPTED:
		/* Image data is operated upon by TZ, which accesses only the main memory.
		 * It must be flushed/invalidated before and after TZ call.
		 */
		arch_clean_invalidate_cache_range((addr_t) *img_ptr, *img_len_ptr);

		/* Decrypt the image here */
		decrypt_req.md_ctx_id = ctx_id;
		decrypt_req.last_frag = 1;
		decrypt_req.frag_len  = *img_len_ptr;
		decrypt_req.frag      = *img_ptr;

		if (!is_scm_armv8_support())
		{
			ret = scm_call(SCM_SVC_SSD,
				       SSD_DECRYPT_IMG_FRAG_ID,
				       &decrypt_req,
				       sizeof(decrypt_req),
				       &decrypt_rsp,
				       sizeof(decrypt_rsp));
		}
		else
		{
			scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_SSD, SSD_DECRYPT_IMG_FRAG_ID);
			scm_arg.x1 = MAKE_SCM_ARGS(0x4, SMC_PARAM_TYPE_VALUE, SMC_PARAM_TYPE_VALUE, SMC_PARAM_TYPE_VALUE, SMC_PARAM_TYPE_BUFFER_READWRITE);
			scm_arg.x2 = decrypt_req.md_ctx_id;
			scm_arg.x3 = decrypt_req.last_frag;
			scm_arg.x4 = decrypt_req.frag_len;
			scm_arg.x5[0] = (uint32_t) decrypt_req.frag;

			ret = scm_call2(&scm_arg, &scm_ret);
			decrypt_rsp.status = scm_ret.x1;
		}

		if(!ret){
			ret = decrypt_rsp.status;
		}

		/* Values at img_ptr and img_len_ptr are updated by TZ. Must be invalidated
		 * before we use them.
		 */
		arch_invalidate_cache_range((addr_t) img_ptr, sizeof(img_ptr));
		arch_invalidate_cache_range((addr_t) img_len_ptr, sizeof(img_len_ptr));

		/* Invalidate the updated image data */
		arch_invalidate_cache_range((addr_t) *img_ptr, *img_len_ptr);

		break;

	case SSD_PMD_NOT_ENCRYPTED:
	case SSD_PMD_NO_MD_FOUND:
		ret = 0;
		break;

	case SSD_PMD_BUSY:
	case SSD_PMD_BAD_MD_PTR_OR_LEN:
	case SSD_PMD_PARSING_INCOMPLETE:
	case SSD_PMD_PARSING_FAILED:
	case SSD_PMD_SETUP_CIPHER_FAILED:
		dprintf(CRITICAL, "decrypt_scm_v2: failed status %d\n", ret);
		break;

	default:
		dprintf(CRITICAL, "decrypt_scm_v2: case default: failed status %d\n", ret);
		break;
	}
	return ret;
}

int scm_svc_version(uint32 * major, uint32 * minor)
{
	feature_version_req feature_req;
	feature_version_rsp feature_rsp;
	int                 ret = 0;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	feature_req.feature_id = TZBSP_FVER_SSD;

	if (!is_scm_armv8_support())
	{
		ret = scm_call(TZBSP_SVC_INFO,
			       TZ_INFO_GET_FEATURE_ID,
			       &feature_req,
			       sizeof(feature_req),
			       &feature_rsp,
			       sizeof(feature_rsp));
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(TZBSP_SVC_INFO, TZ_INFO_GET_FEATURE_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x1, SMC_PARAM_TYPE_VALUE);
		scm_arg.x2 = feature_req.feature_id;

		ret = scm_call2(&scm_arg, &scm_ret);
		feature_rsp.version = scm_ret.x1;
	}

	if(!ret)
		*major = TZBSP_GET_FEATURE_VERSION(feature_rsp.version);

	return ret;
}

int scm_svc_get_secure_state(uint32_t *state_low, uint32_t *state_high)
{
	get_secure_state_req req;
	get_secure_state_rsp rsp;

	int ret = 0;

	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	if (!is_scm_armv8_support())
	{
		req.status_ptr = (uint32_t*)&rsp;
		req.status_len = sizeof(rsp);

		ret = scm_call(TZBSP_SVC_INFO,
			       TZ_INFO_GET_SECURE_STATE,
			       &req,
			       sizeof(req),
			       NULL,
			       0);
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(TZBSP_SVC_INFO, TZ_INFO_GET_SECURE_STATE);
		scm_arg.x1 = MAKE_SCM_ARGS(0x0);

		ret = scm_call2(&scm_arg, &scm_ret);

		rsp.status_low = scm_ret.x1;
		rsp.status_high = scm_ret.x2;
	}

	if(!ret)
	{
		*state_low = rsp.status_low;
		*state_high = rsp.status_high;
	}

	return ret;
}

int scm_protect_keystore(uint32_t * img_ptr, uint32_t img_len)
{
	int                      ret = 0;
	ssd_protect_keystore_req protect_req;
	ssd_protect_keystore_rsp protect_rsp;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	protect_req.keystore_ptr = img_ptr;
	protect_req.keystore_len = img_len;

	arch_clean_invalidate_cache_range((addr_t) img_ptr, img_len);

	if (!is_scm_armv8_support())
	{
		ret = scm_call(SCM_SVC_SSD,
			       SSD_PROTECT_KEYSTORE_ID,
			       &protect_req,
			       sizeof(protect_req),
			       &protect_rsp,
			       sizeof(protect_rsp));
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_SSD, SSD_PROTECT_KEYSTORE_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_BUFFER_READWRITE, SMC_PARAM_TYPE_VALUE);
		scm_arg.x2 = (uint32_t) protect_req.keystore_ptr;
		scm_arg.x3 = protect_req.keystore_len;

		ret = scm_call2(&scm_arg, &scm_ret);
		protect_rsp.status = scm_ret.x1;
	}
	if(!ret)
	{
		if(protect_rsp.status == TZBSP_SSD_PKS_SUCCESS)
			dprintf(INFO, "Successfully loaded the keystore ");
		else
		{
			dprintf(INFO, "Loading keystore failed status %d ", protect_rsp.status);
			ret = protect_rsp.status;
		}
	}
	else
		dprintf(INFO, "scm_call failed ");

	return ret;
}

void set_tamper_fuse_cmd()
{
	uint32_t svc_id;
	uint32_t cmd_id;
	void *cmd_buf;
	size_t cmd_len;
	void *resp_buf = NULL;
	size_t resp_len = 0;
	scmcall_arg scm_arg = {0};

	uint32_t fuse_id = HLOS_IMG_TAMPER_FUSE;
	cmd_buf = (void *)&fuse_id;
	cmd_len = sizeof(fuse_id);

	if (!is_scm_armv8_support())
	{
		/* no response */
		resp_buf = NULL;
		resp_len = 0;

		svc_id = SCM_SVC_FUSE;
		cmd_id = SCM_BLOW_SW_FUSE_ID;

		scm_call(svc_id, cmd_id, cmd_buf, cmd_len, resp_buf, resp_len);
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_FUSE, SCM_BLOW_SW_FUSE_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_BUFFER_READWRITE, SMC_PARAM_TYPE_VALUE);
		scm_arg.x2 = (uint32_t) cmd_buf;
		scm_arg.x3 = cmd_len;

		scm_call2(&scm_arg, NULL);
	}
}

uint8_t get_tamper_fuse_cmd()
{
	uint32_t svc_id;
	uint32_t cmd_id;
	void *cmd_buf;
	size_t cmd_len;
	size_t resp_len = 0;
	uint8_t resp_buf;

	uint32_t fuse_id = HLOS_IMG_TAMPER_FUSE;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	cmd_buf = (void *)&fuse_id;
	cmd_len = sizeof(fuse_id);

	if (!is_scm_armv8_support())
	{
		/* response */
		resp_len = sizeof(resp_buf);

		svc_id = SCM_SVC_FUSE;
		cmd_id = SCM_IS_SW_FUSE_BLOWN_ID;

		scm_call(svc_id, cmd_id, cmd_buf, cmd_len, &resp_buf, resp_len);
		return resp_buf;
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_FUSE, SCM_IS_SW_FUSE_BLOWN_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_BUFFER_READWRITE, SMC_PARAM_TYPE_VALUE);
		scm_arg.x2 = (uint32_t) cmd_buf;
		scm_arg.x3 = cmd_len;

		scm_call2(&scm_arg, &scm_ret);
		return (uint8_t)scm_ret.x1;
	}
}

/*
 * struct qseecom_save_partition_hash_req
 * @partition_id - partition id.
 * @digest[SHA256_DIGEST_LENGTH] - sha256 digest.
 */
struct qseecom_save_partition_hash_req {
	uint32_t partition_id; /* in */
	uint8_t digest[SHA256_DIGEST_LENGTH]; /* in */
};


void save_kernel_hash_cmd(void *digest)
{
	uint32_t svc_id;
	uint32_t cmd_id;
	void *cmd_buf;
	size_t cmd_len;
	void *resp_buf = NULL;
	size_t resp_len = 0;
	struct qseecom_save_partition_hash_req req;
	scmcall_arg scm_arg = {0};

	/* no response */
	resp_buf = NULL;
	resp_len = 0;

	req.partition_id = 0; /* kernel */
	memcpy(req.digest, digest, sizeof(req.digest));

	if (!is_scm_armv8_support())
	{
		svc_id = SCM_SVC_ES;
		cmd_id = SCM_SAVE_PARTITION_HASH_ID;
		cmd_buf = (void *)&req;
		cmd_len = sizeof(req);

		scm_call(svc_id, cmd_id, cmd_buf, cmd_len, resp_buf, resp_len);
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_ES, SCM_SAVE_PARTITION_HASH_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x3, 0, SMC_PARAM_TYPE_BUFFER_READWRITE);
		scm_arg.x2 = req.partition_id;
		scm_arg.x3 = (uint32_t) &req.digest;
		scm_arg.x4 = sizeof(req.digest);

		if (scm_call2(&scm_arg, NULL))
			dprintf(CRITICAL, "Failed to Save kernel hash\n");
	}
}

int mdtp_cipher_dip_cmd(uint8_t *in_buf, uint32_t in_buf_size, uint8_t *out_buf,
			uint32_t out_buf_size, uint32_t direction)
{
	uint32_t svc_id;
	uint32_t cmd_id;
	void *cmd_buf;
	void *rsp_buf;
	size_t cmd_len;
	size_t rsp_len;
	mdtp_cipher_dip_req req;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	ASSERT(in_buf != NULL);
	ASSERT(out_buf != NULL);

	req.in_buf = in_buf;
	req.in_buf_size = in_buf_size;
	req.out_buf = out_buf;
	req.out_buf_size = out_buf_size;
	req.direction = direction;

	if (!is_scm_armv8_support())
	{
		svc_id = SCM_SVC_MDTP;
		cmd_id = SCM_MDTP_CIPHER_DIP;
		cmd_buf = (void *)&req;
		cmd_len = sizeof(req);
		rsp_buf = NULL;
		rsp_len = 0;

		if (scm_call(svc_id, cmd_id, cmd_buf, cmd_len, rsp_buf, rsp_len))
		{
			dprintf(CRITICAL, "Failed to call Cipher DIP SCM\n");
			return -1;
		}
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_MDTP, SCM_MDTP_CIPHER_DIP);
		scm_arg.x1 = MAKE_SCM_ARGS(0x5, SMC_PARAM_TYPE_BUFFER_READ, SMC_PARAM_TYPE_VALUE,
					   SMC_PARAM_TYPE_BUFFER_READWRITE, SMC_PARAM_TYPE_VALUE, SMC_PARAM_TYPE_VALUE);
		scm_arg.x2 = (uint32_t)req.in_buf;
		scm_arg.x3 = req.in_buf_size;
		scm_arg.x4 = (uint32_t)req.out_buf;
		scm_arg.x5[0] = req.out_buf_size;
		scm_arg.x5[1] = req.direction;

		if (scm_call2(&scm_arg, &scm_ret))
		{
			dprintf(CRITICAL, "Failed in Cipher DIP SCM call\n");
			return -1;
		}
	}

	return 0;
}

int qfprom_read_row_cmd(uint32_t row_address,
			uint32_t addr_type,
			uint32_t *row_data,
			uint32_t *qfprom_api_status)
{
	uint32_t svc_id;
	uint32_t cmd_id;
	void *cmd_buf;
	void *rsp_buf;
	size_t cmd_len;
	size_t rsp_len;
	qfprom_read_row_req req;
	scmcall_arg scm_arg = {0};
	scmcall_ret scm_ret = {0};

	req.row_address = row_address;
	req.addr_type = addr_type;
	req.row_data = row_data;
	req.qfprom_api_status = qfprom_api_status;

	if (!is_scm_armv8_support())
	{
		svc_id = SCM_SVC_FUSE;
		cmd_id = SCM_QFPROM_READ_ROW_ID;
		cmd_buf = (void *)&req;
		cmd_len = sizeof(req);
		rsp_buf = NULL;
		rsp_len = 0;

		if (scm_call(svc_id, cmd_id, cmd_buf, cmd_len, rsp_buf, rsp_len))
		{
			dprintf(CRITICAL, "Failed to call SCM_SVC_FUSE.SCM_QFPROM_READ_ROW_ID SCM\n");
			return -1;
		}
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_FUSE, SCM_QFPROM_READ_ROW_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x4, SMC_PARAM_TYPE_VALUE, SMC_PARAM_TYPE_VALUE,
					   SMC_PARAM_TYPE_BUFFER_READWRITE, SMC_PARAM_TYPE_BUFFER_READWRITE);
		scm_arg.x2 = req.row_address;
		scm_arg.x3 = req.addr_type;
		scm_arg.x4 = (uint32_t)req.row_data;
		scm_arg.x5[0] = (uint32_t)req.qfprom_api_status;

		if (scm_call2(&scm_arg, &scm_ret))
		{
			dprintf(CRITICAL, "Failed to call SCM_SVC_FUSE.SCM_QFPROM_READ_ROW_ID SCM\n");
			return -1;
		}
	}

	return 0;
}

/*
 * Switches the CE1 channel between ADM and register usage.
 * channel : AP_CE_REGISTER_USE, CE1 uses register interface
 *         : AP_CE_ADM_USE, CE1 uses ADM interface
 */
uint8_t switch_ce_chn_cmd(enum ap_ce_channel_type channel)
{
	uint32_t svc_id;
	uint32_t cmd_id;
	void *cmd_buf;
	size_t cmd_len;
	size_t resp_len = 0;
	uint8_t resp_buf;

	struct {
		uint32_t resource;
		uint32_t chn_id;
	} __PACKED switch_ce_chn_buf;

	if (is_scm_armv8_support())
	{
		dprintf(INFO, "%s: SCM call is not supported\n", __func__);
		return 0;
	}

	switch_ce_chn_buf.resource = TZ_RESOURCE_CE_AP;
	switch_ce_chn_buf.chn_id = channel;
	cmd_buf = (void *)&switch_ce_chn_buf;
	cmd_len = sizeof(switch_ce_chn_buf);

	/* response */
	resp_len = sizeof(resp_buf);

	svc_id = SCM_SVC_CE_CHN_SWITCH_ID;
	cmd_id = SCM_CE_CHN_SWITCH_ID;

	scm_call(svc_id, cmd_id, cmd_buf, cmd_len, &resp_buf, resp_len);
	return resp_buf;
}

int scm_halt_pmic_arbiter()
{
	int ret = 0;
	scmcall_arg scm_arg = {0};

	if (is_scm_armv8_support()) {
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_PWR, SCM_IO_DISABLE_PMIC_ARBITER);
		scm_arg.x1 = MAKE_SCM_ARGS(0x1);
		scm_arg.x2 = 0;
		scm_arg.atomic = true;
		ret = scm_call2(&scm_arg, NULL);
	} else {
		ret = scm_call_atomic(SCM_SVC_PWR, SCM_IO_DISABLE_PMIC_ARBITER, 0);
	}

	/* Retry with the SCM_IO_DISABLE_PMIC_ARBITER1 func ID if the above func ID fails */
	if(ret) {
		if (is_scm_armv8_support()) {
			scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_PWR, SCM_IO_DISABLE_PMIC_ARBITER1);
			scm_arg.x1 = MAKE_SCM_ARGS(0x1);
			scm_arg.x2 = 0;
			scm_arg.atomic = true;
			ret = scm_call2(&scm_arg, NULL);
		} else
			ret = scm_call_atomic(SCM_SVC_PWR, SCM_IO_DISABLE_PMIC_ARBITER1, 0);
	}

	return ret;
}

/* Exception Level exec secure-os call
 * Jumps to the kernel via secure-os and does not return
 * on a successful jump. System parameters are set up &
 * passed on to secure-os and are utilized to boot the
 * kernel.
 *
 * @kernel_entry	: kernel entry point passed in as link register.
 * @dtb_offset	: dt blob address passed in as w0.
 * @svc_id	: indicates direction of switch 32->64 or 64->32
 *
 * Assumes all sanity checks have been performed on arguments.
 */

void scm_elexec_call(paddr_t kernel_entry, paddr_t dtb_offset)
{
	uint32_t svc_id = SCM_SVC_MILESTONE_32_64_ID;
	uint32_t cmd_id = SCM_SVC_MILESTONE_CMD_ID;
	void *cmd_buf;
	size_t cmd_len;
	static el1_system_param param __attribute__((aligned(0x1000)));
	scmcall_arg scm_arg = {0};

	param.el1_x0 = dtb_offset;
	param.el1_elr = kernel_entry;

	/* Response Buffer = Null as no response expected */
	dprintf(INFO, "Jumping to kernel via monitor\n");

	if (!is_scm_armv8_support())
	{
		/* Command Buffer */
		cmd_buf = (void *)&param;
		cmd_len = sizeof(el1_system_param);

		scm_call(svc_id, cmd_id, cmd_buf, cmd_len, NULL, 0);
	}
	else
	{
		scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_MILESTONE_32_64_ID, SCM_SVC_MILESTONE_CMD_ID);
		scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_BUFFER_READ);
		scm_arg.x2 = (uint32_t) &param;
		scm_arg.x3 = sizeof(el1_system_param);

		scm_call2(&scm_arg, NULL);
	}

	/* Assert if execution ever reaches here */
	dprintf(CRITICAL, "Failed to jump to kernel\n");
	ASSERT(0);
}
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1078 |  | 
|  | 1079 | /* SCM Random Command */ | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1080 | int scm_random(uintptr_t * rbuf, uint32_t  r_len) | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1081 | { | 
|  | 1082 | int ret; | 
|  | 1083 | struct tz_prng_data data; | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1084 | scmcall_arg scm_arg = {0}; | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1085 | // Memory passed to TZ should be aligned to a cache line | 
|  | 1086 | BUF_DMA_ALIGN(rand_buf, sizeof(uintptr_t)); | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1087 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1088 | if (!is_scm_armv8_support()) | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1089 | { | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1090 | data.out_buf     = (uint8_t*) rand_buf; | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1091 | data.out_buf_size = r_len; | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1092 |  | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1093 | /* | 
|  | 1094 | * random buffer must be flushed/invalidated before and after TZ call. | 
|  | 1095 | */ | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1096 | arch_clean_invalidate_cache_range((addr_t) rand_buf, r_len); | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1097 |  | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1098 | ret = scm_call(TZ_SVC_CRYPTO, PRNG_CMD_ID, &data, sizeof(data), NULL, 0); | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1099 |  | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1100 | /* Invalidate the updated random buffer */ | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1101 | arch_clean_invalidate_cache_range((addr_t) rand_buf, r_len); | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1102 | } | 
|  | 1103 | else | 
|  | 1104 | { | 
|  | 1105 | scm_arg.x0 = MAKE_SIP_SCM_CMD(TZ_SVC_CRYPTO, PRNG_CMD_ID); | 
|  | 1106 | scm_arg.x1 = MAKE_SCM_ARGS(0x2, SMC_PARAM_TYPE_BUFFER_READWRITE); | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1107 | scm_arg.x2 = (uint32_t) rand_buf; | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1108 | scm_arg.x3 = r_len; | 
|  | 1109 |  | 
|  | 1110 | ret = scm_call2(&scm_arg, NULL); | 
|  | 1111 | if (!ret) | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1112 | arch_clean_invalidate_cache_range((addr_t) rand_buf, r_len); | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1113 | else | 
|  | 1114 | dprintf(CRITICAL, "Secure canary SCM failed: %x\n", ret); | 
|  | 1115 | } | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1116 |  | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1117 | // Copy the random value back into the caller's return buffer | 
|  | 1118 | *rbuf = *rand_buf; | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1119 | return ret; | 
|  | 1120 | } | 
|  | 1121 |  | 
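|  |  | /* | 
|  |  |  * Return a random value for use as a stack-protector canary. | 
|  |  |  * Falls back to the library rand() if the TZ PRNG call fails. | 
|  |  |  */ | 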
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1122 | uintptr_t get_canary() | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1123 | { | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1124 | uintptr_t canary; | 
|  | 1125 | if(scm_random(&canary, sizeof(canary))) { | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1126 | dprintf(CRITICAL, "scm_call for random failed!\n"); | 
|  | 1127 | /* | 
|  | 1128 | * Fall back to the library rand() API if the SCM call failed. | 
|  | 1129 | */ | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1130 | canary = rand(); | 
| Maria Yu | beeeeaf | 2014-06-30 13:05:43 +0800 | [diff] [blame] | 1131 | } | 
|  | 1132 |  | 
|  | 1133 | return canary; | 
|  | 1134 | } | 
| Aparna Mallavarapu | 6875ade | 2014-06-16 22:15:28 +0530 | [diff] [blame] | 1135 |  | 
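|  |  | /* | 
|  |  |  * Ask TZ to treat XPU violations as fatal errors. ERR_FATAL_ENABLE is | 
|  |  |  * passed in the command payload on the legacy interface and in x2 on the | 
|  |  |  * ARMv8 interface; the response value is reported if the call fails. | 
|  |  |  */ | 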
|  | 1136 | int scm_xpu_err_fatal_init() | 
|  | 1137 | { | 
|  | 1138 | uint32_t ret = 0; | 
|  | 1139 | uint32_t response = 0; | 
|  | 1140 | tz_xpu_prot_cmd cmd; | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1141 | scmcall_arg scm_arg = {0}; | 
|  | 1142 | scmcall_ret scm_ret = {0}; | 
| Aparna Mallavarapu | 6875ade | 2014-06-16 22:15:28 +0530 | [diff] [blame] | 1143 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1144 | if (!is_scm_armv8_support()) | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1145 | { | 
|  | 1146 | cmd.config = ERR_FATAL_ENABLE; | 
|  | 1147 | cmd.spare = 0; | 
| Aparna Mallavarapu | 6875ade | 2014-06-16 22:15:28 +0530 | [diff] [blame] | 1148 |  | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1149 | ret = scm_call(SVC_MEMORY_PROTECTION, XPU_ERR_FATAL, &cmd, sizeof(cmd), &response, | 
|  | 1150 | sizeof(response)); | 
|  | 1151 | } | 
|  | 1152 | else | 
|  | 1153 | { | 
|  | 1154 | scm_arg.x0 = MAKE_SIP_SCM_CMD(SVC_MEMORY_PROTECTION, XPU_ERR_FATAL); | 
|  | 1155 | scm_arg.x1 = MAKE_SCM_ARGS(0x2); | 
|  | 1156 | scm_arg.x2 = ERR_FATAL_ENABLE; | 
|  | 1157 | scm_arg.x3 = 0x0; | 
|  | 1158 | ret =  scm_call2(&scm_arg, &scm_ret); | 
|  | 1159 | response = scm_ret.x1; | 
|  | 1160 | } | 
| Aparna Mallavarapu | 6875ade | 2014-06-16 22:15:28 +0530 | [diff] [blame] | 1161 |  | 
|  | 1162 | if (ret) | 
|  | 1163 | dprintf(CRITICAL, "Failed to set XPU violations as fatal errors: %u\n", response); | 
|  | 1164 | else | 
|  | 1165 | dprintf(INFO, "Configured XPU violations to be fatal errors\n"); | 
|  | 1166 |  | 
|  | 1167 | return ret; | 
|  | 1168 | } | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1169 |  | 
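|  |  | /* | 
|  |  |  * Issue a single SMC instruction with the call arguments in r0-r5 (r6 zeroed). | 
|  |  |  * The SMC is retried while the secure world returns 1 in r0, presumably the | 
|  |  |  * "call interrupted" status. On return, x1-x3 from the secure world are | 
|  |  |  * copied into *ret when a result buffer is supplied. | 
|  |  |  */ | 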
|  | 1170 | static uint32_t scm_call_a32(uint32_t x0, uint32_t x1, uint32_t x2, uint32_t x3, uint32_t x4, uint32_t x5, scmcall_ret *ret) | 
|  | 1171 | { | 
|  | 1172 | register uint32_t r0 __asm__("r0") = x0; | 
|  | 1173 | register uint32_t r1 __asm__("r1") = x1; | 
|  | 1174 | register uint32_t r2 __asm__("r2") = x2; | 
|  | 1175 | register uint32_t r3 __asm__("r3") = x3; | 
|  | 1176 | register uint32_t r4 __asm__("r4") = x4; | 
|  | 1177 | register uint32_t r5 __asm__("r5") = x5; | 
| Channagoud Kadabi | e75efb9 | 2015-05-19 14:20:05 -0700 | [diff] [blame] | 1178 | register uint32_t r6 __asm__("r6") = 0; | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1179 |  | 
| Dinesh K Garg | 6bbbb70 | 2015-01-30 11:13:31 -0800 | [diff] [blame] | 1180 | do { | 
|  | 1181 | __asm__ volatile( | 
|  | 1182 | __asmeq("%0", "r0") | 
|  | 1183 | __asmeq("%1", "r1") | 
|  | 1184 | __asmeq("%2", "r2") | 
|  | 1185 | __asmeq("%3", "r3") | 
|  | 1186 | __asmeq("%4", "r0") | 
|  | 1187 | __asmeq("%5", "r1") | 
|  | 1188 | __asmeq("%6", "r2") | 
|  | 1189 | __asmeq("%7", "r3") | 
|  | 1190 | __asmeq("%8", "r4") | 
|  | 1191 | __asmeq("%9", "r5") | 
| Channagoud Kadabi | e75efb9 | 2015-05-19 14:20:05 -0700 | [diff] [blame] | 1192 | __asmeq("%10", "r6") | 
| Dinesh K Garg | 6bbbb70 | 2015-01-30 11:13:31 -0800 | [diff] [blame] | 1193 | "smc    #0  @ switch to secure world\n" | 
|  | 1194 | : "=r" (r0), "=r" (r1), "=r" (r2), "=r" (r3) | 
| Channagoud Kadabi | e75efb9 | 2015-05-19 14:20:05 -0700 | [diff] [blame] | 1195 | : "r" (r0), "r" (r1), "r" (r2), "r" (r3), "r" (r4), "r" (r5), "r" (r6)); | 
| Dinesh K Garg | 6bbbb70 | 2015-01-30 11:13:31 -0800 | [diff] [blame] | 1196 | } while(r0 == 1); | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1197 |  | 
|  | 1198 | if (ret) | 
|  | 1199 | { | 
|  | 1200 | ret->x1 = r1; | 
|  | 1201 | ret->x2 = r2; | 
|  | 1202 | ret->x3 = r3; | 
|  | 1203 | } | 
|  | 1204 |  | 
|  | 1205 | return r0; | 
|  | 1206 | } | 
|  | 1207 |  | 
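|  |  | /* | 
|  |  |  * Make an SCM call using the ARMv8 SMC calling convention. Sets the atomic | 
|  |  |  * bit in x0 when requested. If x1 indicates more arguments than fit in | 
|  |  |  * registers (more than SCM_MAX_ARG_LEN - 1), the remaining arguments in | 
|  |  |  * x5[] are copied into a cache-line aligned indirect buffer and the buffer | 
|  |  |  * address is passed in x5 instead. | 
|  |  |  */ | 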
|  | 1208 | uint32_t scm_call2(scmcall_arg *arg, scmcall_ret *ret) | 
|  | 1209 | { | 
|  | 1210 | uint32_t *indir_arg = NULL; | 
|  | 1211 | uint32_t x5; | 
|  | 1212 | int i; | 
|  | 1213 | uint32_t rc; | 
|  | 1214 |  | 
|  | 1215 | arg->x0 = arg->atomic ? (arg->x0 | SCM_ATOMIC_BIT) : arg->x0; | 
|  | 1216 | x5 = arg->x5[0]; | 
|  | 1217 |  | 
| Amit Blay | fe23ee2 | 2015-01-09 19:09:51 +0200 | [diff] [blame] | 1218 | if ((arg->x1 & 0xF) > SCM_MAX_ARG_LEN - 1) | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1219 | { | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1220 | indir_arg = memalign(CACHE_LINE, ROUNDUP((SCM_INDIR_MAX_LEN * sizeof(uint32_t)), CACHE_LINE)); | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1221 | ASSERT(indir_arg); | 
|  | 1222 |  | 
|  | 1223 | for (i = 0 ; i < SCM_INDIR_MAX_LEN; i++) | 
|  | 1224 | { | 
|  | 1225 | indir_arg[i] = arg->x5[i]; | 
|  | 1226 | } | 
| Channagoud Kadabi | efdeb8a | 2015-09-23 11:52:20 -0700 | [diff] [blame] | 1227 | arch_clean_invalidate_cache_range((addr_t) indir_arg, ROUNDUP((SCM_INDIR_MAX_LEN * sizeof(uint32_t)), CACHE_LINE)); | 
| Channagoud Kadabi | dd85e7f | 2014-08-05 19:58:37 -0700 | [diff] [blame] | 1228 | x5 = (addr_t) indir_arg; | 
|  | 1229 | } | 
|  | 1230 |  | 
|  | 1231 | rc = scm_call_a32(arg->x0, arg->x1, arg->x2, arg->x3, arg->x4, x5, ret); | 
|  | 1232 |  | 
|  | 1233 | if (rc) | 
|  | 1234 | { | 
|  | 1235 | dprintf(CRITICAL, "SCM call: 0x%x failed with: %x\n", arg->x0, rc); | 
|  |  | /* Do not leak the indirect argument buffer on the error path */ | 
|  |  | if (indir_arg) | 
|  |  | free(indir_arg); | 
|  | 1236 | return rc; | 
|  | 1237 | } | 
|  | 1238 |  | 
|  | 1239 | if (indir_arg) | 
|  | 1240 | free(indir_arg); | 
|  | 1241 |  | 
|  | 1242 | return 0; | 
|  | 1243 | } | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1244 |  | 
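|  |  | /* | 
|  |  |  * Cached fuse state. The conservative defaults assume secure boot is enabled | 
|  |  |  * and the wdog debug fuse is disabled until scm_check_boot_fuses() reads the | 
|  |  |  * actual state from TZ. | 
|  |  |  */ | 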
| Aparna Mallavarapu | 8adbee2 | 2015-03-10 19:58:06 +0530 | [diff] [blame] | 1245 | static bool secure_boot_enabled = true; | 
|  | 1246 | static bool wdog_debug_fuse_disabled = true; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1247 |  | 
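|  |  | /* | 
|  |  |  * Query the secure boot and debug-disable fuse state from TZ | 
|  |  |  * (TZBSP_SVC_INFO / IS_SECURE_BOOT_ENABLED) and update the cached flags | 
|  |  |  * above: bit 0 of the response clears secure_boot_enabled, bit 2 clears | 
|  |  |  * wdog_debug_fuse_disabled. | 
|  |  |  */ | 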
|  | 1248 | void scm_check_boot_fuses() | 
|  | 1249 | { | 
|  | 1250 | uint32_t ret = 0; | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1251 | uint32_t resp; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1252 | scmcall_arg scm_arg = {0}; | 
|  | 1253 | scmcall_ret scm_ret = {0}; | 
|  | 1254 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1255 | if (!is_scm_armv8_support()) { | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1256 | ret = scm_call(TZBSP_SVC_INFO, IS_SECURE_BOOT_ENABLED, NULL, 0, &resp, sizeof(resp)); | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1257 | } else { | 
|  | 1258 | scm_arg.x0 = MAKE_SIP_SCM_CMD(TZBSP_SVC_INFO, IS_SECURE_BOOT_ENABLED); | 
|  | 1259 | ret = scm_call2(&scm_arg, &scm_ret); | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1260 | resp = scm_ret.x1; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1261 | } | 
|  | 1262 |  | 
|  | 1263 | /* Parse Bit 0 and Bit 2 of the response */ | 
|  | 1264 | if(!ret) { | 
|  | 1265 | /* Bit 0 - SECBOOT_ENABLE_CHECK */ | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1266 | if(resp & 0x1) | 
| Aparna Mallavarapu | 8adbee2 | 2015-03-10 19:58:06 +0530 | [diff] [blame] | 1267 | secure_boot_enabled = false; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1268 | /* Bit 2 - DEBUG_DISABLE_CHECK */ | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1269 | if(resp & 0x4) | 
| Aparna Mallavarapu | 8adbee2 | 2015-03-10 19:58:06 +0530 | [diff] [blame] | 1270 | wdog_debug_fuse_disabled = false; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1271 | } else | 
|  | 1272 | dprintf(CRITICAL, "scm call to check secure boot fuses failed\n"); | 
|  | 1273 | } | 
|  | 1274 |  | 
|  | 1275 | bool is_secure_boot_enable() | 
|  | 1276 | { | 
|  | 1277 | scm_check_boot_fuses(); | 
| Aparna Mallavarapu | 8adbee2 | 2015-03-10 19:58:06 +0530 | [diff] [blame] | 1278 | return secure_boot_enabled; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1279 | } | 
|  | 1280 |  | 
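|  |  | /* | 
|  |  |  * Read a secure register through TZ. On success the value read is returned; | 
|  |  |  * note that on the ARMv8 path the SCM status code and the register value | 
|  |  |  * share the same return variable. | 
|  |  |  */ | 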
|  | 1281 | static uint32_t scm_io_read(addr_t address) | 
|  | 1282 | { | 
|  | 1283 | uint32_t ret; | 
|  | 1284 | scmcall_arg scm_arg = {0}; | 
|  | 1285 | scmcall_ret scm_ret = {0}; | 
|  | 1286 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1287 | if (!is_scm_armv8_support()) { | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1288 | ret = scm_call_atomic(SCM_SVC_IO, SCM_IO_READ, address); | 
|  | 1289 | } else { | 
|  | 1290 | scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_IO, SCM_IO_READ); | 
|  | 1291 | scm_arg.x1 = MAKE_SCM_ARGS(0x1); | 
|  | 1292 | scm_arg.x2 = address; | 
|  | 1293 | scm_arg.atomic = true; | 
|  | 1294 | ret = scm_call2(&scm_arg, &scm_ret); | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1295 | /* Return the value read if the call is successful */ | 
|  | 1296 | if (!ret) | 
|  | 1297 | ret = scm_ret.x1; | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1298 | } | 
|  | 1299 | return ret; | 
|  | 1300 | } | 
|  | 1301 |  | 
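|  |  | /* | 
|  |  |  * Write a value to a secure register through TZ and return the SCM | 
|  |  |  * status code. | 
|  |  |  */ | 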
| Aparna Mallavarapu | da91ea9 | 2015-07-10 12:03:46 +0530 | [diff] [blame] | 1302 | uint32_t scm_io_write(uint32_t address, uint32_t val) | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1303 | { | 
|  | 1304 | uint32_t ret; | 
|  | 1305 | scmcall_arg scm_arg = {0}; | 
|  | 1306 | scmcall_ret scm_ret = {0}; | 
|  | 1307 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1308 | if (!is_scm_armv8_support()) { | 
| Aparna Mallavarapu | 246c30b | 2014-12-11 12:07:51 +0530 | [diff] [blame] | 1309 | ret = scm_call_atomic2(SCM_SVC_IO, SCM_IO_WRITE, address, val); | 
|  | 1310 | } else { | 
|  | 1311 | scm_arg.x0 = MAKE_SIP_SCM_CMD(SCM_SVC_IO, SCM_IO_WRITE); | 
|  | 1312 | scm_arg.x1 = MAKE_SCM_ARGS(0x2); | 
|  | 1313 | scm_arg.x2 = address; | 
|  | 1314 | scm_arg.x3 = val; | 
|  | 1315 | scm_arg.atomic = true; | 
|  | 1316 | ret = scm_call2(&scm_arg, &scm_ret); | 
|  | 1317 | } | 
|  | 1318 | return ret; | 
|  | 1319 | } | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1320 |  | 
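|  |  | /* | 
|  |  |  * Two-argument SCM call helper usable on both the legacy and ARMv8 | 
|  |  |  * conventions. Note that the ARMv8 path here does not set scm_arg.atomic. | 
|  |  |  */ | 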
| Aparna Mallavarapu | da91ea9 | 2015-07-10 12:03:46 +0530 | [diff] [blame] | 1321 | int scm_call2_atomic(uint32_t svc, uint32_t cmd, uint32_t arg1, uint32_t arg2) | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1322 | { | 
|  | 1323 | uint32_t ret = 0; | 
|  | 1324 | scmcall_arg scm_arg = {0}; | 
|  | 1325 | scmcall_ret scm_ret = {0}; | 
|  | 1326 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1327 | if (!is_scm_armv8_support()) | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1328 | { | 
|  | 1329 | ret = scm_call_atomic2(svc, cmd, arg1, arg2); | 
|  | 1330 | } else { | 
|  | 1331 | scm_arg.x0 = MAKE_SIP_SCM_CMD(svc, cmd); | 
|  | 1332 | scm_arg.x1 = MAKE_SCM_ARGS(0x2); | 
|  | 1333 | scm_arg.x2 = arg1; | 
|  | 1334 | scm_arg.x3 = arg2; | 
|  | 1335 | ret =  scm_call2(&scm_arg, &scm_ret); | 
|  | 1336 | } | 
|  | 1337 | return ret; | 
|  | 1338 | } | 
|  | 1339 |  | 
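|  |  | /* | 
|  |  |  * Disable SDI (secure watchdog debug) via the WDOG_DEBUG_DISABLE call. | 
|  |  |  * The call is made only when secure boot is off and the wdog debug fuse | 
|  |  |  * is not already disabled. | 
|  |  |  */ | 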
| lijuang | 1cff838 | 2016-01-11 17:56:54 +0800 | [diff] [blame] | 1340 | int scm_disable_sdi() | 
|  | 1341 | { | 
|  | 1342 | int ret = 0; | 
|  | 1343 |  | 
|  | 1344 | scm_check_boot_fuses(); | 
|  | 1345 |  | 
|  | 1346 | /* Make WDOG_DEBUG DISABLE scm call only in non-secure boot */ | 
|  | 1347 | if(!(secure_boot_enabled || wdog_debug_fuse_disabled)) { | 
|  | 1348 | ret = scm_call2_atomic(SCM_SVC_BOOT, WDOG_DEBUG_DISABLE, 1, 0); | 
|  | 1349 | if(ret) | 
|  | 1350 | dprintf(CRITICAL, "Failed to disable secure wdog debug: %d\n", ret); | 
|  | 1351 | } | 
|  | 1352 | return ret; | 
|  | 1353 | } | 
|  | 1354 |  | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1355 | #if PLATFORM_USE_SCM_DLOAD | 
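|  |  | /* | 
|  |  |  * Program the download-mode cookie. NORMAL_DLOAD selects SCM_DLOAD_MODE | 
|  |  |  * (skipped entirely when DISABLE_DLOAD_MODE is set), EMERGENCY_DLOAD selects | 
|  |  |  * SCM_EDLOAD_MODE, and any other value clears the cookie. The value is set | 
|  |  |  * via the SCM_DLOAD_CMD call when TZ exposes it, otherwise by writing | 
|  |  |  * TCSR_BOOT_MISC_DETECT directly. | 
|  |  |  */ | 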
| lijuang | 395b5e6 | 2015-11-19 17:39:44 +0800 | [diff] [blame] | 1356 | int scm_dload_mode(enum reboot_reason mode) | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1357 | { | 
|  | 1358 | int ret = 0; | 
|  | 1359 | uint32_t dload_type; | 
|  | 1360 |  | 
|  | 1361 | dprintf(SPEW, "DLOAD mode: %d\n", mode); | 
| lijuang | 1cff838 | 2016-01-11 17:56:54 +0800 | [diff] [blame] | 1362 | if (mode == NORMAL_DLOAD) { | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1363 | dload_type = SCM_DLOAD_MODE; | 
| lijuang | 1cff838 | 2016-01-11 17:56:54 +0800 | [diff] [blame] | 1364 | #if DISABLE_DLOAD_MODE | 
|  | 1365 | return 0; | 
|  | 1366 | #endif | 
|  | 1367 | } else if(mode == EMERGENCY_DLOAD) | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1368 | dload_type = SCM_EDLOAD_MODE; | 
|  | 1369 | else | 
|  | 1370 | dload_type = 0; | 
|  | 1371 |  | 
|  | 1372 | /* Set download mode: use the SCM_DLOAD_CMD call if available, else write the Boot MISC register directly */ | 
| Channagoud Kadabi | a2184b8 | 2015-07-07 10:09:32 -0700 | [diff] [blame] | 1373 | ret = is_scm_call_available(SCM_SVC_BOOT, SCM_DLOAD_CMD); | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1374 |  | 
| Channagoud Kadabi | a2184b8 | 2015-07-07 10:09:32 -0700 | [diff] [blame] | 1375 | if (ret > 0) | 
|  | 1376 | ret = scm_call2_atomic(SCM_SVC_BOOT, SCM_DLOAD_CMD, dload_type, 0); | 
|  | 1377 | else | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1378 | ret = scm_io_write(TCSR_BOOT_MISC_DETECT,dload_type); | 
| Channagoud Kadabi | a2184b8 | 2015-07-07 10:09:32 -0700 | [diff] [blame] | 1379 |  | 
|  | 1380 | if(ret) { | 
|  | 1381 | dprintf(CRITICAL, "Failed to write to boot misc: %d\n", ret); | 
|  | 1382 | return ret; | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1383 | } | 
|  | 1384 |  | 
| lijuang | 1cff838 | 2016-01-11 17:56:54 +0800 | [diff] [blame] | 1385 | #if !DISABLE_DLOAD_MODE | 
|  | 1386 | return scm_disable_sdi(); | 
|  | 1387 | #else | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1388 | return ret; | 
| lijuang | 1cff838 | 2016-01-11 17:56:54 +0800 | [diff] [blame] | 1389 | #endif | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1390 | } | 
| Channagoud Kadabi | 6479ce3 | 2015-06-17 17:30:40 -0700 | [diff] [blame] | 1391 |  | 
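|  |  | /* | 
|  |  |  * Check whether the device should enter download mode: issue the | 
|  |  |  * TZ_SVC_DLOAD_MODE/SCM_DLOAD_CMD call and, if it succeeds, read | 
|  |  |  * TCSR_BOOT_MISC_DETECT (upper 16 bits on SoC versions below 0x30000) | 
|  |  |  * and compare the result against SCM_DLOAD_MODE. | 
|  |  |  */ | 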
|  | 1392 | bool scm_device_enter_dload() | 
|  | 1393 | { | 
|  | 1394 | uint32_t ret = 0; | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1395 | uint32_t dload_mode = 0; | 
| Channagoud Kadabi | 6479ce3 | 2015-06-17 17:30:40 -0700 | [diff] [blame] | 1396 |  | 
|  | 1397 | scmcall_arg scm_arg = {0}; | 
|  | 1398 | scmcall_ret scm_ret = {0}; | 
|  | 1399 |  | 
|  | 1400 | scm_arg.x0 = MAKE_SIP_SCM_CMD(TZ_SVC_DLOAD_MODE, SCM_DLOAD_CMD); | 
|  | 1401 | ret = scm_call2(&scm_arg, &scm_ret); | 
|  | 1402 | if (ret) | 
|  | 1403 | dprintf(CRITICAL, "SCM call to check dload mode failed: %x\n", ret); | 
|  | 1404 |  | 
| Channagoud Kadabi | 36cdfb6 | 2015-10-23 16:53:37 -0700 | [diff] [blame] | 1405 | if (!ret) | 
|  | 1406 | { | 
|  | 1407 | dload_mode = scm_io_read(TCSR_BOOT_MISC_DETECT); | 
|  | 1408 | if (board_soc_version() < 0x30000) | 
|  | 1409 | dload_mode = (dload_mode >> 16) & 0xFFFF; | 
|  | 1410 | } | 
|  | 1411 |  | 
|  | 1412 | if (dload_mode == SCM_DLOAD_MODE) | 
| Channagoud Kadabi | 6479ce3 | 2015-06-17 17:30:40 -0700 | [diff] [blame] | 1413 | return true; | 
|  | 1414 |  | 
|  | 1415 | return false; | 
|  | 1416 | } | 
| Aparna Mallavarapu | 664ea77 | 2015-02-24 18:44:33 +0530 | [diff] [blame] | 1417 | #endif |