/*
 * Copyright (c) 2008 Travis Geiselbrecht
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <debug.h>
#include <arch.h>
#include <arch/ops.h>
#include <arch/arm.h>
#include <arch/arm/mmu.h>
#include <arch/defines.h>
#include <platform.h>

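/*
 * Cortex-A8 implements the Vector Base Address Register (VBAR, CP15
 * c12/c0/0), so the exception vector base can be relocated instead of
 * being fixed at address 0.  The exception vector table sits at the start
 * of the image (MEMBASE), so pointing VBAR there means nothing has to be
 * mapped at 0.
 */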
#if ARM_CPU_CORTEX_A8
static void set_vector_base(addr_t addr)
{
    __asm__ volatile("mcr p15, 0, %0, c12, c0, 0" :: "r" (addr));
}
#endif

void arch_early_init(void)
{
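    /*
     * Early arch init sequence: disable the caches, move the exception
     * vector base off of address 0 (Cortex-A8), set up the MMU, turn the
     * caches back on, then enable VFP/NEON access and the cycle counter
     * where the build configuration asks for them.
     */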
    /* turn off the cache */
    arch_disable_cache(UCACHE);

    /* set the vector base to our exception vectors so we don't need to double map at 0 */
#if ARM_CPU_CORTEX_A8
    set_vector_base(MEMBASE);
#endif

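    /*
     * While the caches are still off, let the MMU code build its initial
     * translation tables (in lk this is typically a flat 1:1 section map)
     * and switch the MMU on.
     */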
#if ARM_WITH_MMU
    arm_mmu_init();
#endif

    /* turn the cache back on */
    arch_enable_cache(UCACHE);

#if ARM_WITH_NEON
    /* enable cp10 and cp11 */
    uint32_t val;
    __asm__ volatile("mrc p15, 0, %0, c1, c0, 2" : "=r" (val));
    val |= (3<<22)|(3<<20);
    __asm__ volatile("mcr p15, 0, %0, c1, c0, 2" :: "r" (val));

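    /*
     * c1/c0/2 is CPACR: bits [21:20] and [23:22] grant full (user and
     * privileged) access to cp10 and cp11, the coprocessor interface used
     * by VFP/NEON.  The ISB below makes the new access rights visible
     * before FPEXC is touched.
     */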
    isb();

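    /*
     * FPEXC is reached through cp10 (opc1=7, c8/c0/0); bit 30 is the EN
     * bit that turns the VFP/Advanced SIMD unit on once coprocessor
     * access has been granted above.
     */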
    /* set enable bit in fpexc */
    __asm__ volatile("mrc p10, 7, %0, c8, c0, 0" : "=r" (val));
    val |= (1<<30);
    __asm__ volatile("mcr p10, 7, %0, c8, c0, 0" :: "r" (val));
#endif

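    /*
     * c9/c12/0 is PMCR: bit 0 (E) enables the performance counters and
     * bit 3 (D) would make the cycle counter tick only every 64 cycles,
     * so it is cleared here.  c9/c12/1 is PMCNTENSET, where bit 31 (C)
     * turns on the cycle counter itself (PMCCNTR).
     */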
#if ARM_CPU_CORTEX_A8
    /* enable the cycle count register */
    uint32_t en;
    __asm__ volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (en));
    en &= ~(1<<3); /* cycle count every cycle */
    en |= 1; /* enable all performance counters */
    __asm__ volatile("mcr p15, 0, %0, c9, c12, 0" :: "r" (en));

    /* enable cycle counter */
    en = (1<<31);
    __asm__ volatile("mcr p15, 0, %0, c9, c12, 1" :: "r" (en));
#endif
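
    /* on Cortex-A8 the free-running cycle count is now readable from PMCCNTR (c9/c13/0) */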
}

void arch_init(void)
{
}