/*
 * Copyright (c) 2008 Travis Geiselbrecht
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <debug.h>
#include <arch.h>
#include <arch/ops.h>
#include <arch/arm.h>
#include <arch/arm/mmu.h>
#include <platform.h>

#if ARM_CPU_CORTEX_A8
static void set_vector_base(addr_t addr)
{
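    /* write VBAR (CP15 c12, c0, 0) so exceptions vector through addr instead of address 0 */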
    __asm__ volatile("mcr p15, 0, %0, c12, c0, 0" :: "r" (addr));
}
#endif
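
/*
 * Early architecture setup: the caches are turned off while the exception
 * vector base and (if configured) the MMU are brought up, then turned back on.
 */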
void arch_early_init(void)
{
    /* turn off the cache */
    arch_disable_cache(UCACHE);

    /* set the vector base to our exception vectors so we don't need to double-map them at 0 */
#if ARM_CPU_CORTEX_A8
    set_vector_base(MEMBASE);
#endif
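
    /* with the caches still off, arm_mmu_init() is expected to set up the initial translation tables and turn on the MMU */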
#if ARM_WITH_MMU
    arm_mmu_init();

#endif

    /* turn the cache back on */
    arch_enable_cache(UCACHE);
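
    /*
     * VFP/NEON enable sequence: grant full access to cp10/cp11 in CPACR
     * (CP15 c1, c0, 2), then set FPEXC.EN (bit 30) to turn the unit on.
     */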
#if ARM_WITH_NEON
    /* enable cp10 and cp11 */
    uint32_t val;
    __asm__ volatile("mrc p15, 0, %0, c1, c0, 2" : "=r" (val));
    val |= (3<<22)|(3<<20);
    __asm__ volatile("mcr p15, 0, %0, c1, c0, 2" :: "r" (val));

    /* set enable bit in fpexc */
    __asm__ volatile("mrc p10, 7, %0, c8, c0, 0" : "=r" (val));
    val |= (1<<30);
    __asm__ volatile("mcr p10, 7, %0, c8, c0, 0" :: "r" (val));
#endif
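
    /*
     * Performance monitor setup: PMCR (CP15 c9, c12, 0) bit 0 enables the
     * counters and clearing bit 3 makes CCNT count every cycle; PMCNTENSET
     * (c9, c12, 1) bit 31 then enables the cycle counter itself.
     */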
#if ARM_CPU_CORTEX_A8
    /* enable the cycle count register */
    uint32_t en;
    __asm__ volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (en));
    en &= ~(1<<3); /* cycle count every cycle */
    en |= 1; /* enable all performance counters */
    __asm__ volatile("mcr p15, 0, %0, c9, c12, 0" :: "r" (en));

    /* enable cycle counter */
    en = (1<<31);
    __asm__ volatile("mcr p15, 0, %0, c9, c12, 1" :: "r" (en));
#endif
}

void arch_init(void)
{
}