/*
 * DaVinci timer subsystem
 *
 * Author: Kevin Hilman, MontaVista Software, Inc. <source@mvista.com>
 *
 * 2007 (c) MontaVista Software, Inc. This file is licensed under
 * the terms of the GNU General Public License version 2. This program
 * is licensed "as is" without any warranty of any kind, whether express
 * or implied.
 */
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/interrupt.h>
#include <linux/clocksource.h>
#include <linux/clockchips.h>
#include <linux/io.h>
#include <linux/clk.h>
#include <linux/err.h>
#include <linux/platform_device.h>
#include <linux/sched_clock.h>

#include <asm/mach/irq.h>
#include <asm/mach/time.h>

#include <mach/cputype.h>
#include <mach/hardware.h>
#include <mach/time.h>

#include "clock.h"

static struct clock_event_device clockevent_davinci;
static unsigned int davinci_clock_tick_rate;

/*
 * This driver configures the 2 64-bit count-up timers as 4 independent
 * 32-bit count-up timers; two of them are used here, one as the
 * clockevent device and one as a free-running clocksource.
 */

enum {
        TID_CLOCKEVENT,
        TID_CLOCKSOURCE,
};

/* Timer register offsets */
#define PID12                        0x0
#define TIM12                        0x10
#define TIM34                        0x14
#define PRD12                        0x18
#define PRD34                        0x1c
#define TCR                          0x20
#define TGCR                         0x24
#define WDTCR                        0x28

/* Offsets of the 8 compare registers */
#define CMP12_0                      0x60
#define CMP12_1                      0x64
#define CMP12_2                      0x68
#define CMP12_3                      0x6c
#define CMP12_4                      0x70
#define CMP12_5                      0x74
#define CMP12_6                      0x78
#define CMP12_7                      0x7c

/* Timer register bitfields */
#define TCR_ENAMODE_DISABLE          0x0
#define TCR_ENAMODE_ONESHOT          0x1
#define TCR_ENAMODE_PERIODIC         0x2
#define TCR_ENAMODE_MASK             0x3

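/*
 * Note: TCR carries one ENAMODE field per 32-bit half of a timer pair,
 * bits 7:6 for TIM12 and bits 23:22 for TIM34, which is why timer_init()
 * below uses an enamode_shift of 6 or 22.
 */
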
#define TGCR_TIMMODE_SHIFT           2
#define TGCR_TIMMODE_64BIT_GP        0x0
#define TGCR_TIMMODE_32BIT_UNCHAINED 0x1
#define TGCR_TIMMODE_64BIT_WDOG      0x2
#define TGCR_TIMMODE_32BIT_CHAINED   0x3

#define TGCR_TIM12RS_SHIFT           0
#define TGCR_TIM34RS_SHIFT           1
#define TGCR_RESET                   0x0
#define TGCR_UNRESET                 0x1
#define TGCR_RESET_MASK              0x3

#define WDTCR_WDEN_SHIFT             14
#define WDTCR_WDEN_DISABLE           0x0
#define WDTCR_WDEN_ENABLE            0x1
#define WDTCR_WDKEY_SHIFT            16
#define WDTCR_WDKEY_SEQ0             0xa5c6
#define WDTCR_WDKEY_SEQ1             0xda7e
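/*
 * The watchdog is armed by writing WDKEY_SEQ0 and then WDKEY_SEQ1 to
 * WDTCR with WDEN set; once armed, a write with any other key value
 * forces an immediate reset, which davinci_watchdog_reset() relies on.
 */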

struct timer_s {
        char *name;
        unsigned int id;
        unsigned long period;
        unsigned long opts;
        unsigned long flags;
        void __iomem *base;
        unsigned long tim_off;
        unsigned long prd_off;
        unsigned long enamode_shift;
        struct irqaction irqaction;
};
static struct timer_s timers[];

/* values for 'opts' field of struct timer_s */
#define TIMER_OPTS_DISABLED          0x01
#define TIMER_OPTS_ONESHOT           0x02
#define TIMER_OPTS_PERIODIC          0x04
#define TIMER_OPTS_STATE_MASK        0x07

#define TIMER_OPTS_USE_COMPARE       0x80000000
#define USING_COMPARE(t)             ((t)->opts & TIMER_OPTS_USE_COMPARE)

static char *id_to_name[] = {
        [T0_BOT] = "timer0_0",
        [T0_TOP] = "timer0_1",
        [T1_BOT] = "timer1_0",
        [T1_TOP] = "timer1_1",
};

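/*
 * timer32_config() (re)programs one 32-bit timer.  In compare mode it
 * only advances the compare register by the requested period, leaving
 * the counter free-running; otherwise it stops the timer, reloads the
 * counter and period registers, and restarts it in one-shot or periodic
 * mode.  It is called for every timer at init time and again from the
 * clockevent set_next_event/set_mode hooks.
 */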
static int timer32_config(struct timer_s *t)
{
        u32 tcr;
        struct davinci_soc_info *soc_info = &davinci_soc_info;

        if (USING_COMPARE(t)) {
                struct davinci_timer_instance *dtip =
                                soc_info->timer_info->timers;
                int event_timer = ID_TO_TIMER(timers[TID_CLOCKEVENT].id);

                /*
                 * Next interrupt should be the current time reg value plus
                 * the new period (using 32-bit unsigned addition/wrapping
                 * to 0 on overflow). This assumes that the clocksource
                 * is setup to count to 2^32-1 before wrapping around to 0.
                 */
                __raw_writel(__raw_readl(t->base + t->tim_off) + t->period,
                        t->base + dtip[event_timer].cmp_off);
        } else {
                tcr = __raw_readl(t->base + TCR);

                /* disable timer */
                tcr &= ~(TCR_ENAMODE_MASK << t->enamode_shift);
                __raw_writel(tcr, t->base + TCR);

                /* reset counter to zero, set new period */
                __raw_writel(0, t->base + t->tim_off);
                __raw_writel(t->period, t->base + t->prd_off);

                /* Set enable mode */
                if (t->opts & TIMER_OPTS_ONESHOT)
                        tcr |= TCR_ENAMODE_ONESHOT << t->enamode_shift;
                else if (t->opts & TIMER_OPTS_PERIODIC)
                        tcr |= TCR_ENAMODE_PERIODIC << t->enamode_shift;

                __raw_writel(tcr, t->base + TCR);
        }
        return 0;
}

static inline u32 timer32_read(struct timer_s *t)
{
        return __raw_readl(t->base + t->tim_off);
}

static irqreturn_t timer_interrupt(int irq, void *dev_id)
{
        struct clock_event_device *evt = &clockevent_davinci;

        evt->event_handler(evt);
        return IRQ_HANDLED;
}

/* called when 32-bit counter wraps */
static irqreturn_t freerun_interrupt(int irq, void *dev_id)
{
        return IRQ_HANDLED;
}

static struct timer_s timers[] = {
        [TID_CLOCKEVENT] = {
                .name = "clockevent",
                .opts = TIMER_OPTS_DISABLED,
                .irqaction = {
                        .flags = IRQF_DISABLED | IRQF_TIMER,
                        .handler = timer_interrupt,
                }
        },
        [TID_CLOCKSOURCE] = {
                .name = "free-run counter",
                .period = ~0,
                .opts = TIMER_OPTS_PERIODIC,
                .irqaction = {
                        .flags = IRQF_DISABLED | IRQF_TIMER,
                        .handler = freerun_interrupt,
                }
        },
};
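
/*
 * Note that the free-run counter above is configured with period ~0, so
 * it counts the full 0..2^32-1 range before wrapping, matching the
 * assumption made for compare mode in timer32_config().
 */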

static void __init timer_init(void)
{
        struct davinci_soc_info *soc_info = &davinci_soc_info;
        struct davinci_timer_instance *dtip = soc_info->timer_info->timers;
        void __iomem *base[2];
        int i;

        /* Global init of each 64-bit timer as a whole */
        for (i = 0; i < 2; i++) {
                u32 tgcr;

                base[i] = ioremap(dtip[i].base, SZ_4K);
                if (WARN_ON(!base[i]))
                        continue;

                /* Disabled, Internal clock source */
                __raw_writel(0, base[i] + TCR);

                /* reset both timers, no pre-scaler for timer34 */
                tgcr = 0;
                __raw_writel(tgcr, base[i] + TGCR);

                /* Set both timers to unchained 32-bit */
                tgcr = TGCR_TIMMODE_32BIT_UNCHAINED << TGCR_TIMMODE_SHIFT;
                __raw_writel(tgcr, base[i] + TGCR);

                /* Unreset timers */
                tgcr |= (TGCR_UNRESET << TGCR_TIM12RS_SHIFT) |
                        (TGCR_UNRESET << TGCR_TIM34RS_SHIFT);
                __raw_writel(tgcr, base[i] + TGCR);

                /* Init both counters to zero */
                __raw_writel(0, base[i] + TIM12);
                __raw_writel(0, base[i] + TIM34);
        }

        /* Init of each timer as a 32-bit timer */
        for (i = 0; i < ARRAY_SIZE(timers); i++) {
                struct timer_s *t = &timers[i];
                int timer = ID_TO_TIMER(t->id);
                u32 irq;

                t->base = base[timer];
                if (!t->base)
                        continue;

                if (IS_TIMER_BOT(t->id)) {
                        t->enamode_shift = 6;
                        t->tim_off = TIM12;
                        t->prd_off = PRD12;
                        irq = dtip[timer].bottom_irq;
                } else {
                        t->enamode_shift = 22;
                        t->tim_off = TIM34;
                        t->prd_off = PRD34;
                        irq = dtip[timer].top_irq;
                }

                /* Register interrupt */
                t->irqaction.name = t->name;
                t->irqaction.dev_id = (void *)t;

                if (t->irqaction.handler != NULL) {
                        irq = USING_COMPARE(t) ? dtip[i].cmp_irq : irq;
                        setup_irq(irq, &t->irqaction);
                }
        }
}

/*
 * clocksource
 */
static cycle_t read_cycles(struct clocksource *cs)
{
        struct timer_s *t = &timers[TID_CLOCKSOURCE];

        return (cycles_t)timer32_read(t);
}

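/*
 * .name is filled in from id_to_name[] and mult/shift are computed by
 * clocksource_register_hz() in davinci_timer_init().
 */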
static struct clocksource clocksource_davinci = {
        .rating         = 300,
        .read           = read_cycles,
        .mask           = CLOCKSOURCE_MASK(32),
        .flags          = CLOCK_SOURCE_IS_CONTINUOUS,
};

/*
 * Provide a higher-resolution sched_clock() based on the free-running
 * counter; it is hooked up via setup_sched_clock() in davinci_timer_init().
 */
static u32 notrace davinci_read_sched_clock(void)
{
        return timer32_read(&timers[TID_CLOCKSOURCE]);
}

/*
 * clockevent
 */
static int davinci_set_next_event(unsigned long cycles,
                                  struct clock_event_device *evt)
{
        struct timer_s *t = &timers[TID_CLOCKEVENT];

        t->period = cycles;
        timer32_config(t);
        return 0;
}

static void davinci_set_mode(enum clock_event_mode mode,
                             struct clock_event_device *evt)
{
        struct timer_s *t = &timers[TID_CLOCKEVENT];

        switch (mode) {
        case CLOCK_EVT_MODE_PERIODIC:
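                /*
                 * One timer period per tick; for example, with a
                 * (hypothetical) 24 MHz timer clock and HZ=100 this
                 * would be 240000 counts, i.e. 10 ms.
                 */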
                t->period = davinci_clock_tick_rate / (HZ);
                t->opts &= ~TIMER_OPTS_STATE_MASK;
                t->opts |= TIMER_OPTS_PERIODIC;
                timer32_config(t);
                break;
        case CLOCK_EVT_MODE_ONESHOT:
                t->opts &= ~TIMER_OPTS_STATE_MASK;
                t->opts |= TIMER_OPTS_ONESHOT;
                break;
        case CLOCK_EVT_MODE_UNUSED:
        case CLOCK_EVT_MODE_SHUTDOWN:
                t->opts &= ~TIMER_OPTS_STATE_MASK;
                t->opts |= TIMER_OPTS_DISABLED;
                break;
        case CLOCK_EVT_MODE_RESUME:
                break;
        }
}

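/*
 * name, mult, min/max delta and cpumask are filled in by
 * davinci_timer_init() before the device is registered.
 */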
static struct clock_event_device clockevent_davinci = {
        .features       = CLOCK_EVT_FEAT_PERIODIC | CLOCK_EVT_FEAT_ONESHOT,
        .shift          = 32,
        .set_next_event = davinci_set_next_event,
        .set_mode       = davinci_set_mode,
};

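/*
 * Registration sketch (illustrative only, not part of this file): board
 * code is expected to point the machine's timer init hook at this
 * function, along the lines of
 *
 *      MACHINE_START(DAVINCI_EVM, "DaVinci DM644x EVM")
 *              ...
 *              .init_time      = davinci_timer_init,
 *      MACHINE_END
 */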
void __init davinci_timer_init(void)
{
        struct clk *timer_clk;
        struct davinci_soc_info *soc_info = &davinci_soc_info;
        unsigned int clockevent_id;
        unsigned int clocksource_id;
        static char err[] __initdata = KERN_ERR
                "%s: can't register clocksource!\n";
        int i;

        clockevent_id = soc_info->timer_info->clockevent_id;
        clocksource_id = soc_info->timer_info->clocksource_id;

        timers[TID_CLOCKEVENT].id = clockevent_id;
        timers[TID_CLOCKSOURCE].id = clocksource_id;

        /*
         * If using same timer for both clock events & clocksource,
         * a compare register must be used to generate an event interrupt.
         * This is equivalent to a oneshot timer only (not periodic).
         */
        if (clockevent_id == clocksource_id) {
                struct davinci_timer_instance *dtip =
                                soc_info->timer_info->timers;
                int event_timer = ID_TO_TIMER(clockevent_id);

                /* Only bottom timers can use compare regs */
                if (IS_TIMER_TOP(clockevent_id))
                        pr_warning("davinci_timer_init: Invalid use of system timers. Results unpredictable.\n");
                else if ((dtip[event_timer].cmp_off == 0)
                                || (dtip[event_timer].cmp_irq == 0))
                        pr_warning("davinci_timer_init: Invalid timer instance setup. Results unpredictable.\n");
                else {
                        timers[TID_CLOCKEVENT].opts |= TIMER_OPTS_USE_COMPARE;
                        clockevent_davinci.features = CLOCK_EVT_FEAT_ONESHOT;
                }
        }

        timer_clk = clk_get(NULL, "timer0");
        BUG_ON(IS_ERR(timer_clk));
        clk_prepare_enable(timer_clk);

        /* init timer hw */
        timer_init();

        davinci_clock_tick_rate = clk_get_rate(timer_clk);

        /* setup clocksource */
        clocksource_davinci.name = id_to_name[clocksource_id];
        if (clocksource_register_hz(&clocksource_davinci,
                                    davinci_clock_tick_rate))
                printk(err, clocksource_davinci.name);

        setup_sched_clock(davinci_read_sched_clock, 32,
                          davinci_clock_tick_rate);

        /* setup clockevent */
        clockevent_davinci.name = id_to_name[timers[TID_CLOCKEVENT].id];
        clockevent_davinci.mult = div_sc(davinci_clock_tick_rate, NSEC_PER_SEC,
                                         clockevent_davinci.shift);
        clockevent_davinci.max_delta_ns =
                clockevent_delta2ns(0xfffffffe, &clockevent_davinci);
        clockevent_davinci.min_delta_ns = 50000; /* 50 usec */

        clockevent_davinci.cpumask = cpumask_of(0);
        clockevents_register_device(&clockevent_davinci);

        for (i = 0; i < ARRAY_SIZE(timers); i++)
                timer32_config(&timers[i]);
}

/* reset board using watchdog timer */
void davinci_watchdog_reset(struct platform_device *pdev)
{
        u32 tgcr, wdtcr;
        void __iomem *base;
        struct clk *wd_clk;

        base = ioremap(pdev->resource[0].start, SZ_4K);
        if (WARN_ON(!base))
                return;

        wd_clk = clk_get(&pdev->dev, NULL);
        if (WARN_ON(IS_ERR(wd_clk)))
                return;
        clk_prepare_enable(wd_clk);

        /* disable, internal clock source */
        __raw_writel(0, base + TCR);

        /* reset timer, set mode to 64-bit watchdog, and unreset */
        tgcr = 0;
        __raw_writel(tgcr, base + TGCR);
        tgcr = TGCR_TIMMODE_64BIT_WDOG << TGCR_TIMMODE_SHIFT;
        tgcr |= (TGCR_UNRESET << TGCR_TIM12RS_SHIFT) |
                (TGCR_UNRESET << TGCR_TIM34RS_SHIFT);
        __raw_writel(tgcr, base + TGCR);

        /* clear counter and period regs */
        __raw_writel(0, base + TIM12);
        __raw_writel(0, base + TIM34);
        __raw_writel(0, base + PRD12);
        __raw_writel(0, base + PRD34);

        /* put watchdog in pre-active state */
        wdtcr = __raw_readl(base + WDTCR);
        wdtcr = (WDTCR_WDKEY_SEQ0 << WDTCR_WDKEY_SHIFT) |
                (WDTCR_WDEN_ENABLE << WDTCR_WDEN_SHIFT);
        __raw_writel(wdtcr, base + WDTCR);

        /* put watchdog in active state */
        wdtcr = (WDTCR_WDKEY_SEQ1 << WDTCR_WDKEY_SHIFT) |
                (WDTCR_WDEN_ENABLE << WDTCR_WDEN_SHIFT);
        __raw_writel(wdtcr, base + WDTCR);

        /*
         * write an invalid value to the WDKEY field to trigger
         * a watchdog reset
         */
        wdtcr = 0x00004000;
        __raw_writel(wdtcr, base + WDTCR);
}