/*
 * OMAP3 Power Management Routines
 *
 * Copyright (C) 2006-2008 Nokia Corporation
 * Tony Lindgren <tony@atomide.com>
 * Jouni Hogander
 *
 * Copyright (C) 2007 Texas Instruments, Inc.
 * Rajendra Nayak <rnayak@ti.com>
 *
 * Copyright (C) 2005 Texas Instruments, Inc.
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * Based on pm.c for omap1
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/pm.h>
#include <linux/suspend.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/list.h>
#include <linux/err.h>
#include <linux/gpio.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/slab.h>
#include <linux/omap-dma.h>
#include <linux/platform_data/gpio-omap.h>

#include <trace/events/power.h>

#include <asm/fncpy.h>
#include <asm/suspend.h>
#include <asm/system_misc.h>

#include "clockdomain.h"
#include "powerdomain.h"
#include "soc.h"
#include "common.h"
#include "cm3xxx.h"
#include "cm-regbits-34xx.h"
#include "gpmc.h"
#include "prm-regbits-34xx.h"
#include "prm3xxx.h"
#include "pm.h"
#include "sdrc.h"
#include "sram.h"
#include "control.h"

/* pm34xx errata defined in pm.h */
u16 pm34xx_errata;

struct power_state {
	struct powerdomain *pwrdm;
	u32 next_state;
#ifdef CONFIG_SUSPEND
	u32 saved_state;
#endif
	struct list_head node;
};

static LIST_HEAD(pwrst_list);

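/*
 * Pointers to functions that omap_push_sram_idle() copies into on-chip
 * SRAM: the secure RAM save helper used on non-GP (HS/EMU) devices and
 * the WFI routine run from SRAM (erratum i581 workaround).
 */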
static int (*_omap_save_secure_sram)(u32 *addr);
void (*omap3_do_wfi_sram)(void);

static struct powerdomain *mpu_pwrdm, *neon_pwrdm;
static struct powerdomain *core_pwrdm, *per_pwrdm;

static void omap3_core_save_context(void)
{
	omap3_ctrl_save_padconf();

	/*
	 * Force write last pad into memory, as this can fail in some
	 * cases according to errata 1.157, 1.185
	 */
	omap_ctrl_writel(omap_ctrl_readl(OMAP343X_PADCONF_ETK_D14),
		OMAP343X_CONTROL_MEM_WKUP + 0x2a0);

	/* Save the Interrupt controller context */
	omap_intc_save_context();
	/* Save the GPMC context */
	omap3_gpmc_save_context();
	/* Save the system control module context; padconf already saved above */
	omap3_control_save_context();
	omap_dma_global_context_save();
}

static void omap3_core_restore_context(void)
{
	/* Restore the control module context, padconf restored by h/w */
	omap3_control_restore_context();
	/* Restore the GPMC context */
	omap3_gpmc_restore_context();
	/* Restore the interrupt controller context */
	omap_intc_restore_context();
	omap_dma_global_context_restore();
}

/*
 * FIXME: This function should be called before entering off-mode after
 * OMAP3 secure services have been accessed. Currently it is only called
 * once during the boot sequence, but this works as we are not using
 * secure services.
 */
static void omap3_save_secure_ram_context(void)
{
	u32 ret;
	int mpu_next_state = pwrdm_read_next_pwrst(mpu_pwrdm);

	if (omap_type() != OMAP2_DEVICE_TYPE_GP) {
		/*
		 * MPU next state must be set to POWER_ON temporarily,
		 * otherwise the WFI executed inside the ROM code
		 * will hang the system.
		 */
		pwrdm_set_next_pwrst(mpu_pwrdm, PWRDM_POWER_ON);
		ret = _omap_save_secure_sram((u32 *)
				__pa(omap3_secure_ram_storage));
		pwrdm_set_next_pwrst(mpu_pwrdm, mpu_next_state);
		/* The following is for error tracking; it should not happen */
		if (ret) {
			pr_err("save_secure_sram() returns %08x\n", ret);
			while (1)
				;
		}
	}
}

/*
 * PRCM Interrupt Handler Helper Function
 *
 * The purpose of this function is to clear any wake-up events latched
 * in the PRCM PM_WKST_x registers. It is possible that a wake-up event
 * may occur whilst attempting to clear a PM_WKST_x register and thus
 * set another bit in this register. A while loop is used to ensure
 * that any peripheral wake-up events occurring while attempting to
 * clear the PM_WKST_x are detected and cleared.
 */
static int prcm_clear_mod_irqs(s16 module, u8 regs, u32 ignore_bits)
{
	u32 wkst, fclk, iclk, clken;
	u16 wkst_off = (regs == 3) ? OMAP3430ES2_PM_WKST3 : PM_WKST1;
	u16 fclk_off = (regs == 3) ? OMAP3430ES2_CM_FCLKEN3 : CM_FCLKEN1;
	u16 iclk_off = (regs == 3) ? CM_ICLKEN3 : CM_ICLKEN1;
	u16 grpsel_off = (regs == 3) ?
		OMAP3430ES2_PM_MPUGRPSEL3 : OMAP3430_PM_MPUGRPSEL;
	int c = 0;

	wkst = omap2_prm_read_mod_reg(module, wkst_off);
	wkst &= omap2_prm_read_mod_reg(module, grpsel_off);
	wkst &= ~ignore_bits;
	if (wkst) {
		iclk = omap2_cm_read_mod_reg(module, iclk_off);
		fclk = omap2_cm_read_mod_reg(module, fclk_off);
		while (wkst) {
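			/*
			 * Temporarily enable the interface and functional
			 * clocks of the modules flagged in PM_WKST_x so the
			 * latched wake-up events can be cleared below; the
			 * saved clock enables are restored after the loop.
			 */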
			clken = wkst;
			omap2_cm_set_mod_reg_bits(clken, module, iclk_off);
			/*
			 * For USBHOST, we don't know whether HOST1 or
			 * HOST2 woke us up, so enable both f-clocks
			 */
			if (module == OMAP3430ES2_USBHOST_MOD)
				clken |= 1 << OMAP3430ES2_EN_USBHOST2_SHIFT;
			omap2_cm_set_mod_reg_bits(clken, module, fclk_off);
			omap2_prm_write_mod_reg(wkst, module, wkst_off);
			wkst = omap2_prm_read_mod_reg(module, wkst_off);
			wkst &= ~ignore_bits;
			c++;
		}
		omap2_cm_write_mod_reg(iclk, module, iclk_off);
		omap2_cm_write_mod_reg(fclk, module, fclk_off);
	}

	return c;
}

static irqreturn_t _prcm_int_handle_io(int irq, void *unused)
{
	int c;

	c = prcm_clear_mod_irqs(WKUP_MOD, 1,
		~(OMAP3430_ST_IO_MASK | OMAP3430_ST_IO_CHAIN_MASK));

	return c ? IRQ_HANDLED : IRQ_NONE;
}

static irqreturn_t _prcm_int_handle_wakeup(int irq, void *unused)
{
	int c;

	/*
	 * Clear all except ST_IO and ST_IO_CHAIN for wkup module,
	 * these are handled in a separate handler to avoid acking
	 * IO events before parsing in mux code
	 */
	c = prcm_clear_mod_irqs(WKUP_MOD, 1,
		OMAP3430_ST_IO_MASK | OMAP3430_ST_IO_CHAIN_MASK);
	c += prcm_clear_mod_irqs(CORE_MOD, 1, 0);
	c += prcm_clear_mod_irqs(OMAP3430_PER_MOD, 1, 0);
	if (omap_rev() > OMAP3430_REV_ES1_0) {
		c += prcm_clear_mod_irqs(CORE_MOD, 3, 0);
		c += prcm_clear_mod_irqs(OMAP3430ES2_USBHOST_MOD, 1, 0);
	}

	return c ? IRQ_HANDLED : IRQ_NONE;
}

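/*
 * Stash the ARM and L2 auxiliary control registers into the
 * omap3_arm_context area so they can be reprogrammed on the resume
 * path after an off-mode transition; each value is preceded by a
 * flag word.
 */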
static void omap34xx_save_context(u32 *save)
{
	u32 val;

	/* Read Auxiliary Control Register */
	asm("mrc p15, 0, %0, c1, c0, 1" : "=r" (val));
	*save++ = 1;
	*save++ = val;

	/* Read L2 AUX ctrl register */
	asm("mrc p15, 1, %0, c9, c0, 2" : "=r" (val));
	*save++ = 1;
	*save++ = val;
}

static int omap34xx_do_sram_idle(unsigned long save_state)
{
	omap34xx_cpu_suspend(save_state);
	return 0;
}

void omap_sram_idle(void)
{
	/*
	 * Variable to tell what needs to be saved and restored
	 * in omap_sram_idle:
	 * save_state = 0 => Nothing to save and restore
	 * save_state = 1 => Only L1 and logic lost
	 * save_state = 2 => Only L2 lost
	 * save_state = 3 => L1, L2 and logic lost
	 */
	int save_state = 0;
	int mpu_next_state = PWRDM_POWER_ON;
	int per_next_state = PWRDM_POWER_ON;
	int core_next_state = PWRDM_POWER_ON;
	int per_going_off;
	int core_prev_state;
	u32 sdrc_pwr = 0;

	mpu_next_state = pwrdm_read_next_pwrst(mpu_pwrdm);
	switch (mpu_next_state) {
	case PWRDM_POWER_ON:
	case PWRDM_POWER_RET:
		/* No need to save context */
		save_state = 0;
		break;
	case PWRDM_POWER_OFF:
		save_state = 3;
		break;
	default:
		/* Invalid state */
		pr_err("Invalid mpu state in sram_idle\n");
		return;
	}

	/* NEON control */
	if (pwrdm_read_pwrst(neon_pwrdm) == PWRDM_POWER_ON)
		pwrdm_set_next_pwrst(neon_pwrdm, mpu_next_state);

	/* Enable IO-PAD and IO-CHAIN wakeups */
	per_next_state = pwrdm_read_next_pwrst(per_pwrdm);
	core_next_state = pwrdm_read_next_pwrst(core_pwrdm);

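	/* Let the powerdomain core note the upcoming transition */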
	pwrdm_pre_transition(NULL);

	/* PER */
	if (per_next_state < PWRDM_POWER_ON) {
		per_going_off = (per_next_state == PWRDM_POWER_OFF) ? 1 : 0;
		omap2_gpio_prepare_for_idle(per_going_off);
	}

	/* CORE */
	if (core_next_state < PWRDM_POWER_ON) {
		if (core_next_state == PWRDM_POWER_OFF) {
			omap3_core_save_context();
			omap3_cm_save_context();
		}
	}

	omap3_intc_prepare_idle();

	/*
	 * On EMU/HS devices ROM code restores an SDRC value
	 * from scratchpad which has automatic self refresh on timeout
	 * of AUTO_CNT = 1 enabled. This takes care of erratum ID i443.
	 * Hence store/restore the SDRC_POWER register here.
	 */
	if (cpu_is_omap3430() && omap_rev() >= OMAP3430_REV_ES3_0 &&
	    (omap_type() == OMAP2_DEVICE_TYPE_EMU ||
	     omap_type() == OMAP2_DEVICE_TYPE_SEC) &&
	    core_next_state == PWRDM_POWER_OFF)
		sdrc_pwr = sdrc_read_reg(SDRC_POWER);

	/*
	 * omap3_arm_context is the location where some ARM context
	 * is saved. The rest is placed on the stack, and restored
	 * from there before resuming.
	 */
	if (save_state)
		omap34xx_save_context(omap3_arm_context);
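	/*
	 * If MPU logic/L1 can be lost (save_state 1 or 3), go through
	 * cpu_suspend() so the core registers are saved and restored
	 * around the low-power state; otherwise drop straight into the
	 * SRAM idle routine.
	 */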
	if (save_state == 1 || save_state == 3)
		cpu_suspend(save_state, omap34xx_do_sram_idle);
	else
		omap34xx_do_sram_idle(save_state);

	/* Restore normal SDRC POWER settings */
	if (cpu_is_omap3430() && omap_rev() >= OMAP3430_REV_ES3_0 &&
	    (omap_type() == OMAP2_DEVICE_TYPE_EMU ||
	     omap_type() == OMAP2_DEVICE_TYPE_SEC) &&
	    core_next_state == PWRDM_POWER_OFF)
		sdrc_write_reg(sdrc_pwr, SDRC_POWER);

	/* CORE */
	if (core_next_state < PWRDM_POWER_ON) {
		core_prev_state = pwrdm_read_prev_pwrst(core_pwrdm);
		if (core_prev_state == PWRDM_POWER_OFF) {
			omap3_core_restore_context();
			omap3_cm_restore_context();
			omap3_sram_restore_context();
			omap2_sms_restore_context();
		}
		if (core_next_state == PWRDM_POWER_OFF)
			omap2_prm_clear_mod_reg_bits(OMAP3430_AUTO_OFF_MASK,
					       OMAP3430_GR_MOD,
					       OMAP3_PRM_VOLTCTRL_OFFSET);
	}
	omap3_intc_resume_idle();

	pwrdm_post_transition(NULL);

	/* PER */
	if (per_next_state < PWRDM_POWER_ON)
		omap2_gpio_resume_after_idle();
}

static void omap3_pm_idle(void)
{
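	/* Don't enter the low-power path with an interrupt already pending */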
	if (omap_irq_pending())
		return;

	trace_cpu_idle(1, smp_processor_id());

	omap_sram_idle();

	trace_cpu_idle(PWR_EVENT_EXIT, smp_processor_id());
}

#ifdef CONFIG_SUSPEND
static int omap3_pm_suspend(void)
{
	struct power_state *pwrst;
	int state, ret = 0;

	/* Read current next_pwrsts */
	list_for_each_entry(pwrst, &pwrst_list, node)
		pwrst->saved_state = pwrdm_read_next_pwrst(pwrst->pwrdm);
	/* Set ones wanted by suspend */
	list_for_each_entry(pwrst, &pwrst_list, node) {
		if (omap_set_pwrdm_state(pwrst->pwrdm, pwrst->next_state))
			goto restore;
		if (pwrdm_clear_all_prev_pwrst(pwrst->pwrdm))
			goto restore;
	}

	omap3_intc_suspend();

	omap_sram_idle();

restore:
	/* Restore next_pwrsts */
	list_for_each_entry(pwrst, &pwrst_list, node) {
		state = pwrdm_read_prev_pwrst(pwrst->pwrdm);
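		/*
		 * A previous state numerically above the target means the
		 * domain stayed in a shallower (higher-power) state than
		 * requested.
		 */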
		if (state > pwrst->next_state) {
			pr_info("Powerdomain (%s) didn't enter target state %d\n",
				pwrst->pwrdm->name, pwrst->next_state);
			ret = -1;
		}
		omap_set_pwrdm_state(pwrst->pwrdm, pwrst->saved_state);
	}
	if (ret)
		pr_err("Could not enter target state in pm_suspend\n");
	else
		pr_info("Successfully put all powerdomains to target state\n");

	return ret;
}

#endif /* CONFIG_SUSPEND */


/**
 * omap3_iva_idle(): ensure IVA is in idle so it can be put into
 *                   retention
 *
 * In cases where IVA2 is activated by bootcode, it may prevent
 * full-chip retention or off-mode because it is not idle.  This
 * function forces the IVA2 into idle state so it can go
 * into retention/off and thus allow full-chip retention/off.
 *
 **/
static void __init omap3_iva_idle(void)
{
	/* ensure IVA2 clock is disabled */
	omap2_cm_write_mod_reg(0, OMAP3430_IVA2_MOD, CM_FCLKEN);

	/* if no clock activity, nothing else to do */
	if (!(omap2_cm_read_mod_reg(OMAP3430_IVA2_MOD, OMAP3430_CM_CLKSTST) &
	      OMAP3430_CLKACTIVITY_IVA2_MASK))
		return;

	/* Reset IVA2 */
	omap2_prm_write_mod_reg(OMAP3430_RST1_IVA2_MASK |
			  OMAP3430_RST2_IVA2_MASK |
			  OMAP3430_RST3_IVA2_MASK,
			  OMAP3430_IVA2_MOD, OMAP2_RM_RSTCTRL);

	/* Enable IVA2 clock */
	omap2_cm_write_mod_reg(OMAP3430_CM_FCLKEN_IVA2_EN_IVA2_MASK,
			 OMAP3430_IVA2_MOD, CM_FCLKEN);

	/* Set IVA2 boot mode to 'idle' */
	omap_ctrl_writel(OMAP3_IVA2_BOOTMOD_IDLE,
			 OMAP343X_CONTROL_IVA2_BOOTMOD);

	/* Un-reset IVA2 */
	omap2_prm_write_mod_reg(0, OMAP3430_IVA2_MOD, OMAP2_RM_RSTCTRL);

	/* Disable IVA2 clock */
	omap2_cm_write_mod_reg(0, OMAP3430_IVA2_MOD, CM_FCLKEN);

	/* Reset IVA2 */
	omap2_prm_write_mod_reg(OMAP3430_RST1_IVA2_MASK |
			  OMAP3430_RST2_IVA2_MASK |
			  OMAP3430_RST3_IVA2_MASK,
			  OMAP3430_IVA2_MOD, OMAP2_RM_RSTCTRL);
}

static void __init omap3_d2d_idle(void)
{
	u16 mask, padconf;

	/*
	 * In a standalone OMAP3430 without a stacked modem, the D2D Idle
	 * Ack and D2D MStandby signals must be pulled high. Set
	 * CONTROL_PADCONF_SAD2D_IDLEACK and CONTROL_PADCONF_SAD2D_MSTANDBY
	 * to have a pull-up.
	 */
	mask = (1 << 4) | (1 << 3); /* pull-up, enabled */
	padconf = omap_ctrl_readw(OMAP3_PADCONF_SAD2D_MSTANDBY);
	padconf |= mask;
	omap_ctrl_writew(padconf, OMAP3_PADCONF_SAD2D_MSTANDBY);

	padconf = omap_ctrl_readw(OMAP3_PADCONF_SAD2D_IDLEACK);
	padconf |= mask;
	omap_ctrl_writew(padconf, OMAP3_PADCONF_SAD2D_IDLEACK);

	/* reset modem */
	omap2_prm_write_mod_reg(OMAP3430_RM_RSTCTRL_CORE_MODEM_SW_RSTPWRON_MASK |
			  OMAP3430_RM_RSTCTRL_CORE_MODEM_SW_RST_MASK,
			  CORE_MOD, OMAP2_RM_RSTCTRL);
	omap2_prm_write_mod_reg(0, CORE_MOD, OMAP2_RM_RSTCTRL);
}

static void __init prcm_setup_regs(void)
{
	u32 omap3630_en_uart4_mask = cpu_is_omap3630() ?
					OMAP3630_EN_UART4_MASK : 0;
	u32 omap3630_grpsel_uart4_mask = cpu_is_omap3630() ?
					OMAP3630_GRPSEL_UART4_MASK : 0;

	/* XXX This should be handled by hwmod code or SCM init code */
	omap_ctrl_writel(OMAP3430_AUTOIDLE_MASK, OMAP2_CONTROL_SYSCONFIG);

	/*
	 * Enable control of external oscillator through
	 * sys_clkreq. In the long run the clock framework should
	 * take care of this.
	 */
	omap2_prm_rmw_mod_reg_bits(OMAP_AUTOEXTCLKMODE_MASK,
			     1 << OMAP_AUTOEXTCLKMODE_SHIFT,
			     OMAP3430_GR_MOD,
			     OMAP3_PRM_CLKSRC_CTRL_OFFSET);

	/* Set up wakeup sources */
	omap2_prm_write_mod_reg(OMAP3430_EN_IO_MASK | OMAP3430_EN_GPIO1_MASK |
			  OMAP3430_EN_GPT1_MASK | OMAP3430_EN_GPT12_MASK,
			  WKUP_MOD, PM_WKEN);
	/* No need to write EN_IO, that is always enabled */
	omap2_prm_write_mod_reg(OMAP3430_GRPSEL_GPIO1_MASK |
			  OMAP3430_GRPSEL_GPT1_MASK |
			  OMAP3430_GRPSEL_GPT12_MASK,
			  WKUP_MOD, OMAP3430_PM_MPUGRPSEL);

	/* Enable PM_WKEN to support DSS LPR */
	omap2_prm_write_mod_reg(OMAP3430_PM_WKEN_DSS_EN_DSS_MASK,
				OMAP3430_DSS_MOD, PM_WKEN);

	/* Enable wakeups in PER */
	omap2_prm_write_mod_reg(omap3630_en_uart4_mask |
			  OMAP3430_EN_GPIO2_MASK | OMAP3430_EN_GPIO3_MASK |
			  OMAP3430_EN_GPIO4_MASK | OMAP3430_EN_GPIO5_MASK |
			  OMAP3430_EN_GPIO6_MASK | OMAP3430_EN_UART3_MASK |
			  OMAP3430_EN_MCBSP2_MASK | OMAP3430_EN_MCBSP3_MASK |
			  OMAP3430_EN_MCBSP4_MASK,
			  OMAP3430_PER_MOD, PM_WKEN);
	/* and allow them to wake up MPU */
	omap2_prm_write_mod_reg(omap3630_grpsel_uart4_mask |
			  OMAP3430_GRPSEL_GPIO2_MASK |
			  OMAP3430_GRPSEL_GPIO3_MASK |
			  OMAP3430_GRPSEL_GPIO4_MASK |
			  OMAP3430_GRPSEL_GPIO5_MASK |
			  OMAP3430_GRPSEL_GPIO6_MASK |
			  OMAP3430_GRPSEL_UART3_MASK |
			  OMAP3430_GRPSEL_MCBSP2_MASK |
			  OMAP3430_GRPSEL_MCBSP3_MASK |
			  OMAP3430_GRPSEL_MCBSP4_MASK,
			  OMAP3430_PER_MOD, OMAP3430_PM_MPUGRPSEL);

	/* Don't attach IVA interrupts */
	if (omap3_has_iva()) {
		omap2_prm_write_mod_reg(0, WKUP_MOD, OMAP3430_PM_IVAGRPSEL);
		omap2_prm_write_mod_reg(0, CORE_MOD, OMAP3430_PM_IVAGRPSEL1);
		omap2_prm_write_mod_reg(0, CORE_MOD, OMAP3430ES2_PM_IVAGRPSEL3);
		omap2_prm_write_mod_reg(0, OMAP3430_PER_MOD,
					OMAP3430_PM_IVAGRPSEL);
	}

	/* Clear any pending 'reset' flags */
	omap2_prm_write_mod_reg(0xffffffff, MPU_MOD, OMAP2_RM_RSTST);
	omap2_prm_write_mod_reg(0xffffffff, CORE_MOD, OMAP2_RM_RSTST);
	omap2_prm_write_mod_reg(0xffffffff, OMAP3430_PER_MOD, OMAP2_RM_RSTST);
	omap2_prm_write_mod_reg(0xffffffff, OMAP3430_EMU_MOD, OMAP2_RM_RSTST);
	omap2_prm_write_mod_reg(0xffffffff, OMAP3430_NEON_MOD, OMAP2_RM_RSTST);
	omap2_prm_write_mod_reg(0xffffffff, OMAP3430_DSS_MOD, OMAP2_RM_RSTST);
	omap2_prm_write_mod_reg(0xffffffff, OMAP3430ES2_USBHOST_MOD, OMAP2_RM_RSTST);

	/* Clear any pending PRCM interrupts */
	omap2_prm_write_mod_reg(0, OCP_MOD, OMAP3_PRM_IRQSTATUS_MPU_OFFSET);

	if (omap3_has_iva())
		omap3_iva_idle();

	omap3_d2d_idle();
}


void omap3_pm_off_mode_enable(int enable)
{
	struct power_state *pwrst;
	u32 state;

	if (enable)
		state = PWRDM_POWER_OFF;
	else
		state = PWRDM_POWER_RET;

	list_for_each_entry(pwrst, &pwrst_list, node) {
		if (IS_PM34XX_ERRATUM(PM_SDRC_WAKEUP_ERRATUM_i583) &&
				pwrst->pwrdm == core_pwrdm &&
				state == PWRDM_POWER_OFF) {
			pwrst->next_state = PWRDM_POWER_RET;
			pr_warn("%s: Core OFF disabled due to errata i583\n",
				__func__);
		} else {
			pwrst->next_state = state;
		}
		omap_set_pwrdm_state(pwrst->pwrdm, pwrst->next_state);
	}
}

int omap3_pm_get_suspend_state(struct powerdomain *pwrdm)
{
	struct power_state *pwrst;

	list_for_each_entry(pwrst, &pwrst_list, node) {
		if (pwrst->pwrdm == pwrdm)
			return pwrst->next_state;
	}
	return -EINVAL;
}

int omap3_pm_set_suspend_state(struct powerdomain *pwrdm, int state)
{
	struct power_state *pwrst;

	list_for_each_entry(pwrst, &pwrst_list, node) {
		if (pwrst->pwrdm == pwrdm) {
			pwrst->next_state = state;
			return 0;
		}
	}
	return -EINVAL;
}

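/*
 * Default each powerdomain that supports programmable power states to
 * RETENTION and enable hardware save-and-restore where the domain
 * provides it.
 */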
static int __init pwrdms_setup(struct powerdomain *pwrdm, void *unused)
{
	struct power_state *pwrst;

	if (!pwrdm->pwrsts)
		return 0;

	pwrst = kmalloc(sizeof(struct power_state), GFP_ATOMIC);
	if (!pwrst)
		return -ENOMEM;
	pwrst->pwrdm = pwrdm;
	pwrst->next_state = PWRDM_POWER_RET;
	list_add(&pwrst->node, &pwrst_list);

	if (pwrdm_has_hdwr_sar(pwrdm))
		pwrdm_enable_hdwr_sar(pwrdm);

	return omap_set_pwrdm_state(pwrst->pwrdm, pwrst->next_state);
}

/*
 * Push functions to SRAM
 *
 * The minimum set of functions is pushed to SRAM for execution:
 * - omap3_do_wfi for erratum i581 WA,
 * - save_secure_ram_context for security extensions.
 */
void omap_push_sram_idle(void)
{
	omap3_do_wfi_sram = omap_sram_push(omap3_do_wfi, omap3_do_wfi_sz);

	if (omap_type() != OMAP2_DEVICE_TYPE_GP)
		_omap_save_secure_sram = omap_sram_push(save_secure_ram_context,
				save_secure_ram_context_sz);
}

static void __init pm_errata_configure(void)
{
	if (cpu_is_omap3630()) {
		pm34xx_errata |= PM_RTA_ERRATUM_i608;
		/* Enable the l2 cache toggling in sleep logic */
		enable_omap3630_toggle_l2_on_restore();
		if (omap_rev() < OMAP3630_REV_ES1_2)
			pm34xx_errata |= (PM_SDRC_WAKEUP_ERRATUM_i583 |
					  PM_PER_MEMORIES_ERRATUM_i582);
	} else if (cpu_is_omap34xx()) {
		pm34xx_errata |= PM_PER_MEMORIES_ERRATUM_i582;
	}
}

int __init omap3_pm_init(void)
{
	struct power_state *pwrst, *tmp;
	struct clockdomain *neon_clkdm, *mpu_clkdm, *per_clkdm, *wkup_clkdm;
	int ret;

	if (!omap3_has_io_chain_ctrl())
		pr_warning("PM: no software I/O chain control; some wakeups may be lost\n");

	pm_errata_configure();

	/* XXX prcm_setup_regs needs to be before enabling hw
	 * supervised mode for powerdomains */
	prcm_setup_regs();

	ret = request_irq(omap_prcm_event_to_irq("wkup"),
		_prcm_int_handle_wakeup, IRQF_NO_SUSPEND, "pm_wkup", NULL);

	if (ret) {
		pr_err("pm: Failed to request pm_wkup irq\n");
		goto err1;
	}

	/* IO interrupt is shared with mux code */
	ret = request_irq(omap_prcm_event_to_irq("io"),
		_prcm_int_handle_io, IRQF_SHARED | IRQF_NO_SUSPEND, "pm_io",
		omap3_pm_init);
	enable_irq(omap_prcm_event_to_irq("io"));

	if (ret) {
		pr_err("pm: Failed to request pm_io irq\n");
		goto err2;
	}

	ret = pwrdm_for_each(pwrdms_setup, NULL);
	if (ret) {
		pr_err("Failed to setup powerdomains\n");
		goto err3;
	}

	(void) clkdm_for_each(omap_pm_clkdms_setup, NULL);

	mpu_pwrdm = pwrdm_lookup("mpu_pwrdm");
	if (mpu_pwrdm == NULL) {
		pr_err("Failed to get mpu_pwrdm\n");
		ret = -EINVAL;
		goto err3;
	}

	neon_pwrdm = pwrdm_lookup("neon_pwrdm");
	per_pwrdm = pwrdm_lookup("per_pwrdm");
	core_pwrdm = pwrdm_lookup("core_pwrdm");

	neon_clkdm = clkdm_lookup("neon_clkdm");
	mpu_clkdm = clkdm_lookup("mpu_clkdm");
	per_clkdm = clkdm_lookup("per_clkdm");
	wkup_clkdm = clkdm_lookup("wkup_clkdm");

#ifdef CONFIG_SUSPEND
	omap_pm_suspend = omap3_pm_suspend;
#endif

	arm_pm_idle = omap3_pm_idle;
	omap3_idle_init();

	/*
	 * RTA is disabled during initialization as per erratum i608;
	 * it is safer to disable RTA in the bootloader, but we would like
	 * to be doubly sure here and prevent any mishaps.
	 */
	if (IS_PM34XX_ERRATUM(PM_RTA_ERRATUM_i608))
		omap3630_ctrl_disable_rta();

	/*
	 * The UART3/4 FIFO and the sidetone memory in McBSP2/3 are
	 * not correctly reset when the PER powerdomain comes back
	 * from OFF or OSWR when the CORE powerdomain is kept active.
	 * See OMAP36xx Erratum i582 "PER Domain reset issue after
	 * Domain-OFF/OSWR Wakeup".  This wakeup dependency is not a
	 * complete workaround.  The kernel must also prevent the PER
	 * powerdomain from going to OSWR/OFF while the CORE
	 * powerdomain is not going to OSWR/OFF.  And if PER last
	 * power state was off while CORE last power state was ON, the
	 * UART3/4 and McBSP2/3 SIDETONE devices need to run a
	 * self-test using their loopback tests; if that fails, those
	 * devices are unusable until the PER/CORE can complete a transition
	 * from ON to OSWR/OFF and then back to ON.
	 *
	 * XXX Technically this workaround is only needed if off-mode
	 * or OSWR is enabled.
	 */
	if (IS_PM34XX_ERRATUM(PM_PER_MEMORIES_ERRATUM_i582))
		clkdm_add_wkdep(per_clkdm, wkup_clkdm);

	clkdm_add_wkdep(neon_clkdm, mpu_clkdm);
	if (omap_type() != OMAP2_DEVICE_TYPE_GP) {
		omap3_secure_ram_storage =
			kmalloc(0x803F, GFP_KERNEL);
		if (!omap3_secure_ram_storage)
			pr_err("Memory allocation failed when allocating for secure sram context\n");

		local_irq_disable();

		omap_dma_global_context_save();
		omap3_save_secure_ram_context();
		omap_dma_global_context_restore();

		local_irq_enable();
	}

	omap3_save_scratchpad_contents();
	return ret;

err3:
	list_for_each_entry_safe(pwrst, tmp, &pwrst_list, node) {
		list_del(&pwrst->node);
		kfree(pwrst);
	}
	free_irq(omap_prcm_event_to_irq("io"), omap3_pm_init);
err2:
	free_irq(omap_prcm_event_to_irq("wkup"), NULL);
err1:
	return ret;
}