#ifndef __SH_CLOCK_H
#define __SH_CLOCK_H

#include <linux/list.h>
#include <linux/seq_file.h>
#include <linux/cpufreq.h>
#include <linux/types.h>
#include <linux/kref.h>
#include <linux/clk.h>
#include <linux/err.h>

struct clk;

struct clk_mapping {
	phys_addr_t		phys;
	void __iomem		*base;
	unsigned long		len;
	struct kref		ref;
};

struct sh_clk_ops {
#ifdef CONFIG_SH_CLK_CPG_LEGACY
	void (*init)(struct clk *clk);
#endif
	int (*enable)(struct clk *clk);
	void (*disable)(struct clk *clk);
	unsigned long (*recalc)(struct clk *clk);
	int (*set_rate)(struct clk *clk, unsigned long rate);
	int (*set_parent)(struct clk *clk, struct clk *parent);
	long (*round_rate)(struct clk *clk, unsigned long rate);
};
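
/*
 * Illustrative sketch: a minimal sh_clk_ops for a clock that simply
 * follows its parent might only provide .recalc, reusing
 * followparent_recalc() declared below.  The name "my_clk_ops" is
 * hypothetical and used only for this example.
 *
 *	static struct sh_clk_ops my_clk_ops = {
 *		.recalc	= followparent_recalc,
 *	};
 */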

#define SH_CLK_DIV_MSK(div)	((1 << (div)) - 1)
#define SH_CLK_DIV4_MSK		SH_CLK_DIV_MSK(4)
#define SH_CLK_DIV6_MSK		SH_CLK_DIV_MSK(6)

struct clk {
	struct list_head	node;
	struct clk		*parent;
	struct clk		**parent_table;	/* list of parents to */
	unsigned short		parent_num;	/* choose between */
	unsigned char		src_shift;	/* source clock field in the */
	unsigned char		src_width;	/* configuration register */
	struct sh_clk_ops	*ops;

	struct list_head	children;
	struct list_head	sibling;	/* node for children */

	int			usecount;

	unsigned long		rate;
	unsigned long		flags;

	void __iomem		*enable_reg;
	unsigned int		enable_bit;
	void __iomem		*mapped_reg;

	unsigned int		div_mask;
	unsigned long		arch_flags;
	void			*priv;
	struct clk_mapping	*mapping;
	struct cpufreq_frequency_table *freq_table;
	unsigned int		nr_freqs;
};

#define CLK_ENABLE_ON_INIT	BIT(0)

#define CLK_ENABLE_REG_32BIT	BIT(1)	/* default access size */
#define CLK_ENABLE_REG_16BIT	BIT(2)
#define CLK_ENABLE_REG_8BIT	BIT(3)

#define CLK_MASK_DIV_ON_DISABLE	BIT(4)

#define CLK_ENABLE_REG_MASK	(CLK_ENABLE_REG_32BIT | \
				 CLK_ENABLE_REG_16BIT | \
				 CLK_ENABLE_REG_8BIT)

/* drivers/sh/clk.c */
unsigned long followparent_recalc(struct clk *);
void recalculate_root_clocks(void);
void propagate_rate(struct clk *);
int clk_reparent(struct clk *child, struct clk *parent);
int clk_register(struct clk *);
void clk_unregister(struct clk *);
void clk_enable_init_clocks(void);

struct clk_div_mult_table {
	unsigned int *divisors;
	unsigned int nr_divisors;
	unsigned int *multipliers;
	unsigned int nr_multipliers;
};

struct cpufreq_frequency_table;
void clk_rate_table_build(struct clk *clk,
			  struct cpufreq_frequency_table *freq_table,
			  int nr_freqs,
			  struct clk_div_mult_table *src_table,
			  unsigned long *bitmap);

long clk_rate_table_round(struct clk *clk,
			  struct cpufreq_frequency_table *freq_table,
			  unsigned long rate);

int clk_rate_table_find(struct clk *clk,
			struct cpufreq_frequency_table *freq_table,
			unsigned long rate);

long clk_rate_div_range_round(struct clk *clk, unsigned int div_min,
			      unsigned int div_max, unsigned long rate);

long clk_rate_mult_range_round(struct clk *clk, unsigned int mult_min,
			       unsigned int mult_max, unsigned long rate);

long clk_round_parent(struct clk *clk, unsigned long target,
		      unsigned long *best_freq, unsigned long *parent_freq,
		      unsigned int div_min, unsigned int div_max);
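
/*
 * Illustrative sketch: once a frequency table has been built with
 * clk_rate_table_build(), a clock's .round_rate callback can simply
 * defer to clk_rate_table_round().  "my_round_rate" is a hypothetical
 * name used only for this example.
 *
 *	static long my_round_rate(struct clk *clk, unsigned long rate)
 *	{
 *		return clk_rate_table_round(clk, clk->freq_table, rate);
 *	}
 */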

#define SH_CLK_MSTP(_parent, _enable_reg, _enable_bit, _flags)	\
{								\
	.parent		= _parent,				\
	.enable_reg	= (void __iomem *)_enable_reg,		\
	.enable_bit	= _enable_bit,				\
	.flags		= _flags,				\
}

#define SH_CLK_MSTP32(_p, _r, _b, _f)				\
	SH_CLK_MSTP(_p, _r, _b, _f | CLK_ENABLE_REG_32BIT)

#define SH_CLK_MSTP16(_p, _r, _b, _f)				\
	SH_CLK_MSTP(_p, _r, _b, _f | CLK_ENABLE_REG_16BIT)

#define SH_CLK_MSTP8(_p, _r, _b, _f)				\
	SH_CLK_MSTP(_p, _r, _b, _f | CLK_ENABLE_REG_8BIT)

int sh_clk_mstp_register(struct clk *clks, int nr);

/*
 * MSTP registration never really cared about access size, despite the
 * original enable/disable pairs assuming a 32-bit access. Clocks are
 * responsible for defining their access sizes either directly or via the
 * clock definition wrappers.
 */
static inline int __deprecated sh_clk_mstp32_register(struct clk *clks, int nr)
{
	return sh_clk_mstp_register(clks, nr);
}
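
/*
 * Illustrative sketch: a platform clock table typically defines its MSTP
 * gate clocks with the wrappers above and registers them in one call.
 * The parent, register addresses and bit numbers below are made up.
 *
 *	static struct clk mstp_clks[] = {
 *		[0] = SH_CLK_MSTP32(&peripheral_clk, 0xe6150130, 0, 0),
 *		[1] = SH_CLK_MSTP32(&peripheral_clk, 0xe6150134, 3,
 *				    CLK_ENABLE_ON_INIT),
 *	};
 *
 *	sh_clk_mstp_register(mstp_clks, ARRAY_SIZE(mstp_clks));
 */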

#define SH_CLK_DIV4(_parent, _reg, _shift, _div_bitmap, _flags)	\
{								\
	.parent		= _parent,				\
	.enable_reg	= (void __iomem *)_reg,			\
	.enable_bit	= _shift,				\
	.arch_flags	= _div_bitmap,				\
	.div_mask	= SH_CLK_DIV4_MSK,			\
	.flags		= _flags,				\
}

struct clk_div_table {
	struct clk_div_mult_table *div_mult_table;
	void (*kick)(struct clk *clk);
};

#define clk_div4_table clk_div_table

int sh_clk_div4_register(struct clk *clks, int nr,
			 struct clk_div4_table *table);
int sh_clk_div4_enable_register(struct clk *clks, int nr,
				struct clk_div4_table *table);
int sh_clk_div4_reparent_register(struct clk *clks, int nr,
				  struct clk_div4_table *table);
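
/*
 * Illustrative sketch: DIV4 clocks pair SH_CLK_DIV4() with a
 * clk_div4_table describing the valid divisors.  All names, divisors,
 * register address and bitmap below are hypothetical.
 *
 *	static unsigned int divisors[] = { 2, 3, 4, 6, 8, 12, 16, 18, 24, 32 };
 *
 *	static struct clk_div_mult_table div4_div_mult_table = {
 *		.divisors	= divisors,
 *		.nr_divisors	= ARRAY_SIZE(divisors),
 *	};
 *
 *	static struct clk_div4_table div4_table = {
 *		.div_mult_table	= &div4_div_mult_table,
 *	};
 *
 *	static struct clk div4_clks[] = {
 *		SH_CLK_DIV4(&pll_clk, 0xe6150020, 0, 0x0fff, CLK_ENABLE_ON_INIT),
 *	};
 *
 *	sh_clk_div4_register(div4_clks, ARRAY_SIZE(div4_clks), &div4_table);
 */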

#define SH_CLK_DIV6_EXT(_reg, _flags, _parents,			\
			_num_parents, _src_shift, _src_width)	\
{								\
	.enable_reg	= (void __iomem *)_reg,			\
	.enable_bit	= 0, /* unused */			\
	.flags		= _flags | CLK_MASK_DIV_ON_DISABLE,	\
	.div_mask	= SH_CLK_DIV6_MSK,			\
	.parent_table	= _parents,				\
	.parent_num	= _num_parents,				\
	.src_shift	= _src_shift,				\
	.src_width	= _src_width,				\
}

#define SH_CLK_DIV6(_parent, _reg, _flags)			\
{								\
	.parent		= _parent,				\
	.enable_reg	= (void __iomem *)_reg,			\
	.enable_bit	= 0, /* unused */			\
	.div_mask	= SH_CLK_DIV6_MSK,			\
	.flags		= _flags | CLK_MASK_DIV_ON_DISABLE,	\
}

int sh_clk_div6_register(struct clk *clks, int nr);
int sh_clk_div6_reparent_register(struct clk *clks, int nr);
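
/*
 * Illustrative sketch: a DIV6 clock with a selectable parent uses
 * SH_CLK_DIV6_EXT() plus a parent table and is registered with
 * sh_clk_div6_reparent_register().  The names, register address,
 * source field shift and width below are hypothetical.
 *
 *	static struct clk *div6_parents[2] = {
 *		[0] = &pll1_div2_clk,
 *		[1] = &extal_clk,
 *	};
 *
 *	static struct clk div6_clks[] = {
 *		SH_CLK_DIV6_EXT(0xe6150008, 0, div6_parents,
 *				ARRAY_SIZE(div6_parents), 6, 1),
 *	};
 *
 *	sh_clk_div6_reparent_register(div6_clks, ARRAY_SIZE(div6_clks));
 */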

#define CLKDEV_CON_ID(_id, _clk) { .con_id = _id, .clk = _clk }
#define CLKDEV_DEV_ID(_id, _clk) { .dev_id = _id, .clk = _clk }
#define CLKDEV_ICK_ID(_cid, _did, _clk) { .con_id = _cid, .dev_id = _did, .clk = _clk }
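
/*
 * Illustrative sketch: these helpers initialize struct clk_lookup
 * entries (see <linux/clkdev.h>), usually collected in a table and
 * registered with clkdev_add_table().  The clock and device names
 * below are hypothetical.
 *
 *	static struct clk_lookup lookups[] = {
 *		CLKDEV_CON_ID("extal", &extal_clk),
 *		CLKDEV_DEV_ID("sh-sci.0", &mstp_clks[0]),
 *		CLKDEV_ICK_ID("ick", "sh_fsi2", &fsidiv_clks[0]),
 *	};
 *
 *	clkdev_add_table(lookups, ARRAY_SIZE(lookups));
 */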

/* .enable_reg will be updated to .mapping by sh_clk_fsidiv_register() */
#define SH_CLK_FSIDIV(_reg, _parent)				\
{								\
	.enable_reg = (void __iomem *)_reg,			\
	.parent = _parent,					\
}

int sh_clk_fsidiv_register(struct clk *clks, int nr);
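
/*
 * Illustrative sketch: FSIDIV clocks only carry the divider register
 * address and the parent; sh_clk_fsidiv_register() then records that
 * address in a clk_mapping (hence the note above).  The address and
 * clock names are hypothetical.
 *
 *	static struct clk fsidiv_clks[] = {
 *		[0] = SH_CLK_FSIDIV(0xfe1f8000, &fsia_clk),
 *	};
 *
 *	sh_clk_fsidiv_register(fsidiv_clks, ARRAY_SIZE(fsidiv_clks));
 */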

#endif /* __SH_CLOCK_H */