Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 1 | #ifndef __SH_CLOCK_H |
| 2 | #define __SH_CLOCK_H |
| 3 | |
| 4 | #include <linux/list.h> |
| 5 | #include <linux/seq_file.h> |
| 6 | #include <linux/cpufreq.h> |
Paul Mundt | 28085bc | 2010-10-15 16:46:37 +0900 | [diff] [blame] | 7 | #include <linux/types.h> |
| 8 | #include <linux/kref.h> |
Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 9 | #include <linux/clk.h> |
| 10 | #include <linux/err.h> |
| 11 | |
| 12 | struct clk; |
| 13 | |
/*
 * Ioremapped register window for a clock (or a group of clocks).
 * Reference counted via @ref so several clocks can share one mapping.
 */
struct clk_mapping {
	phys_addr_t phys;	/* physical base address of the register block */
	void __iomem *base;	/* ioremapped virtual base */
	unsigned long len;	/* length of the mapping in bytes */
	struct kref ref;	/* refcount; mappings may be shared */
};
| 20 | |
Magnus Damm | e348282 | 2012-02-29 22:16:13 +0900 | [diff] [blame] | 21 | |
/*
 * Per-clock hardware operations, supplied by the clock provider.
 */
struct sh_clk_ops {
#ifdef CONFIG_SH_CLK_CPG_LEGACY
	/* one-time initialization; only built for the legacy CPG code */
	void (*init)(struct clk *clk);
#endif
	int (*enable)(struct clk *clk);
	void (*disable)(struct clk *clk);
	/* recompute the clock's current rate from hardware/parent state */
	unsigned long (*recalc)(struct clk *clk);
	int (*set_rate)(struct clk *clk, unsigned long rate);
	int (*set_parent)(struct clk *clk, struct clk *parent);
	long (*round_rate)(struct clk *clk, unsigned long rate);
};
| 33 | |
/*
 * One clock in the SH clock framework.  Clocks form a tree: each clk has
 * one active parent and a list of children deriving their rate from it.
 */
struct clk {
	struct list_head node;		/* entry on the global clock list */
	struct clk *parent;		/* currently selected parent */
	struct clk **parent_table;	/* list of parents to */
	unsigned short parent_num;	/* choose between */
	unsigned char src_shift;	/* source clock field in the */
	unsigned char src_width;	/* configuration register */
	struct sh_clk_ops *ops;		/* hardware operations, may differ per clock */

	struct list_head children;	/* clocks fed by this clock */
	struct list_head sibling;	/* node for children */

	int usecount;			/* enable reference count */

	unsigned long rate;		/* cached clock rate */
	unsigned long flags;		/* CLK_* bits defined below */

	void __iomem *enable_reg;	/* gate/configuration register */
	unsigned int enable_bit;	/* bit position within enable_reg */
	void __iomem *mapped_reg;	/* enable_reg as seen through ->mapping
					 * — NOTE(review): confirm against the
					 * implementation in drivers/sh/clk.c */

	unsigned long arch_flags;	/* arch-private data; DIV4 clocks store
					 * their _div_bitmap here (SH_CLK_DIV4) */
	void *priv;			/* provider-private data */
	struct clk_mapping *mapping;	/* shared register window, see above */
	struct cpufreq_frequency_table *freq_table; /* attainable rates */
	unsigned int nr_freqs;		/* number of entries in freq_table */
};
| 61 | |
/* struct clk::flags bits */
#define CLK_ENABLE_ON_INIT	BIT(0)	/* presumably honoured by
					 * clk_enable_init_clocks() — verify */

/* access width used for enable_reg; 32-bit is the default */
#define CLK_ENABLE_REG_32BIT	BIT(1)	/* default access size */
#define CLK_ENABLE_REG_16BIT	BIT(2)
#define CLK_ENABLE_REG_8BIT	BIT(3)

#define CLK_ENABLE_REG_MASK	(CLK_ENABLE_REG_32BIT | \
				 CLK_ENABLE_REG_16BIT | \
				 CLK_ENABLE_REG_8BIT)
Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 71 | |
/* drivers/sh/clk.c */

/* rate propagation and reparenting helpers for the clock tree */
unsigned long followparent_recalc(struct clk *);
void recalculate_root_clocks(void);
void propagate_rate(struct clk *);
int clk_reparent(struct clk *child, struct clk *parent);

/* clock (un)registration */
int clk_register(struct clk *);
void clk_unregister(struct clk *);
void clk_enable_init_clocks(void);
Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 80 | |
/*
 * Candidate divisor/multiplier values used to derive a clock's attainable
 * rates; consumed by clk_rate_table_build() below.
 */
struct clk_div_mult_table {
	unsigned int *divisors;		/* candidate divisor values */
	unsigned int nr_divisors;	/* entries in divisors[] */
	unsigned int *multipliers;	/* candidate multiplier values */
	unsigned int nr_multipliers;	/* entries in multipliers[] */
};
| 87 | |
struct cpufreq_frequency_table;

/*
 * Populate freq_table (nr_freqs entries) from the divisor/multiplier legs
 * in src_table; bitmap presumably masks out invalid combinations —
 * NOTE(review): confirm against drivers/sh/clk.c.
 */
void clk_rate_table_build(struct clk *clk,
			  struct cpufreq_frequency_table *freq_table,
			  int nr_freqs,
			  struct clk_div_mult_table *src_table,
			  unsigned long *bitmap);

/* round a requested rate against the entries of freq_table */
long clk_rate_table_round(struct clk *clk,
			  struct cpufreq_frequency_table *freq_table,
			  unsigned long rate);

/* look up a rate in freq_table */
int clk_rate_table_find(struct clk *clk,
			struct cpufreq_frequency_table *freq_table,
			unsigned long rate);

/* round a rate over a plain divider range [div_min, div_max] */
long clk_rate_div_range_round(struct clk *clk, unsigned int div_min,
			      unsigned int div_max, unsigned long rate);

/* round a rate over a plain multiplier range [mult_min, mult_max] */
long clk_rate_mult_range_round(struct clk *clk, unsigned int mult_min,
			       unsigned int mult_max, unsigned long rate);

/* round target against the parent clock over dividers [div_min, div_max];
 * best_freq/parent_freq are output parameters */
long clk_round_parent(struct clk *clk, unsigned long target,
		      unsigned long *best_freq, unsigned long *parent_freq,
		      unsigned int div_min, unsigned int div_max);
| 112 | |
/*
 * Static initializers for MSTP gate clocks.  The sized variants below OR
 * the matching CLK_ENABLE_REG_*BIT access-width flag into _flags.
 */
#define SH_CLK_MSTP(_parent, _enable_reg, _enable_bit, _flags)	\
{								\
	.parent		= _parent,				\
	.enable_reg	= (void __iomem *)_enable_reg,		\
	.enable_bit	= _enable_bit,				\
	.flags		= _flags,				\
}

#define SH_CLK_MSTP32(_p, _r, _b, _f)				\
	SH_CLK_MSTP(_p, _r, _b, _f | CLK_ENABLE_REG_32BIT)

#define SH_CLK_MSTP16(_p, _r, _b, _f)				\
	SH_CLK_MSTP(_p, _r, _b, _f | CLK_ENABLE_REG_16BIT)

#define SH_CLK_MSTP8(_p, _r, _b, _f)				\
	SH_CLK_MSTP(_p, _r, _b, _f | CLK_ENABLE_REG_8BIT)

/* register an array of nr MSTP clocks */
int sh_clk_mstp_register(struct clk *clks, int nr);
| 131 | |
| 132 | /* |
| 133 | * MSTP registration never really cared about access size, despite the |
| 134 | * original enable/disable pairs assuming a 32-bit access. Clocks are |
| 135 | * responsible for defining their access sizes either directly or via the |
| 136 | * clock definition wrappers. |
| 137 | */ |
| 138 | static inline int __deprecated sh_clk_mstp32_register(struct clk *clks, int nr) |
| 139 | { |
| 140 | return sh_clk_mstp_register(clks, nr); |
| 141 | } |
Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 142 | |
/*
 * Static initializer for a DIV4 clock.  Note the field reuse: enable_bit
 * holds the divisor field's shift and arch_flags carries _div_bitmap.
 */
#define SH_CLK_DIV4(_parent, _reg, _shift, _div_bitmap, _flags)	\
{								\
	.parent = _parent,					\
	.enable_reg = (void __iomem *)_reg,			\
	.enable_bit = _shift,					\
	.arch_flags = _div_bitmap,				\
	.flags = _flags,					\
}
| 151 | |
struct clk_div4_table {
	struct clk_div_mult_table *div_mult_table; /* attainable div/mult legs */
	void (*kick)(struct clk *clk);	/* NOTE(review): presumably makes a new
					 * divisor setting take effect — confirm
					 * against the CPG implementation */
};
| 156 | |
/* register an array of nr DIV4 clocks */
int sh_clk_div4_register(struct clk *clks, int nr,
			 struct clk_div4_table *table);
/* as above, for DIV4 clocks that also support enable/disable */
int sh_clk_div4_enable_register(struct clk *clks, int nr,
				struct clk_div4_table *table);
/* as above, for DIV4 clocks that also support reparenting */
int sh_clk_div4_reparent_register(struct clk *clks, int nr,
				  struct clk_div4_table *table);
| 163 | |
/*
 * Static initializers for DIV6 clocks.  The _EXT variant additionally
 * wires up the parent-selection field (src_shift/src_width) choosing
 * between the _num_parents entries of _parents.
 */
#define SH_CLK_DIV6_EXT(_reg, _flags, _parents,			\
			_num_parents, _src_shift, _src_width)	\
{								\
	.enable_reg = (void __iomem *)_reg,			\
	.flags = _flags,					\
	.parent_table = _parents,				\
	.parent_num = _num_parents,				\
	.src_shift = _src_shift,				\
	.src_width = _src_width,				\
}

#define SH_CLK_DIV6(_parent, _reg, _flags)			\
{								\
	.parent = _parent,					\
	.enable_reg = (void __iomem *)_reg,			\
	.flags = _flags,					\
}

/* register an array of nr DIV6 clocks */
int sh_clk_div6_register(struct clk *clks, int nr);
/* as above, for DIV6 clocks that also support reparenting */
int sh_clk_div6_reparent_register(struct clk *clks, int nr);
Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 184 | |
/* convenience initializers for clkdev lookup entries (struct clk_lookup) */
#define CLKDEV_CON_ID(_id, _clk) { .con_id = _id, .clk = _clk }
#define CLKDEV_DEV_ID(_id, _clk) { .dev_id = _id, .clk = _clk }
#define CLKDEV_ICK_ID(_cid, _did, _clk) { .con_id = _cid, .dev_id = _did, .clk = _clk }
| 188 | |
Magnus Damm | d28bdf0 | 2010-05-11 13:29:17 +0000 | [diff] [blame] | 189 | #endif /* __SH_CLOCK_H */ |