/*
 * Copyright (C) 2011 Sascha Hauer, Pengutronix <s.hauer@pengutronix.de>
 * Copyright (C) 2011 Richard Zhao, Linaro <richard.zhao@linaro.org>
 * Copyright (C) 2011-2012 Mike Turquette, Linaro Ltd <mturquette@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Simple multiplexer clock implementation
 */

#include <linux/clk-provider.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/err.h>

/*
 * DOC: basic adjustable multiplexer clock that cannot gate
 *
 * Traits of this clock:
 * prepare - clk_prepare only ensures that parents are prepared
 * enable - clk_enable only ensures that parents are enabled
 * rate - rate is only affected by parent switching.  No clk_set_rate support
 * parent - parent is adjustable through clk_set_parent
 */
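
/*
 * Illustrative example (not part of the original file): with shift = 2,
 * mask = 0x3 and no table or index flags, a register value of 0b01 in
 * bits [3:2] selects parent_names[1], and switching to parent_names[2]
 * writes 0b10 back into that same field.
 */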

static u8 clk_mux_get_parent(struct clk_hw *hw)
{
	struct clk_mux *mux = to_clk_mux(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 val;

	/*
	 * FIXME need a mux-specific flag to determine if val is bitwise or numeric
	 * e.g. sys_clkin_ck's clksel field is 3 bits wide, but ranges from 0x1
	 * to 0x7 (index starts at one)
	 * OTOH, pmd_trace_clk_mux_ck uses a separate bit for each clock, so
	 * val = 0x4 really means "bit 2, index starts at bit 0"
	 */
	val = clk_readl(mux->reg) >> mux->shift;
	val &= mux->mask;

	if (mux->table) {
		int i;

		for (i = 0; i < num_parents; i++)
			if (mux->table[i] == val)
				return i;
		return -EINVAL;
	}

	if (val && (mux->flags & CLK_MUX_INDEX_BIT))
		val = ffs(val) - 1;

	if (val && (mux->flags & CLK_MUX_INDEX_ONE))
		val--;

	if (val >= num_parents)
		return -EINVAL;

	return val;
}

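/*
 * Example mapping (illustrative): a one-hot mux described with
 * table = { 1, 2, 4 } translates parent index 2 into the register value
 * 0x4, and clk_mux_get_parent() maps 0x4 back to index 2 by searching
 * the table.  Without a table, CLK_MUX_INDEX_BIT and CLK_MUX_INDEX_ONE
 * perform the same kind of translation arithmetically.
 */
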
static int clk_mux_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_mux *mux = to_clk_mux(hw);
	u32 val;
	unsigned long flags = 0;

	if (mux->table) {
		index = mux->table[index];
	} else {
		if (mux->flags & CLK_MUX_INDEX_BIT)
			index = 1 << index;

		if (mux->flags & CLK_MUX_INDEX_ONE)
			index++;
	}

	if (mux->lock)
		spin_lock_irqsave(mux->lock, flags);
	else
		__acquire(mux->lock);

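	/*
	 * Hardware with "hiword mask" registers (some SoCs carry a
	 * write-enable mask in the upper 16 bits) only needs the new field
	 * value plus that mask; other hardware needs a read-modify-write.
	 */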
	if (mux->flags & CLK_MUX_HIWORD_MASK) {
		val = mux->mask << (mux->shift + 16);
	} else {
		val = clk_readl(mux->reg);
		val &= ~(mux->mask << mux->shift);
	}
	val |= index << mux->shift;
	clk_writel(val, mux->reg);

	if (mux->lock)
		spin_unlock_irqrestore(mux->lock, flags);
	else
		__release(mux->lock);

	return 0;
}

const struct clk_ops clk_mux_ops = {
	.get_parent = clk_mux_get_parent,
	.set_parent = clk_mux_set_parent,
	.determine_rate = __clk_mux_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_mux_ops);

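/*
 * Read-only variant: the parent can be queried but never changed, which
 * suits muxes whose selection is fixed elsewhere (by firmware or straps,
 * for example) and must not be touched at runtime.
 */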
const struct clk_ops clk_mux_ro_ops = {
	.get_parent = clk_mux_get_parent,
};
EXPORT_SYMBOL_GPL(clk_mux_ro_ops);

struct clk_hw *clk_hw_register_mux_table(struct device *dev, const char *name,
		const char * const *parent_names, u8 num_parents,
		unsigned long flags,
		void __iomem *reg, u8 shift, u32 mask,
		u8 clk_mux_flags, u32 *table, spinlock_t *lock)
{
	struct clk_mux *mux;
	struct clk_hw *hw;
	struct clk_init_data init;
	u8 width = 0;
	int ret;

	if (clk_mux_flags & CLK_MUX_HIWORD_MASK) {
		width = fls(mask) - ffs(mask) + 1;
		if (width + shift > 16) {
			pr_err("mux value exceeds LOWORD field\n");
			return ERR_PTR(-EINVAL);
		}
	}

	/* allocate the mux */
	mux = kzalloc(sizeof(struct clk_mux), GFP_KERNEL);
	if (!mux) {
		pr_err("%s: could not allocate mux clk\n", __func__);
		return ERR_PTR(-ENOMEM);
	}

	init.name = name;
	if (clk_mux_flags & CLK_MUX_READ_ONLY)
		init.ops = &clk_mux_ro_ops;
	else
		init.ops = &clk_mux_ops;
	init.flags = flags | CLK_IS_BASIC;
	init.parent_names = parent_names;
	init.num_parents = num_parents;

	/* struct clk_mux assignments */
	mux->reg = reg;
	mux->shift = shift;
	mux->mask = mask;
	mux->flags = clk_mux_flags;
	mux->lock = lock;
	mux->table = table;
	mux->hw.init = &init;

	hw = &mux->hw;
	ret = clk_hw_register(dev, hw);
	if (ret) {
		kfree(mux);
		hw = ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(clk_hw_register_mux_table);

struct clk *clk_register_mux_table(struct device *dev, const char *name,
		const char * const *parent_names, u8 num_parents,
		unsigned long flags,
		void __iomem *reg, u8 shift, u32 mask,
		u8 clk_mux_flags, u32 *table, spinlock_t *lock)
{
	struct clk_hw *hw;

	hw = clk_hw_register_mux_table(dev, name, parent_names, num_parents,
				       flags, reg, shift, mask, clk_mux_flags,
				       table, lock);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_mux_table);

struct clk *clk_register_mux(struct device *dev, const char *name,
		const char * const *parent_names, u8 num_parents,
		unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_mux_flags, spinlock_t *lock)
{
	u32 mask = BIT(width) - 1;

	return clk_register_mux_table(dev, name, parent_names, num_parents,
				      flags, reg, shift, mask, clk_mux_flags,
				      NULL, lock);
}
EXPORT_SYMBOL_GPL(clk_register_mux);
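
/*
 * Typical registration (illustrative, with made-up names and register
 * layout): a 2-bit selector at bits [5:4] of a hypothetical CCM register,
 * protected by the caller's spinlock:
 *
 *	static DEFINE_SPINLOCK(ccm_lock);
 *	static const char * const sel_parents[] = { "osc24m", "pll1", "pll2" };
 *
 *	clk = clk_register_mux(NULL, "periph_sel", sel_parents,
 *			       ARRAY_SIZE(sel_parents), 0,
 *			       ccm_base + 0x14, 4, 2, 0, &ccm_lock);
 */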

struct clk_hw *clk_hw_register_mux(struct device *dev, const char *name,
		const char * const *parent_names, u8 num_parents,
		unsigned long flags,
		void __iomem *reg, u8 shift, u8 width,
		u8 clk_mux_flags, spinlock_t *lock)
{
	u32 mask = BIT(width) - 1;

	return clk_hw_register_mux_table(dev, name, parent_names, num_parents,
					 flags, reg, shift, mask, clk_mux_flags,
					 NULL, lock);
}
EXPORT_SYMBOL_GPL(clk_hw_register_mux);

void clk_unregister_mux(struct clk *clk)
{
	struct clk_mux *mux;
	struct clk_hw *hw;

	hw = __clk_get_hw(clk);
	if (!hw)
		return;

	mux = to_clk_mux(hw);

	clk_unregister(clk);
	kfree(mux);
}
EXPORT_SYMBOL_GPL(clk_unregister_mux);

void clk_hw_unregister_mux(struct clk_hw *hw)
{
	struct clk_mux *mux;

	mux = to_clk_mux(hw);

	clk_hw_unregister(hw);
	kfree(mux);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_mux);