/*
 * mmp mix (div and mux) clock operation source file
 *
 * Copyright (C) 2014 Marvell
 * Chao Xie <chao.xie@marvell.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */

#include <linux/clk-provider.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/err.h>

#include "clk.h"

/*
 * The mix clock is a clock that combines a mux and a divider.
 * Because the div field and the mux field need to be set at the
 * same time, it cannot be split into two separate clock types.
 */

#define to_clk_mix(hw) container_of(hw, struct mmp_clk_mix, hw)

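/* Largest divider the div field can encode, given flags or the div table. */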
static unsigned int _get_maxdiv(struct mmp_clk_mix *mix)
{
	unsigned int div_mask = (1 << mix->reg_info.width_div) - 1;
	unsigned int maxdiv = 0;
	struct clk_div_table *clkt;

	if (mix->div_flags & CLK_DIVIDER_ONE_BASED)
		return div_mask;
	if (mix->div_flags & CLK_DIVIDER_POWER_OF_TWO)
		return 1 << div_mask;
	if (mix->div_table) {
		for (clkt = mix->div_table; clkt->div; clkt++)
			if (clkt->div > maxdiv)
				maxdiv = clkt->div;
		return maxdiv;
	}
	return div_mask + 1;
}

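/* Translate a div field value from the register into the actual divider. */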
static unsigned int _get_div(struct mmp_clk_mix *mix, unsigned int val)
{
	struct clk_div_table *clkt;

	if (mix->div_flags & CLK_DIVIDER_ONE_BASED)
		return val;
	if (mix->div_flags & CLK_DIVIDER_POWER_OF_TWO)
		return 1 << val;
	if (mix->div_table) {
		for (clkt = mix->div_table; clkt->div; clkt++)
			if (clkt->val == val)
				return clkt->div;
		if (clkt->div == 0)
			return 0;
	}
	return val + 1;
}

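/* Translate a mux field value from the register into a parent index. */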
static unsigned int _get_mux(struct mmp_clk_mix *mix, unsigned int val)
{
	int num_parents = clk_hw_get_num_parents(&mix->hw);
	int i;

	if (mix->mux_flags & CLK_MUX_INDEX_BIT)
		return ffs(val) - 1;
	if (mix->mux_flags & CLK_MUX_INDEX_ONE)
		return val - 1;
	if (mix->mux_table) {
		for (i = 0; i < num_parents; i++)
			if (mix->mux_table[i] == val)
				return i;
		if (i == num_parents)
			return 0;
	}

	return val;
}
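
/* Translate a divider into the value to program into the div field. */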
static unsigned int _get_div_val(struct mmp_clk_mix *mix, unsigned int div)
{
	struct clk_div_table *clkt;

	if (mix->div_flags & CLK_DIVIDER_ONE_BASED)
		return div;
	if (mix->div_flags & CLK_DIVIDER_POWER_OF_TWO)
		return __ffs(div);
	if (mix->div_table) {
		for (clkt = mix->div_table; clkt->div; clkt++)
			if (clkt->div == div)
				return clkt->val;
		if (clkt->div == 0)
			return 0;
	}

	return div - 1;
}

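/* Translate a parent index into the value to program into the mux field. */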
static unsigned int _get_mux_val(struct mmp_clk_mix *mix, unsigned int mux)
{
	if (mix->mux_table)
		return mix->mux_table[mux];

	return mux;
}

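/*
 * Mark table entries whose rate is not an exact division of the parent
 * rate as invalid; otherwise cache the required divisor.
 */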
static void _filter_clk_table(struct mmp_clk_mix *mix,
				struct mmp_clk_mix_clk_table *table,
				unsigned int table_size)
{
	int i;
	struct mmp_clk_mix_clk_table *item;
	struct clk_hw *parent, *hw;
	unsigned long parent_rate;

	hw = &mix->hw;

	for (i = 0; i < table_size; i++) {
		item = &table[i];
		parent = clk_hw_get_parent_by_index(hw, item->parent_index);
		parent_rate = clk_hw_get_rate(parent);
		if (parent_rate % item->rate) {
			item->valid = 0;
		} else {
			item->divisor = parent_rate / item->rate;
			item->valid = 1;
		}
	}
}

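/*
 * Program the mux and/or div fields.  V1 writes the control register
 * directly; V2 additionally sets the frequency-change bit and polls until
 * the hardware clears it; V3 raises the frequency-change bit in the
 * control register and writes the new value to the separate select
 * register.
 */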
static int _set_rate(struct mmp_clk_mix *mix, u32 mux_val, u32 div_val,
			unsigned int change_mux, unsigned int change_div)
{
	struct mmp_clk_mix_reg_info *ri = &mix->reg_info;
	u8 width, shift;
	u32 mux_div, fc_req;
	int ret, timeout = 50;
	unsigned long flags = 0;

	if (!change_mux && !change_div)
		return -EINVAL;

	if (mix->lock)
		spin_lock_irqsave(mix->lock, flags);

	if (mix->type == MMP_CLK_MIX_TYPE_V1
		|| mix->type == MMP_CLK_MIX_TYPE_V2)
		mux_div = readl(ri->reg_clk_ctrl);
	else
		mux_div = readl(ri->reg_clk_sel);

	if (change_div) {
		width = ri->width_div;
		shift = ri->shift_div;
		mux_div &= ~MMP_CLK_BITS_MASK(width, shift);
		mux_div |= MMP_CLK_BITS_SET_VAL(div_val, width, shift);
	}

	if (change_mux) {
		width = ri->width_mux;
		shift = ri->shift_mux;
		mux_div &= ~MMP_CLK_BITS_MASK(width, shift);
		mux_div |= MMP_CLK_BITS_SET_VAL(mux_val, width, shift);
	}

	if (mix->type == MMP_CLK_MIX_TYPE_V1) {
		writel(mux_div, ri->reg_clk_ctrl);
	} else if (mix->type == MMP_CLK_MIX_TYPE_V2) {
		mux_div |= (1 << ri->bit_fc);
		writel(mux_div, ri->reg_clk_ctrl);

		do {
			fc_req = readl(ri->reg_clk_ctrl);
			timeout--;
			if (!(fc_req & (1 << ri->bit_fc)))
				break;
		} while (timeout);

		if (timeout == 0) {
			pr_err("%s:%s cannot do frequency change\n",
				__func__, clk_hw_get_name(&mix->hw));
			ret = -EBUSY;
			goto error;
		}
	} else {
		fc_req = readl(ri->reg_clk_ctrl);
		fc_req |= 1 << ri->bit_fc;
		writel(fc_req, ri->reg_clk_ctrl);
		writel(mux_div, ri->reg_clk_sel);
		fc_req &= ~(1 << ri->bit_fc);
	}

	ret = 0;
error:
	if (mix->lock)
		spin_unlock_irqrestore(mix->lock, flags);

	return ret;
}

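/*
 * Pick the parent/divider combination whose output is closest to the
 * requested rate, either from the pre-filtered clock table or by scanning
 * every parent and divider value.
 */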
static int mmp_clk_mix_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);
	struct mmp_clk_mix_clk_table *item;
	struct clk_hw *parent, *parent_best;
	unsigned long parent_rate, mix_rate, mix_rate_best, parent_rate_best;
	unsigned long gap, gap_best;
	u32 div_val_max;
	unsigned int div;
	int i, j;

	mix_rate_best = 0;
	parent_rate_best = 0;
	gap_best = ULONG_MAX;
	parent_best = NULL;

	if (mix->table) {
		for (i = 0; i < mix->table_size; i++) {
			item = &mix->table[i];
			if (item->valid == 0)
				continue;
			parent = clk_hw_get_parent_by_index(hw,
							item->parent_index);
			parent_rate = clk_hw_get_rate(parent);
			mix_rate = parent_rate / item->divisor;
			gap = abs(mix_rate - req->rate);
			if (parent_best == NULL || gap < gap_best) {
				parent_best = parent;
				parent_rate_best = parent_rate;
				mix_rate_best = mix_rate;
				gap_best = gap;
				if (gap_best == 0)
					goto found;
			}
		}
	} else {
		for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
			parent = clk_hw_get_parent_by_index(hw, i);
			parent_rate = clk_hw_get_rate(parent);
			div_val_max = _get_maxdiv(mix);
			for (j = 0; j < div_val_max; j++) {
				div = _get_div(mix, j);
				mix_rate = parent_rate / div;
				gap = abs(mix_rate - req->rate);
				if (parent_best == NULL || gap < gap_best) {
					parent_best = parent;
					parent_rate_best = parent_rate;
					mix_rate_best = mix_rate;
					gap_best = gap;
					if (gap_best == 0)
						goto found;
				}
			}
		}
	}

found:
	if (!parent_best)
		return -EINVAL;

	req->best_parent_rate = parent_rate_best;
	req->best_parent_hw = parent_best;
	req->rate = mix_rate_best;

	return 0;
}

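/*
 * Derive the divider from the chosen parent rate, then program the mux
 * and div fields together.
 */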
static int mmp_clk_mix_set_rate_and_parent(struct clk_hw *hw,
						unsigned long rate,
						unsigned long parent_rate,
						u8 index)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);
	unsigned int div;
	u32 div_val, mux_val;

	div = parent_rate / rate;
	div_val = _get_div_val(mix, div);
	mux_val = _get_mux_val(mix, index);

	return _set_rate(mix, mux_val, div_val, 1, 1);
}

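/* Read the mux field from the hardware and decode it into a parent index. */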
static u8 mmp_clk_mix_get_parent(struct clk_hw *hw)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);
	struct mmp_clk_mix_reg_info *ri = &mix->reg_info;
	unsigned long flags = 0;
	u32 mux_div = 0;
	u8 width, shift;
	u32 mux_val;

	if (mix->lock)
		spin_lock_irqsave(mix->lock, flags);

	if (mix->type == MMP_CLK_MIX_TYPE_V1
		|| mix->type == MMP_CLK_MIX_TYPE_V2)
		mux_div = readl(ri->reg_clk_ctrl);
	else
		mux_div = readl(ri->reg_clk_sel);

	if (mix->lock)
		spin_unlock_irqrestore(mix->lock, flags);

	width = mix->reg_info.width_mux;
	shift = mix->reg_info.shift_mux;

	mux_val = MMP_CLK_BITS_GET_VAL(mux_div, width, shift);

	return _get_mux(mix, mux_val);
}

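/* Read the div field from the hardware and return parent_rate / divider. */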
static unsigned long mmp_clk_mix_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);
	struct mmp_clk_mix_reg_info *ri = &mix->reg_info;
	unsigned long flags = 0;
	u32 mux_div = 0;
	u8 width, shift;
	unsigned int div;

	if (mix->lock)
		spin_lock_irqsave(mix->lock, flags);

	if (mix->type == MMP_CLK_MIX_TYPE_V1
		|| mix->type == MMP_CLK_MIX_TYPE_V2)
		mux_div = readl(ri->reg_clk_ctrl);
	else
		mux_div = readl(ri->reg_clk_sel);

	if (mix->lock)
		spin_unlock_irqrestore(mix->lock, flags);

	width = mix->reg_info.width_div;
	shift = mix->reg_info.shift_div;

	div = _get_div(mix, MMP_CLK_BITS_GET_VAL(mux_div, width, shift));

	return parent_rate / div;
}

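/*
 * Switch to a new parent.  When a clock table is present, the matching
 * entry's divisor is programmed along with the mux; otherwise only the
 * mux field is changed.
 */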
static int mmp_clk_set_parent(struct clk_hw *hw, u8 index)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);
	struct mmp_clk_mix_clk_table *item;
	int i;
	u32 div_val, mux_val;

	if (mix->table) {
		for (i = 0; i < mix->table_size; i++) {
			item = &mix->table[i];
			if (item->valid == 0)
				continue;
			if (item->parent_index == index)
				break;
		}
		if (i < mix->table_size) {
			div_val = _get_div_val(mix, item->divisor);
			mux_val = _get_mux_val(mix, item->parent_index);
		} else
			return -EINVAL;
	} else {
		mux_val = _get_mux_val(mix, index);
		div_val = 0;
	}

	return _set_rate(mix, mux_val, div_val, 1, div_val ? 1 : 0);
}

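/*
 * Find the parent (and, with a clock table, the divisor) that matches the
 * best parent rate chosen by determine_rate, then program the hardware.
 */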
static int mmp_clk_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long best_parent_rate)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);
	struct mmp_clk_mix_clk_table *item;
	unsigned long parent_rate;
	unsigned int best_divisor;
	struct clk_hw *parent;
	int i;

	best_divisor = best_parent_rate / rate;

	if (mix->table) {
		for (i = 0; i < mix->table_size; i++) {
			item = &mix->table[i];
			if (item->valid == 0)
				continue;
			parent = clk_hw_get_parent_by_index(hw,
							item->parent_index);
			parent_rate = clk_hw_get_rate(parent);
			if (parent_rate == best_parent_rate
				&& item->divisor == best_divisor)
				break;
		}
		if (i < mix->table_size)
			return _set_rate(mix,
				_get_mux_val(mix, item->parent_index),
				_get_div_val(mix, item->divisor),
				1, 1);
		else
			return -EINVAL;
	} else {
		for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
			parent = clk_hw_get_parent_by_index(hw, i);
			parent_rate = clk_hw_get_rate(parent);
			if (parent_rate == best_parent_rate)
				break;
		}
		if (i < clk_hw_get_num_parents(hw))
			return _set_rate(mix, _get_mux_val(mix, i),
				_get_div_val(mix, best_divisor), 1, 1);
		else
			return -EINVAL;
	}
}

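/* Validate the clock table against the current parent rates at init time. */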
static void mmp_clk_mix_init(struct clk_hw *hw)
{
	struct mmp_clk_mix *mix = to_clk_mix(hw);

	if (mix->table)
		_filter_clk_table(mix, mix->table, mix->table_size);
}

const struct clk_ops mmp_clk_mix_ops = {
	.determine_rate = mmp_clk_mix_determine_rate,
	.set_rate_and_parent = mmp_clk_mix_set_rate_and_parent,
	.set_rate = mmp_clk_set_rate,
	.set_parent = mmp_clk_set_parent,
	.get_parent = mmp_clk_mix_get_parent,
	.recalc_rate = mmp_clk_mix_recalc_rate,
	.init = mmp_clk_mix_init,
};

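/*
 * Allocate a mix clock, copy the optional clock and mux tables, select the
 * register layout (no FC bit -> V1, separate select register -> V3,
 * otherwise V2) and register it with the clock framework.
 */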
struct clk *mmp_clk_register_mix(struct device *dev,
					const char *name,
					const char **parent_names,
					u8 num_parents,
					unsigned long flags,
					struct mmp_clk_mix_config *config,
					spinlock_t *lock)
{
	struct mmp_clk_mix *mix;
	struct clk *clk;
	struct clk_init_data init;
	size_t table_bytes;

	mix = kzalloc(sizeof(*mix), GFP_KERNEL);
	if (!mix) {
		pr_err("%s:%s: could not allocate mmp mix clk\n",
			__func__, name);
		return ERR_PTR(-ENOMEM);
	}

	init.name = name;
	init.flags = flags | CLK_GET_RATE_NOCACHE;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.ops = &mmp_clk_mix_ops;

	memcpy(&mix->reg_info, &config->reg_info, sizeof(config->reg_info));
	if (config->table) {
		table_bytes = sizeof(*config->table) * config->table_size;
		mix->table = kmemdup(config->table, table_bytes, GFP_KERNEL);
		if (!mix->table) {
			pr_err("%s:%s: could not allocate mmp mix table\n",
				__func__, name);
			kfree(mix);
			return ERR_PTR(-ENOMEM);
		}
		mix->table_size = config->table_size;
	}

	if (config->mux_table) {
		table_bytes = sizeof(u32) * num_parents;
		mix->mux_table = kmemdup(config->mux_table, table_bytes,
					 GFP_KERNEL);
		if (!mix->mux_table) {
			pr_err("%s:%s: could not allocate mmp mix mux-table\n",
				__func__, name);
			kfree(mix->table);
			kfree(mix);
			return ERR_PTR(-ENOMEM);
		}
	}

	mix->div_flags = config->div_flags;
	mix->mux_flags = config->mux_flags;
	mix->lock = lock;
	mix->hw.init = &init;

	if (config->reg_info.bit_fc >= 32)
		mix->type = MMP_CLK_MIX_TYPE_V1;
	else if (config->reg_info.reg_clk_sel)
		mix->type = MMP_CLK_MIX_TYPE_V3;
	else
		mix->type = MMP_CLK_MIX_TYPE_V2;
	clk = clk_register(dev, &mix->hw);

	if (IS_ERR(clk)) {
		kfree(mix->mux_table);
		kfree(mix->table);
		kfree(mix);
	}

	return clk;
}