/*
 * Copyright 2012 Freescale Semiconductor, Inc.
 * Copyright 2012 Linaro Ltd.
 *
 * The code contained herein is licensed under the GNU General Public
 * License. You may obtain a copy of the GNU General Public License
 * Version 2 or later at the following locations:
 *
 * http://www.opensource.org/licenses/gpl-license.html
 * http://www.gnu.org/copyleft/gpl.html
 */

#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/jiffies.h>
#include <linux/err.h>
#include "clk.h"

#define PLL_NUM_OFFSET		0x10
#define PLL_DENOM_OFFSET	0x20

#define PLL_VF610_NUM_OFFSET	0x20
#define PLL_VF610_DENOM_OFFSET	0x30

#define BM_PLL_POWER		(0x1 << 12)
#define BM_PLL_LOCK		(0x1 << 31)
#define IMX7_ENET_PLL_POWER	(0x1 << 5)
#define IMX7_DDR_PLL_POWER	(0x1 << 20)

/**
 * struct clk_pllv3 - IMX PLL clock version 3
 * @hw:		 clock source
 * @base:	 base address of PLL registers
 * @power_bit:	 pll power bit mask
 * @powerup_set: set power_bit to power up the PLL
 * @div_mask:	 mask of divider bits
 * @div_shift:	 shift of divider bits
 * @ref_clock:	 fixed rate reported for the ENET PLL types
 *
 * IMX PLL clock version 3, found on i.MX6 series.  Divider for pllv3
 * is actually a multiplier, and normally sits at bit 0 (see @div_shift).
 */
struct clk_pllv3 {
	struct clk_hw	hw;
	void __iomem	*base;
	u32		power_bit;
	bool		powerup_set;
	u32		div_mask;
	u32		div_shift;
	unsigned long	ref_clock;
};

#define to_clk_pllv3(_hw) container_of(_hw, struct clk_pllv3, hw)

static int clk_pllv3_wait_lock(struct clk_pllv3 *pll)
{
	unsigned long timeout = jiffies + msecs_to_jiffies(10);
	u32 val = readl_relaxed(pll->base) & pll->power_bit;

	/* No need to wait for lock when pll is not powered up */
	if ((pll->powerup_set && !val) || (!pll->powerup_set && val))
		return 0;

	/* Wait for PLL to lock */
	do {
		if (readl_relaxed(pll->base) & BM_PLL_LOCK)
			break;
		if (time_after(jiffies, timeout))
			break;
		usleep_range(50, 500);
	} while (1);

	return readl_relaxed(pll->base) & BM_PLL_LOCK ? 0 : -ETIMEDOUT;
}

static int clk_pllv3_prepare(struct clk_hw *hw)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 val;

	val = readl_relaxed(pll->base);
	if (pll->powerup_set)
		val |= pll->power_bit;
	else
		val &= ~pll->power_bit;
	writel_relaxed(val, pll->base);

	return clk_pllv3_wait_lock(pll);
}

static void clk_pllv3_unprepare(struct clk_hw *hw)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 val;

	val = readl_relaxed(pll->base);
	if (pll->powerup_set)
		val &= ~pll->power_bit;
	else
		val |= pll->power_bit;
	writel_relaxed(val, pll->base);
}

static int clk_pllv3_is_prepared(struct clk_hw *hw)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);

	if (readl_relaxed(pll->base) & BM_PLL_LOCK)
		return 1;

	return 0;
}

static unsigned long clk_pllv3_recalc_rate(struct clk_hw *hw,
					   unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 div = (readl_relaxed(pll->base) >> pll->div_shift) & pll->div_mask;

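	/* The single divider bit selects a fixed multiplier: 0 -> x20, 1 -> x22 */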
	return (div == 1) ? parent_rate * 22 : parent_rate * 20;
}

static long clk_pllv3_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	unsigned long parent_rate = *prate;

	return (rate >= parent_rate * 22) ? parent_rate * 22 :
					    parent_rate * 20;
}

static int clk_pllv3_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 val, div;

	if (rate == parent_rate * 22)
		div = 1;
	else if (rate == parent_rate * 20)
		div = 0;
	else
		return -EINVAL;

	val = readl_relaxed(pll->base);
	val &= ~(pll->div_mask << pll->div_shift);
	val |= (div << pll->div_shift);
	writel_relaxed(val, pll->base);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_recalc_rate,
	.round_rate	= clk_pllv3_round_rate,
	.set_rate	= clk_pllv3_set_rate,
};

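/*
 * SYS PLL: Fout = Fref * div_select / 2, with div_select kept in the
 * range [54, 108] by the round/set rate callbacks below.
 */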
static unsigned long clk_pllv3_sys_recalc_rate(struct clk_hw *hw,
					       unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 div = readl_relaxed(pll->base) & pll->div_mask;

	return parent_rate * div / 2;
}

static long clk_pllv3_sys_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	unsigned long parent_rate = *prate;
	unsigned long min_rate = parent_rate * 54 / 2;
	unsigned long max_rate = parent_rate * 108 / 2;
	u32 div;

	if (rate > max_rate)
		rate = max_rate;
	else if (rate < min_rate)
		rate = min_rate;
	div = rate * 2 / parent_rate;

	return parent_rate * div / 2;
}

static int clk_pllv3_sys_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	unsigned long min_rate = parent_rate * 54 / 2;
	unsigned long max_rate = parent_rate * 108 / 2;
	u32 val, div;

	if (rate < min_rate || rate > max_rate)
		return -EINVAL;

	div = rate * 2 / parent_rate;
	val = readl_relaxed(pll->base);
	val &= ~pll->div_mask;
	val |= div;
	writel_relaxed(val, pll->base);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_sys_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_sys_recalc_rate,
	.round_rate	= clk_pllv3_sys_round_rate,
	.set_rate	= clk_pllv3_sys_set_rate,
};

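/*
 * Audio/Video PLL: Fout = Fref * (div + mfn/mfd), with div in [27, 54]
 * and the fractional part taken from the NUM/DENOM registers.
 */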
static unsigned long clk_pllv3_av_recalc_rate(struct clk_hw *hw,
					      unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	u32 mfn = readl_relaxed(pll->base + PLL_NUM_OFFSET);
	u32 mfd = readl_relaxed(pll->base + PLL_DENOM_OFFSET);
	u32 div = readl_relaxed(pll->base) & pll->div_mask;
	u64 temp64 = (u64)parent_rate;

	temp64 *= mfn;
	do_div(temp64, mfd);

	return parent_rate * div + (unsigned long)temp64;
}

static long clk_pllv3_av_round_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	unsigned long parent_rate = *prate;
	unsigned long min_rate = parent_rate * 27;
	unsigned long max_rate = parent_rate * 54;
	u32 div;
	u32 mfn, mfd = 1000000;
	u32 max_mfd = 0x3FFFFFFF;
	u64 temp64;

	if (rate > max_rate)
		rate = max_rate;
	else if (rate < min_rate)
		rate = min_rate;

	if (parent_rate <= max_mfd)
		mfd = parent_rate;

	div = rate / parent_rate;
	temp64 = (u64) (rate - div * parent_rate);
	temp64 *= mfd;
	do_div(temp64, parent_rate);
	mfn = temp64;

	temp64 = (u64)parent_rate;
	temp64 *= mfn;
	do_div(temp64, mfd);

	return parent_rate * div + (unsigned long)temp64;
}

static int clk_pllv3_av_set_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	unsigned long min_rate = parent_rate * 27;
	unsigned long max_rate = parent_rate * 54;
	u32 val, div;
	u32 mfn, mfd = 1000000;
	u32 max_mfd = 0x3FFFFFFF;
	u64 temp64;

	if (rate < min_rate || rate > max_rate)
		return -EINVAL;

	if (parent_rate <= max_mfd)
		mfd = parent_rate;

	div = rate / parent_rate;
	temp64 = (u64) (rate - div * parent_rate);
	temp64 *= mfd;
	do_div(temp64, parent_rate);
	mfn = temp64;

	val = readl_relaxed(pll->base);
	val &= ~pll->div_mask;
	val |= div;
	writel_relaxed(val, pll->base);
	writel_relaxed(mfn, pll->base + PLL_NUM_OFFSET);
	writel_relaxed(mfd, pll->base + PLL_DENOM_OFFSET);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_av_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_av_recalc_rate,
	.round_rate	= clk_pllv3_av_round_rate,
	.set_rate	= clk_pllv3_av_set_rate,
};

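/*
 * VF610 variant of the SYS PLL: Fout = Fref * (mfi + mfn/mfd), where mfi
 * is restricted to 20 or 22 and the NUM/DENOM registers sit at the
 * VF610-specific offsets defined above.
 */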
struct clk_pllv3_vf610_mf {
	u32 mfi;	/* integer part, can be 20 or 22 */
	u32 mfn;	/* numerator, 30-bit value */
	u32 mfd;	/* denominator, 30-bit value, must be greater than mfn */
};

static unsigned long clk_pllv3_vf610_mf_to_rate(unsigned long parent_rate,
						struct clk_pllv3_vf610_mf mf)
{
	u64 temp64;

	temp64 = parent_rate;
	temp64 *= mf.mfn;
	do_div(temp64, mf.mfd);

	return (parent_rate * mf.mfi) + temp64;
}

static struct clk_pllv3_vf610_mf clk_pllv3_vf610_rate_to_mf(
		unsigned long parent_rate, unsigned long rate)
{
	struct clk_pllv3_vf610_mf mf;
	u64 temp64;

	mf.mfi = (rate >= 22 * parent_rate) ? 22 : 20;
	mf.mfd = 0x3fffffff;	/* use max supported value for best accuracy */

	if (rate <= parent_rate * mf.mfi)
		mf.mfn = 0;
	else if (rate >= parent_rate * (mf.mfi + 1))
		mf.mfn = mf.mfd - 1;
	else {
		/* rate = parent_rate * (mfi + mfn/mfd) */
		temp64 = rate - parent_rate * mf.mfi;
		temp64 *= mf.mfd;
		do_div(temp64, parent_rate);
		mf.mfn = temp64;
	}

	return mf;
}

static unsigned long clk_pllv3_vf610_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	struct clk_pllv3_vf610_mf mf;

	mf.mfn = readl_relaxed(pll->base + PLL_VF610_NUM_OFFSET);
	mf.mfd = readl_relaxed(pll->base + PLL_VF610_DENOM_OFFSET);
	mf.mfi = (readl_relaxed(pll->base) & pll->div_mask) ? 22 : 20;

	return clk_pllv3_vf610_mf_to_rate(parent_rate, mf);
}

static long clk_pllv3_vf610_round_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long *prate)
{
	struct clk_pllv3_vf610_mf mf = clk_pllv3_vf610_rate_to_mf(*prate, rate);

	return clk_pllv3_vf610_mf_to_rate(*prate, mf);
}

static int clk_pllv3_vf610_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);
	struct clk_pllv3_vf610_mf mf =
			clk_pllv3_vf610_rate_to_mf(parent_rate, rate);
	u32 val;

	val = readl_relaxed(pll->base);
	if (mf.mfi == 20)
		val &= ~pll->div_mask;	/* clear bit for mfi=20 */
	else
		val |= pll->div_mask;	/* set bit for mfi=22 */
	writel_relaxed(val, pll->base);

	writel_relaxed(mf.mfn, pll->base + PLL_VF610_NUM_OFFSET);
	writel_relaxed(mf.mfd, pll->base + PLL_VF610_DENOM_OFFSET);

	return clk_pllv3_wait_lock(pll);
}

static const struct clk_ops clk_pllv3_vf610_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_vf610_recalc_rate,
	.round_rate	= clk_pllv3_vf610_round_rate,
	.set_rate	= clk_pllv3_vf610_set_rate,
};

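/*
 * The ENET PLL reports a fixed rate (500 MHz or 1 GHz, see the ref_clock
 * assignments in imx_clk_pllv3() below), so no round_rate/set_rate
 * callbacks are provided.
 */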
static unsigned long clk_pllv3_enet_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
{
	struct clk_pllv3 *pll = to_clk_pllv3(hw);

	return pll->ref_clock;
}

static const struct clk_ops clk_pllv3_enet_ops = {
	.prepare	= clk_pllv3_prepare,
	.unprepare	= clk_pllv3_unprepare,
	.is_prepared	= clk_pllv3_is_prepared,
	.recalc_rate	= clk_pllv3_enet_recalc_rate,
};

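/*
 * Register one pllv3 clock.  A typical call from an SoC clock driver looks
 * roughly like the following (illustrative sketch only; names and the 0x7f
 * divider mask follow the i.MX6 SYS PLL usage):
 *
 *	clk = imx_clk_pllv3(IMX_PLLV3_SYS, "pll1_sys", "osc", base, 0x7f);
 */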
struct clk *imx_clk_pllv3(enum imx_pllv3_type type, const char *name,
			  const char *parent_name, void __iomem *base,
			  u32 div_mask)
{
	struct clk_pllv3 *pll;
	const struct clk_ops *ops;
	struct clk *clk;
	struct clk_init_data init;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	pll->power_bit = BM_PLL_POWER;

	switch (type) {
	case IMX_PLLV3_SYS:
		ops = &clk_pllv3_sys_ops;
		break;
	case IMX_PLLV3_SYS_VF610:
		ops = &clk_pllv3_vf610_ops;
		break;
	case IMX_PLLV3_USB_VF610:
		pll->div_shift = 1;
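		/* fall through */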
	case IMX_PLLV3_USB:
		ops = &clk_pllv3_ops;
		pll->powerup_set = true;
		break;
	case IMX_PLLV3_AV:
		ops = &clk_pllv3_av_ops;
		break;
	case IMX_PLLV3_ENET_IMX7:
		pll->power_bit = IMX7_ENET_PLL_POWER;
		pll->ref_clock = 1000000000;
		ops = &clk_pllv3_enet_ops;
		break;
	case IMX_PLLV3_ENET:
		pll->ref_clock = 500000000;
		ops = &clk_pllv3_enet_ops;
		break;
	case IMX_PLLV3_DDR_IMX7:
		pll->power_bit = IMX7_DDR_PLL_POWER;
		ops = &clk_pllv3_av_ops;
		break;
	default:
		ops = &clk_pllv3_ops;
	}
	pll->base = base;
	pll->div_mask = div_mask;

	init.name = name;
	init.ops = ops;
	init.flags = 0;
	init.parent_names = &parent_name;
	init.num_parents = 1;

	pll->hw.init = &init;

	clk = clk_register(NULL, &pll->hw);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}