// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014 Intel Corporation
 *
 * Adjustable fractional divider clock implementation.
 * Output rate = (m / n) * parent_rate.
 * Uses rational best approximation algorithm.
 */

#include <linux/clk-provider.h>
#include <linux/module.h>
#include <linux/device.h>
#include <linux/slab.h>
#include <linux/rational.h>

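/*
 * Worked example of the formula above (values are hypothetical, not taken
 * from any particular SoC): with parent_rate = 19200000 Hz, m = 5 and
 * n = 12, the divider output is
 *
 *	19200000 * 5 / 12 = 8000000 Hz
 *
 * i.e. rate = (m / n) * parent_rate as stated in the header comment.
 */
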
static unsigned long clk_fd_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long flags = 0;
	unsigned long m, n;
	u32 val;
	u64 ret;

	if (fd->lock)
		spin_lock_irqsave(fd->lock, flags);
	else
		__acquire(fd->lock);

	val = clk_readl(fd->reg);

	if (fd->lock)
		spin_unlock_irqrestore(fd->lock, flags);
	else
		__release(fd->lock);

	m = (val & fd->mmask) >> fd->mshift;
	n = (val & fd->nmask) >> fd->nshift;

	if (fd->flags & CLK_FRAC_DIVIDER_ZERO_BASED) {
		m++;
		n++;
	}

	if (!n || !m)
		return parent_rate;

	ret = (u64)parent_rate * m;
	do_div(ret, n);

	return ret;
}

static void clk_fd_general_approximation(struct clk_hw *hw, unsigned long rate,
					 unsigned long *parent_rate,
					 unsigned long *m, unsigned long *n)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long scale;

	/*
	 * Get rate closer to *parent_rate to guarantee there is no overflow
	 * for m and n. In the result it will be the nearest rate left shifted
	 * by (scale - fd->nwidth) bits.
	 */
	scale = fls_long(*parent_rate / rate - 1);
	if (scale > fd->nwidth)
		rate <<= scale - fd->nwidth;

	rational_best_approximation(rate, *parent_rate,
			GENMASK(fd->mwidth - 1, 0), GENMASK(fd->nwidth - 1, 0),
			m, n);
}

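/*
 * Worked example of the scaling step above (hypothetical numbers): with
 * *parent_rate = 1000000000 Hz, rate = 1000 Hz and fd->nwidth = 8,
 *
 *	scale = fls_long(1000000000 / 1000 - 1) = fls_long(999999) = 20;
 *
 * scale > nwidth, so rate is shifted left by 20 - 8 = 12 bits before the
 * rational approximation, which keeps m and n within their fields (assumed
 * 8 bits wide here) instead of overflowing them.
 */
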
static long clk_fd_round_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long *parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long m, n;
	u64 ret;

	if (!rate || rate >= *parent_rate)
		return *parent_rate;

	if (fd->approximation)
		fd->approximation(hw, rate, parent_rate, &m, &n);
	else
		clk_fd_general_approximation(hw, rate, parent_rate, &m, &n);

	ret = (u64)*parent_rate * m;
	do_div(ret, n);

	return ret;
}

static int clk_fd_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct clk_fractional_divider *fd = to_clk_fd(hw);
	unsigned long flags = 0;
	unsigned long m, n;
	u32 val;

	rational_best_approximation(rate, parent_rate,
			GENMASK(fd->mwidth - 1, 0), GENMASK(fd->nwidth - 1, 0),
			&m, &n);

	if (fd->flags & CLK_FRAC_DIVIDER_ZERO_BASED) {
		m--;
		n--;
	}

	if (fd->lock)
		spin_lock_irqsave(fd->lock, flags);
	else
		__acquire(fd->lock);

	val = clk_readl(fd->reg);
	val &= ~(fd->mmask | fd->nmask);
	val |= (m << fd->mshift) | (n << fd->nshift);
	clk_writel(val, fd->reg);

	if (fd->lock)
		spin_unlock_irqrestore(fd->lock, flags);
	else
		__release(fd->lock);

	return 0;
}

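/*
 * Note on CLK_FRAC_DIVIDER_ZERO_BASED, with hypothetical values: if the
 * flag is set, a best approximation of m = 5, n = 16 is programmed into the
 * register as 4 and 15, and clk_fd_recalc_rate() above adds the 1 back when
 * reading the fields out.
 */
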
const struct clk_ops clk_fractional_divider_ops = {
	.recalc_rate = clk_fd_recalc_rate,
	.round_rate = clk_fd_round_rate,
	.set_rate = clk_fd_set_rate,
};
EXPORT_SYMBOL_GPL(clk_fractional_divider_ops);

struct clk_hw *clk_hw_register_fractional_divider(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 mshift, u8 mwidth, u8 nshift, u8 nwidth,
		u8 clk_divider_flags, spinlock_t *lock)
{
	struct clk_fractional_divider *fd;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	fd = kzalloc(sizeof(*fd), GFP_KERNEL);
	if (!fd)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &clk_fractional_divider_ops;
	init.flags = flags | CLK_IS_BASIC;
	init.parent_names = parent_name ? &parent_name : NULL;
	init.num_parents = parent_name ? 1 : 0;

	fd->reg = reg;
	fd->mshift = mshift;
	fd->mwidth = mwidth;
	fd->mmask = GENMASK(mwidth - 1, 0) << mshift;
	fd->nshift = nshift;
	fd->nwidth = nwidth;
	fd->nmask = GENMASK(nwidth - 1, 0) << nshift;
	fd->flags = clk_divider_flags;
	fd->lock = lock;
	fd->hw.init = &init;

	hw = &fd->hw;
	ret = clk_hw_register(dev, hw);
	if (ret) {
		kfree(fd);
		hw = ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(clk_hw_register_fractional_divider);

struct clk *clk_register_fractional_divider(struct device *dev,
		const char *name, const char *parent_name, unsigned long flags,
		void __iomem *reg, u8 mshift, u8 mwidth, u8 nshift, u8 nwidth,
		u8 clk_divider_flags, spinlock_t *lock)
{
	struct clk_hw *hw;

	hw = clk_hw_register_fractional_divider(dev, name, parent_name, flags,
			reg, mshift, mwidth, nshift, nwidth, clk_divider_flags,
			lock);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_fractional_divider);

void clk_hw_unregister_fractional_divider(struct clk_hw *hw)
{
	struct clk_fractional_divider *fd;

	fd = to_clk_fd(hw);

	clk_hw_unregister(hw);
	kfree(fd);
}