// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
 */
6 #include <linux/kernel.h>
7 #include <linux/bitops.h>
10 #include <linux/export.h>
11 #include <linux/clk-provider.h>
12 #include <linux/delay.h>
13 #include <linux/regmap.h>
14 #include <linux/math64.h>
16 #include <asm/div64.h>
/* Offsets of the RCG register block, relative to cmd_rcgr */
#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
#define CFG_HW_CLK_CTRL_MASK	BIT(20)

/* M/N/D counter registers for MND-capable RCGs */
#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10
48 static int clk_rcg2_is_enabled(struct clk_hw *hw)
50 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
54 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
58 return (cmd & CMD_ROOT_OFF) == 0;
61 static u8 clk_rcg2_get_parent(struct clk_hw *hw)
63 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
64 int num_parents = clk_hw_get_num_parents(hw);
68 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
72 cfg &= CFG_SRC_SEL_MASK;
73 cfg >>= CFG_SRC_SEL_SHIFT;
75 for (i = 0; i < num_parents; i++)
76 if (cfg == rcg->parent_map[i].cfg)
80 pr_debug("%s: Clock %s has invalid parent, using default.\n",
81 __func__, clk_hw_get_name(hw));
85 static int update_config(struct clk_rcg2 *rcg)
89 struct clk_hw *hw = &rcg->clkr.hw;
90 const char *name = clk_hw_get_name(hw);
92 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
93 CMD_UPDATE, CMD_UPDATE);
97 /* Wait for update to take effect */
98 for (count = 500; count > 0; count--) {
99 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
102 if (!(cmd & CMD_UPDATE))
107 WARN(1, "%s: rcg didn't update its configuration.", name);
111 static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
113 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
115 u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
117 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
118 CFG_SRC_SEL_MASK, cfg);
122 return update_config(rcg);
126 * Calculate m/n:d rate
129 * rate = ----------- x ---
133 calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
151 clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
153 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
154 u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;
156 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
158 if (rcg->mnd_width) {
159 mask = BIT(rcg->mnd_width) - 1;
160 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
162 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
166 mode = cfg & CFG_MODE_MASK;
167 mode >>= CFG_MODE_SHIFT;
170 mask = BIT(rcg->hid_width) - 1;
171 hid_div = cfg >> CFG_SRC_DIV_SHIFT;
174 return calc_rate(parent_rate, m, n, mode, hid_div);
177 static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
178 struct clk_rate_request *req,
179 enum freq_policy policy)
181 unsigned long clk_flags, rate = req->rate;
183 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
188 f = qcom_find_freq_floor(f, rate);
191 f = qcom_find_freq(f, rate);
200 index = qcom_find_src_index(hw, rcg->parent_map, f->src);
204 clk_flags = clk_hw_get_flags(hw);
205 p = clk_hw_get_parent_by_index(hw, index);
209 if (clk_flags & CLK_SET_RATE_PARENT) {
215 rate *= f->pre_div + 1;
225 rate = clk_hw_get_rate(p);
227 req->best_parent_hw = p;
228 req->best_parent_rate = rate;
234 static int clk_rcg2_determine_rate(struct clk_hw *hw,
235 struct clk_rate_request *req)
237 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
239 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
242 static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
243 struct clk_rate_request *req)
245 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
247 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
250 static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
253 struct clk_hw *hw = &rcg->clkr.hw;
254 int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);
259 if (rcg->mnd_width && f->n) {
260 mask = BIT(rcg->mnd_width) - 1;
261 ret = regmap_update_bits(rcg->clkr.regmap,
262 rcg->cmd_rcgr + M_REG, mask, f->m);
266 ret = regmap_update_bits(rcg->clkr.regmap,
267 rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
271 ret = regmap_update_bits(rcg->clkr.regmap,
272 rcg->cmd_rcgr + D_REG, mask, ~f->n);
277 mask = BIT(rcg->hid_width) - 1;
278 mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
279 cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
280 cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
281 if (rcg->mnd_width && f->n && (f->m != f->n))
282 cfg |= CFG_MODE_DUAL_EDGE;
284 return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
/* Program a frequency table entry and latch it into the hardware. */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	int ret;

	ret = __clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return update_config(rcg);
}
299 static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
300 enum freq_policy policy)
302 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
303 const struct freq_tbl *f;
307 f = qcom_find_freq_floor(rcg->freq_tbl, rate);
310 f = qcom_find_freq(rcg->freq_tbl, rate);
319 return clk_rcg2_configure(rcg, f);
322 static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
323 unsigned long parent_rate)
325 return __clk_rcg2_set_rate(hw, rate, CEIL);
328 static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
329 unsigned long parent_rate)
331 return __clk_rcg2_set_rate(hw, rate, FLOOR);
334 static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
335 unsigned long rate, unsigned long parent_rate, u8 index)
337 return __clk_rcg2_set_rate(hw, rate, CEIL);
340 static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
341 unsigned long rate, unsigned long parent_rate, u8 index)
343 return __clk_rcg2_set_rate(hw, rate, FLOOR);
346 const struct clk_ops clk_rcg2_ops = {
347 .is_enabled = clk_rcg2_is_enabled,
348 .get_parent = clk_rcg2_get_parent,
349 .set_parent = clk_rcg2_set_parent,
350 .recalc_rate = clk_rcg2_recalc_rate,
351 .determine_rate = clk_rcg2_determine_rate,
352 .set_rate = clk_rcg2_set_rate,
353 .set_rate_and_parent = clk_rcg2_set_rate_and_parent,
355 EXPORT_SYMBOL_GPL(clk_rcg2_ops);
357 const struct clk_ops clk_rcg2_floor_ops = {
358 .is_enabled = clk_rcg2_is_enabled,
359 .get_parent = clk_rcg2_get_parent,
360 .set_parent = clk_rcg2_set_parent,
361 .recalc_rate = clk_rcg2_recalc_rate,
362 .determine_rate = clk_rcg2_determine_floor_rate,
363 .set_rate = clk_rcg2_set_floor_rate,
364 .set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
366 EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
373 static const struct frac_entry frac_table_675m[] = { /* link rate of 270M */
374 { 52, 295 }, /* 119 M */
375 { 11, 57 }, /* 130.25 M */
376 { 63, 307 }, /* 138.50 M */
377 { 11, 50 }, /* 148.50 M */
378 { 47, 206 }, /* 154 M */
379 { 31, 100 }, /* 205.25 M */
380 { 107, 269 }, /* 268.50 M */
384 static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
385 { 31, 211 }, /* 119 M */
386 { 32, 199 }, /* 130.25 M */
387 { 63, 307 }, /* 138.50 M */
388 { 11, 60 }, /* 148.50 M */
389 { 50, 263 }, /* 154 M */
390 { 31, 120 }, /* 205.25 M */
391 { 119, 359 }, /* 268.50 M */
395 static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
396 unsigned long parent_rate)
398 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
399 struct freq_tbl f = *rcg->freq_tbl;
400 const struct frac_entry *frac;
402 s64 src_rate = parent_rate;
404 u32 mask = BIT(rcg->hid_width) - 1;
407 if (src_rate == 810000000)
408 frac = frac_table_810m;
410 frac = frac_table_675m;
412 for (; frac->num; frac++) {
414 request *= frac->den;
415 request = div_s64(request, frac->num);
416 if ((src_rate < (request - delta)) ||
417 (src_rate > (request + delta)))
420 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
423 f.pre_div >>= CFG_SRC_DIV_SHIFT;
428 return clk_rcg2_configure(rcg, &f);
434 static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
435 unsigned long rate, unsigned long parent_rate, u8 index)
437 /* Parent index is set statically in frequency table */
438 return clk_edp_pixel_set_rate(hw, rate, parent_rate);
441 static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
442 struct clk_rate_request *req)
444 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
445 const struct freq_tbl *f = rcg->freq_tbl;
446 const struct frac_entry *frac;
449 u32 mask = BIT(rcg->hid_width) - 1;
451 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
453 /* Force the correct parent */
454 req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
455 req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);
457 if (req->best_parent_rate == 810000000)
458 frac = frac_table_810m;
460 frac = frac_table_675m;
462 for (; frac->num; frac++) {
464 request *= frac->den;
465 request = div_s64(request, frac->num);
466 if ((req->best_parent_rate < (request - delta)) ||
467 (req->best_parent_rate > (request + delta)))
470 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
472 hid_div >>= CFG_SRC_DIV_SHIFT;
475 req->rate = calc_rate(req->best_parent_rate,
476 frac->num, frac->den,
477 !!frac->den, hid_div);
484 const struct clk_ops clk_edp_pixel_ops = {
485 .is_enabled = clk_rcg2_is_enabled,
486 .get_parent = clk_rcg2_get_parent,
487 .set_parent = clk_rcg2_set_parent,
488 .recalc_rate = clk_rcg2_recalc_rate,
489 .set_rate = clk_edp_pixel_set_rate,
490 .set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
491 .determine_rate = clk_edp_pixel_determine_rate,
493 EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
495 static int clk_byte_determine_rate(struct clk_hw *hw,
496 struct clk_rate_request *req)
498 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
499 const struct freq_tbl *f = rcg->freq_tbl;
500 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
501 unsigned long parent_rate, div;
502 u32 mask = BIT(rcg->hid_width) - 1;
508 req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
509 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
511 div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
512 div = min_t(u32, div, mask);
514 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
519 static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
520 unsigned long parent_rate)
522 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
523 struct freq_tbl f = *rcg->freq_tbl;
525 u32 mask = BIT(rcg->hid_width) - 1;
527 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
528 div = min_t(u32, div, mask);
532 return clk_rcg2_configure(rcg, &f);
535 static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
536 unsigned long rate, unsigned long parent_rate, u8 index)
538 /* Parent index is set statically in frequency table */
539 return clk_byte_set_rate(hw, rate, parent_rate);
542 const struct clk_ops clk_byte_ops = {
543 .is_enabled = clk_rcg2_is_enabled,
544 .get_parent = clk_rcg2_get_parent,
545 .set_parent = clk_rcg2_set_parent,
546 .recalc_rate = clk_rcg2_recalc_rate,
547 .set_rate = clk_byte_set_rate,
548 .set_rate_and_parent = clk_byte_set_rate_and_parent,
549 .determine_rate = clk_byte_determine_rate,
551 EXPORT_SYMBOL_GPL(clk_byte_ops);
553 static int clk_byte2_determine_rate(struct clk_hw *hw,
554 struct clk_rate_request *req)
556 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
557 unsigned long parent_rate, div;
558 u32 mask = BIT(rcg->hid_width) - 1;
560 unsigned long rate = req->rate;
565 p = req->best_parent_hw;
566 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
568 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
569 div = min_t(u32, div, mask);
571 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
576 static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
577 unsigned long parent_rate)
579 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
580 struct freq_tbl f = { 0 };
582 int i, num_parents = clk_hw_get_num_parents(hw);
583 u32 mask = BIT(rcg->hid_width) - 1;
586 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
587 div = min_t(u32, div, mask);
591 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
592 cfg &= CFG_SRC_SEL_MASK;
593 cfg >>= CFG_SRC_SEL_SHIFT;
595 for (i = 0; i < num_parents; i++) {
596 if (cfg == rcg->parent_map[i].cfg) {
597 f.src = rcg->parent_map[i].src;
598 return clk_rcg2_configure(rcg, &f);
605 static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
606 unsigned long rate, unsigned long parent_rate, u8 index)
608 /* Read the hardware to determine parent during set_rate */
609 return clk_byte2_set_rate(hw, rate, parent_rate);
612 const struct clk_ops clk_byte2_ops = {
613 .is_enabled = clk_rcg2_is_enabled,
614 .get_parent = clk_rcg2_get_parent,
615 .set_parent = clk_rcg2_set_parent,
616 .recalc_rate = clk_rcg2_recalc_rate,
617 .set_rate = clk_byte2_set_rate,
618 .set_rate_and_parent = clk_byte2_set_rate_and_parent,
619 .determine_rate = clk_byte2_determine_rate,
621 EXPORT_SYMBOL_GPL(clk_byte2_ops);
623 static const struct frac_entry frac_table_pixel[] = {
632 static int clk_pixel_determine_rate(struct clk_hw *hw,
633 struct clk_rate_request *req)
635 unsigned long request, src_rate;
637 const struct frac_entry *frac = frac_table_pixel;
639 for (; frac->num; frac++) {
640 request = (req->rate * frac->den) / frac->num;
642 src_rate = clk_hw_round_rate(req->best_parent_hw, request);
643 if ((src_rate < (request - delta)) ||
644 (src_rate > (request + delta)))
647 req->best_parent_rate = src_rate;
648 req->rate = (src_rate * frac->num) / frac->den;
655 static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
656 unsigned long parent_rate)
658 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
659 struct freq_tbl f = { 0 };
660 const struct frac_entry *frac = frac_table_pixel;
661 unsigned long request;
663 u32 mask = BIT(rcg->hid_width) - 1;
665 int i, num_parents = clk_hw_get_num_parents(hw);
667 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
668 cfg &= CFG_SRC_SEL_MASK;
669 cfg >>= CFG_SRC_SEL_SHIFT;
671 for (i = 0; i < num_parents; i++)
672 if (cfg == rcg->parent_map[i].cfg) {
673 f.src = rcg->parent_map[i].src;
677 for (; frac->num; frac++) {
678 request = (rate * frac->den) / frac->num;
680 if ((parent_rate < (request - delta)) ||
681 (parent_rate > (request + delta)))
684 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
687 f.pre_div >>= CFG_SRC_DIV_SHIFT;
692 return clk_rcg2_configure(rcg, &f);
697 static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
698 unsigned long parent_rate, u8 index)
700 return clk_pixel_set_rate(hw, rate, parent_rate);
703 const struct clk_ops clk_pixel_ops = {
704 .is_enabled = clk_rcg2_is_enabled,
705 .get_parent = clk_rcg2_get_parent,
706 .set_parent = clk_rcg2_set_parent,
707 .recalc_rate = clk_rcg2_recalc_rate,
708 .set_rate = clk_pixel_set_rate,
709 .set_rate_and_parent = clk_pixel_set_rate_and_parent,
710 .determine_rate = clk_pixel_determine_rate,
712 EXPORT_SYMBOL_GPL(clk_pixel_ops);
714 static int clk_gfx3d_determine_rate(struct clk_hw *hw,
715 struct clk_rate_request *req)
717 struct clk_rate_request parent_req = { };
718 struct clk_hw *p2, *p8, *p9, *xo;
719 unsigned long p9_rate;
722 xo = clk_hw_get_parent_by_index(hw, 0);
723 if (req->rate == clk_hw_get_rate(xo)) {
724 req->best_parent_hw = xo;
728 p9 = clk_hw_get_parent_by_index(hw, 2);
729 p2 = clk_hw_get_parent_by_index(hw, 3);
730 p8 = clk_hw_get_parent_by_index(hw, 4);
732 /* PLL9 is a fixed rate PLL */
733 p9_rate = clk_hw_get_rate(p9);
735 parent_req.rate = req->rate = min(req->rate, p9_rate);
736 if (req->rate == p9_rate) {
737 req->rate = req->best_parent_rate = p9_rate;
738 req->best_parent_hw = p9;
742 if (req->best_parent_hw == p9) {
743 /* Are we going back to a previously used rate? */
744 if (clk_hw_get_rate(p8) == req->rate)
745 req->best_parent_hw = p8;
747 req->best_parent_hw = p2;
748 } else if (req->best_parent_hw == p8) {
749 req->best_parent_hw = p2;
751 req->best_parent_hw = p8;
754 ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
758 req->rate = req->best_parent_rate = parent_req.rate;
763 static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
764 unsigned long parent_rate, u8 index)
766 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
770 /* Just mux it, we don't use the division or m/n hardware */
771 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
772 ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
776 return update_config(rcg);
779 static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
780 unsigned long parent_rate)
783 * We should never get here; clk_gfx3d_determine_rate() should always
784 * make us use a different parent than what we're currently using, so
785 * clk_gfx3d_set_rate_and_parent() should always be called.
790 const struct clk_ops clk_gfx3d_ops = {
791 .is_enabled = clk_rcg2_is_enabled,
792 .get_parent = clk_rcg2_get_parent,
793 .set_parent = clk_rcg2_set_parent,
794 .recalc_rate = clk_rcg2_recalc_rate,
795 .set_rate = clk_gfx3d_set_rate,
796 .set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
797 .determine_rate = clk_gfx3d_determine_rate,
799 EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
801 static int clk_rcg2_set_force_enable(struct clk_hw *hw)
803 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
804 const char *name = clk_hw_get_name(hw);
807 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
808 CMD_ROOT_EN, CMD_ROOT_EN);
812 /* wait for RCG to turn ON */
813 for (count = 500; count > 0; count--) {
814 if (clk_rcg2_is_enabled(hw))
820 pr_err("%s: RCG did not turn on\n", name);
824 static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
826 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
828 return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
833 clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
835 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
838 ret = clk_rcg2_set_force_enable(hw);
842 ret = clk_rcg2_configure(rcg, f);
846 return clk_rcg2_clear_force_enable(hw);
849 static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
850 unsigned long parent_rate)
852 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
853 const struct freq_tbl *f;
855 f = qcom_find_freq(rcg->freq_tbl, rate);
860 * In case clock is disabled, update the CFG, M, N and D registers
861 * and don't hit the update bit of CMD register.
863 if (!__clk_is_enabled(hw->clk))
864 return __clk_rcg2_configure(rcg, f);
866 return clk_rcg2_shared_force_enable_clear(hw, f);
869 static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
870 unsigned long rate, unsigned long parent_rate, u8 index)
872 return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
875 static int clk_rcg2_shared_enable(struct clk_hw *hw)
877 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
881 * Set the update bit because required configuration has already
882 * been written in clk_rcg2_shared_set_rate()
884 ret = clk_rcg2_set_force_enable(hw);
888 ret = update_config(rcg);
892 return clk_rcg2_clear_force_enable(hw);
895 static void clk_rcg2_shared_disable(struct clk_hw *hw)
897 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
901 * Store current configuration as switching to safe source would clear
902 * the SRC and DIV of CFG register
904 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
907 * Park the RCG at a safe configuration - sourced off of safe source.
908 * Force enable and disable the RCG while configuring it to safeguard
909 * against any update signal coming from the downstream clock.
910 * The current parent is still prepared and enabled at this point, and
911 * the safe source is always on while application processor subsystem
912 * is online. Therefore, the RCG can safely switch its parent.
914 clk_rcg2_set_force_enable(hw);
916 regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
917 rcg->safe_src_index << CFG_SRC_SEL_SHIFT);
921 clk_rcg2_clear_force_enable(hw);
923 /* Write back the stored configuration corresponding to current rate */
924 regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
927 const struct clk_ops clk_rcg2_shared_ops = {
928 .enable = clk_rcg2_shared_enable,
929 .disable = clk_rcg2_shared_disable,
930 .get_parent = clk_rcg2_get_parent,
931 .set_parent = clk_rcg2_set_parent,
932 .recalc_rate = clk_rcg2_recalc_rate,
933 .determine_rate = clk_rcg2_determine_rate,
934 .set_rate = clk_rcg2_shared_set_rate,
935 .set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
937 EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);