-rw-r--r--  drivers/clk/clk-composite.c     20
-rw-r--r--  drivers/clk/pxa/clk-pxa.c        2
-rw-r--r--  drivers/clk/st/clk-flexgen.c    20
-rw-r--r--  drivers/clk/st/clkgen-mux.c     14
-rw-r--r--  drivers/clk/tegra/clk-periph.c  14
5 files changed, 35 insertions, 35 deletions
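Every hunk below makes the same substitution: instead of copying only the struct clk pointer into the borrowed clk_hw (for example mux_hw->clk = hw->clk;), the call sites go through the __clk_hw_set_clk() helper. As a minimal sketch, assuming the helper matches its definition in drivers/clk/clk.h at this point in the tree, it propagates both the consumer handle and the clk_core pointer, so the basic clock ops invoked through these wrappers also see a valid ->core:

        /*
         * Sketch of the helper these call sites switch to (assumed to match
         * drivers/clk/clk.h): copy both fields the basic clk_ops may
         * dereference, not just ->clk.
         */
        static inline void __clk_hw_set_clk(struct clk_hw *dst, struct clk_hw *src)
        {
                dst->clk = src->clk;
                dst->core = src->core;
        }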
diff --git a/drivers/clk/clk-composite.c b/drivers/clk/clk-composite.c
index dee81b83c4b3..956b7e54fa1c 100644
--- a/drivers/clk/clk-composite.c
+++ b/drivers/clk/clk-composite.c
@@ -27,7 +27,7 @@ static u8 clk_composite_get_parent(struct clk_hw *hw)
         const struct clk_ops *mux_ops = composite->mux_ops;
         struct clk_hw *mux_hw = composite->mux_hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return mux_ops->get_parent(mux_hw);
 }
@@ -38,7 +38,7 @@ static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
         const struct clk_ops *mux_ops = composite->mux_ops;
         struct clk_hw *mux_hw = composite->mux_hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return mux_ops->set_parent(mux_hw, index);
 }
@@ -50,7 +50,7 @@ static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
         const struct clk_ops *rate_ops = composite->rate_ops;
         struct clk_hw *rate_hw = composite->rate_hw;
 
-        rate_hw->clk = hw->clk;
+        __clk_hw_set_clk(rate_hw, hw);
 
         return rate_ops->recalc_rate(rate_hw, parent_rate);
 }
@@ -74,7 +74,7 @@ static long clk_composite_determine_rate(struct clk_hw *hw, unsigned long rate,
         int i;
 
         if (rate_hw && rate_ops && rate_ops->determine_rate) {
-                rate_hw->clk = hw->clk;
+                __clk_hw_set_clk(rate_hw, hw);
                 return rate_ops->determine_rate(rate_hw, rate, min_rate,
                                                 max_rate,
                                                 best_parent_rate,
@@ -120,7 +120,7 @@ static long clk_composite_determine_rate(struct clk_hw *hw, unsigned long rate,
 
                 return best_rate;
         } else if (mux_hw && mux_ops && mux_ops->determine_rate) {
-                mux_hw->clk = hw->clk;
+                __clk_hw_set_clk(mux_hw, hw);
                 return mux_ops->determine_rate(mux_hw, rate, min_rate,
                                                max_rate, best_parent_rate,
                                                best_parent_p);
@@ -137,7 +137,7 @@ static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
         const struct clk_ops *rate_ops = composite->rate_ops;
         struct clk_hw *rate_hw = composite->rate_hw;
 
-        rate_hw->clk = hw->clk;
+        __clk_hw_set_clk(rate_hw, hw);
 
         return rate_ops->round_rate(rate_hw, rate, prate);
 }
@@ -149,7 +149,7 @@ static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
         const struct clk_ops *rate_ops = composite->rate_ops;
         struct clk_hw *rate_hw = composite->rate_hw;
 
-        rate_hw->clk = hw->clk;
+        __clk_hw_set_clk(rate_hw, hw);
 
         return rate_ops->set_rate(rate_hw, rate, parent_rate);
 }
@@ -160,7 +160,7 @@ static int clk_composite_is_enabled(struct clk_hw *hw)
         const struct clk_ops *gate_ops = composite->gate_ops;
         struct clk_hw *gate_hw = composite->gate_hw;
 
-        gate_hw->clk = hw->clk;
+        __clk_hw_set_clk(gate_hw, hw);
 
         return gate_ops->is_enabled(gate_hw);
 }
@@ -171,7 +171,7 @@ static int clk_composite_enable(struct clk_hw *hw)
         const struct clk_ops *gate_ops = composite->gate_ops;
         struct clk_hw *gate_hw = composite->gate_hw;
 
-        gate_hw->clk = hw->clk;
+        __clk_hw_set_clk(gate_hw, hw);
 
         return gate_ops->enable(gate_hw);
 }
@@ -182,7 +182,7 @@ static void clk_composite_disable(struct clk_hw *hw)
         const struct clk_ops *gate_ops = composite->gate_ops;
         struct clk_hw *gate_hw = composite->gate_hw;
 
-        gate_hw->clk = hw->clk;
+        __clk_hw_set_clk(gate_hw, hw);
 
         gate_ops->disable(gate_hw);
 }
diff --git a/drivers/clk/pxa/clk-pxa.c b/drivers/clk/pxa/clk-pxa.c
index 4e834753ab09..29cee9e8d4d9 100644
--- a/drivers/clk/pxa/clk-pxa.c
+++ b/drivers/clk/pxa/clk-pxa.c
@@ -46,7 +46,7 @@ static unsigned long cken_recalc_rate(struct clk_hw *hw,
                 fix = &pclk->lp;
         else
                 fix = &pclk->hp;
-        fix->hw.clk = hw->clk;
+        __clk_hw_set_clk(&fix->hw, hw);
         return clk_fixed_factor_ops.recalc_rate(&fix->hw, parent_rate);
 }
 
diff --git a/drivers/clk/st/clk-flexgen.c b/drivers/clk/st/clk-flexgen.c
index 3a484b3cb448..bf12a25eb3a2 100644
--- a/drivers/clk/st/clk-flexgen.c
+++ b/drivers/clk/st/clk-flexgen.c
@@ -37,8 +37,8 @@ static int flexgen_enable(struct clk_hw *hw)
         struct clk_hw *pgate_hw = &flexgen->pgate.hw;
         struct clk_hw *fgate_hw = &flexgen->fgate.hw;
 
-        pgate_hw->clk = hw->clk;
-        fgate_hw->clk = hw->clk;
+        __clk_hw_set_clk(pgate_hw, hw);
+        __clk_hw_set_clk(fgate_hw, hw);
 
         clk_gate_ops.enable(pgate_hw);
 
@@ -54,7 +54,7 @@ static void flexgen_disable(struct clk_hw *hw)
         struct clk_hw *fgate_hw = &flexgen->fgate.hw;
 
         /* disable only the final gate */
-        fgate_hw->clk = hw->clk;
+        __clk_hw_set_clk(fgate_hw, hw);
 
         clk_gate_ops.disable(fgate_hw);
 
@@ -66,7 +66,7 @@ static int flexgen_is_enabled(struct clk_hw *hw)
         struct flexgen *flexgen = to_flexgen(hw);
         struct clk_hw *fgate_hw = &flexgen->fgate.hw;
 
-        fgate_hw->clk = hw->clk;
+        __clk_hw_set_clk(fgate_hw, hw);
 
         if (!clk_gate_ops.is_enabled(fgate_hw))
                 return 0;
@@ -79,7 +79,7 @@ static u8 flexgen_get_parent(struct clk_hw *hw)
         struct flexgen *flexgen = to_flexgen(hw);
         struct clk_hw *mux_hw = &flexgen->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return clk_mux_ops.get_parent(mux_hw);
 }
@@ -89,7 +89,7 @@ static int flexgen_set_parent(struct clk_hw *hw, u8 index)
         struct flexgen *flexgen = to_flexgen(hw);
         struct clk_hw *mux_hw = &flexgen->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return clk_mux_ops.set_parent(mux_hw, index);
 }
@@ -124,8 +124,8 @@ unsigned long flexgen_recalc_rate(struct clk_hw *hw,
         struct clk_hw *fdiv_hw = &flexgen->fdiv.hw;
         unsigned long mid_rate;
 
-        pdiv_hw->clk = hw->clk;
-        fdiv_hw->clk = hw->clk;
+        __clk_hw_set_clk(pdiv_hw, hw);
+        __clk_hw_set_clk(fdiv_hw, hw);
 
         mid_rate = clk_divider_ops.recalc_rate(pdiv_hw, parent_rate);
 
@@ -141,8 +141,8 @@ static int flexgen_set_rate(struct clk_hw *hw, unsigned long rate,
         unsigned long div = 0;
         int ret = 0;
 
-        pdiv_hw->clk = hw->clk;
-        fdiv_hw->clk = hw->clk;
+        __clk_hw_set_clk(pdiv_hw, hw);
+        __clk_hw_set_clk(fdiv_hw, hw);
 
         div = clk_best_div(parent_rate, rate);
 
diff --git a/drivers/clk/st/clkgen-mux.c b/drivers/clk/st/clkgen-mux.c
index 79dc40b5cc68..9a15ec344a85 100644
--- a/drivers/clk/st/clkgen-mux.c
+++ b/drivers/clk/st/clkgen-mux.c
@@ -94,7 +94,7 @@ static int clkgena_divmux_enable(struct clk_hw *hw)
         unsigned long timeout;
         int ret = 0;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         ret = clk_mux_ops.set_parent(mux_hw, genamux->muxsel);
         if (ret)
@@ -116,7 +116,7 @@ static void clkgena_divmux_disable(struct clk_hw *hw)
         struct clkgena_divmux *genamux = to_clkgena_divmux(hw);
         struct clk_hw *mux_hw = &genamux->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         clk_mux_ops.set_parent(mux_hw, CKGAX_CLKOPSRC_SWITCH_OFF);
 }
@@ -126,7 +126,7 @@ static int clkgena_divmux_is_enabled(struct clk_hw *hw)
         struct clkgena_divmux *genamux = to_clkgena_divmux(hw);
         struct clk_hw *mux_hw = &genamux->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return (s8)clk_mux_ops.get_parent(mux_hw) > 0;
 }
@@ -136,7 +136,7 @@ u8 clkgena_divmux_get_parent(struct clk_hw *hw)
         struct clkgena_divmux *genamux = to_clkgena_divmux(hw);
         struct clk_hw *mux_hw = &genamux->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         genamux->muxsel = clk_mux_ops.get_parent(mux_hw);
         if ((s8)genamux->muxsel < 0) {
@@ -174,7 +174,7 @@ unsigned long clkgena_divmux_recalc_rate(struct clk_hw *hw,
         struct clkgena_divmux *genamux = to_clkgena_divmux(hw);
         struct clk_hw *div_hw = &genamux->div[genamux->muxsel].hw;
 
-        div_hw->clk = hw->clk;
+        __clk_hw_set_clk(div_hw, hw);
 
         return clk_divider_ops.recalc_rate(div_hw, parent_rate);
 }
@@ -185,7 +185,7 @@ static int clkgena_divmux_set_rate(struct clk_hw *hw, unsigned long rate,
         struct clkgena_divmux *genamux = to_clkgena_divmux(hw);
         struct clk_hw *div_hw = &genamux->div[genamux->muxsel].hw;
 
-        div_hw->clk = hw->clk;
+        __clk_hw_set_clk(div_hw, hw);
 
         return clk_divider_ops.set_rate(div_hw, rate, parent_rate);
 }
@@ -196,7 +196,7 @@ static long clkgena_divmux_round_rate(struct clk_hw *hw, unsigned long rate,
         struct clkgena_divmux *genamux = to_clkgena_divmux(hw);
         struct clk_hw *div_hw = &genamux->div[genamux->muxsel].hw;
 
-        div_hw->clk = hw->clk;
+        __clk_hw_set_clk(div_hw, hw);
 
         return clk_divider_ops.round_rate(div_hw, rate, prate);
 }
diff --git a/drivers/clk/tegra/clk-periph.c b/drivers/clk/tegra/clk-periph.c
index 9e899c18af86..d84ae49d0e05 100644
--- a/drivers/clk/tegra/clk-periph.c
+++ b/drivers/clk/tegra/clk-periph.c
@@ -28,7 +28,7 @@ static u8 clk_periph_get_parent(struct clk_hw *hw)
         const struct clk_ops *mux_ops = periph->mux_ops;
         struct clk_hw *mux_hw = &periph->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return mux_ops->get_parent(mux_hw);
 }
@@ -39,7 +39,7 @@ static int clk_periph_set_parent(struct clk_hw *hw, u8 index)
         const struct clk_ops *mux_ops = periph->mux_ops;
         struct clk_hw *mux_hw = &periph->mux.hw;
 
-        mux_hw->clk = hw->clk;
+        __clk_hw_set_clk(mux_hw, hw);
 
         return mux_ops->set_parent(mux_hw, index);
 }
@@ -51,7 +51,7 @@ static unsigned long clk_periph_recalc_rate(struct clk_hw *hw,
         const struct clk_ops *div_ops = periph->div_ops;
         struct clk_hw *div_hw = &periph->divider.hw;
 
-        div_hw->clk = hw->clk;
+        __clk_hw_set_clk(div_hw, hw);
 
         return div_ops->recalc_rate(div_hw, parent_rate);
 }
@@ -63,7 +63,7 @@ static long clk_periph_round_rate(struct clk_hw *hw, unsigned long rate,
         const struct clk_ops *div_ops = periph->div_ops;
         struct clk_hw *div_hw = &periph->divider.hw;
 
-        div_hw->clk = hw->clk;
+        __clk_hw_set_clk(div_hw, hw);
 
         return div_ops->round_rate(div_hw, rate, prate);
 }
@@ -75,7 +75,7 @@ static int clk_periph_set_rate(struct clk_hw *hw, unsigned long rate,
         const struct clk_ops *div_ops = periph->div_ops;
         struct clk_hw *div_hw = &periph->divider.hw;
 
-        div_hw->clk = hw->clk;
+        __clk_hw_set_clk(div_hw, hw);
 
         return div_ops->set_rate(div_hw, rate, parent_rate);
 }
@@ -86,7 +86,7 @@ static int clk_periph_is_enabled(struct clk_hw *hw)
         const struct clk_ops *gate_ops = periph->gate_ops;
         struct clk_hw *gate_hw = &periph->gate.hw;
 
-        gate_hw->clk = hw->clk;
+        __clk_hw_set_clk(gate_hw, hw);
 
         return gate_ops->is_enabled(gate_hw);
 }
@@ -97,7 +97,7 @@ static int clk_periph_enable(struct clk_hw *hw)
         const struct clk_ops *gate_ops = periph->gate_ops;
         struct clk_hw *gate_hw = &periph->gate.hw;
 
-        gate_hw->clk = hw->clk;
+        __clk_hw_set_clk(gate_hw, hw);
 
         return gate_ops->enable(gate_hw);
 }