author | Jaehoon Chung <jh80.chung@samsung.com> | 2017-01-16 01:31:34 -0500 |
---|---|---|
committer | Bjorn Helgaas <bhelgaas@google.com> | 2017-01-28 16:21:06 -0500 |
commit | 4e0a90b381bd8bddf1644591dc585cf4c6ea652e (patch) | |
tree | a6bc65780053c9b85d84b1a6a5053ff8602fc343 | |
parent | 7ce7d89f48834cefece7804d38fc5d85382edf77 (diff) |
PCI: exynos: Rename all pointer names from "exynos_pcie" to "ep"
Rename the "exynos_pcie" pointer variables to the shorter "ep". This saves
10 characters on every line that dereferences the pointer, making it easier
to keep lines within the 80-column limit.
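For illustration, the shape of the change on one of the short helpers (taken
directly from the corresponding hunk in the diff below; no new names are
introduced):

    /* before: long pointer name pushes calls toward the 80-column limit */
    static void exynos_pcie_assert_phy_reset(struct exynos_pcie *exynos_pcie)
    {
    	exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_MAC_RESET);
    	exynos_blk_writel(exynos_pcie, 1, PCIE_PHY_GLOBAL_RESET);
    }

    /* after: "ep" leaves room for register names on a single line */
    static void exynos_pcie_assert_phy_reset(struct exynos_pcie *ep)
    {
    	exynos_blk_writel(ep, 0, PCIE_PHY_MAC_RESET);
    	exynos_blk_writel(ep, 1, PCIE_PHY_GLOBAL_RESET);
    }

The same mechanical substitution is applied throughout the driver.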
Signed-off-by: Jaehoon Chung <jh80.chung@samsung.com>
Signed-off-by: Bjorn Helgaas <bhelgaas@google.com>
Reviewed-by: Pankaj Dubey <pankaj.dubey@samsung.com>
Acked-by: Krzysztof Kozlowski <krzk@kernel.org>
Acked-by: Jingoo Han <jingoohan1@gmail.com>
-rw-r--r-- | drivers/pci/host/pci-exynos.c | 348 |
1 file changed, 173 insertions, 175 deletions
diff --git a/drivers/pci/host/pci-exynos.c b/drivers/pci/host/pci-exynos.c
index f1c544bb8b68..2e99ff5b5b93 100644
--- a/drivers/pci/host/pci-exynos.c
+++ b/drivers/pci/host/pci-exynos.c
@@ -102,212 +102,210 @@ struct exynos_pcie { | |||
102 | #define PCIE_PHY_TRSV3_PD_TSV (0x1 << 7) | 102 | #define PCIE_PHY_TRSV3_PD_TSV (0x1 << 7) |
103 | #define PCIE_PHY_TRSV3_LVCC 0x31c | 103 | #define PCIE_PHY_TRSV3_LVCC 0x31c |
104 | 104 | ||
105 | static void exynos_elb_writel(struct exynos_pcie *exynos_pcie, u32 val, u32 reg) | 105 | static void exynos_elb_writel(struct exynos_pcie *ep, u32 val, u32 reg) |
106 | { | 106 | { |
107 | writel(val, exynos_pcie->elbi_base + reg); | 107 | writel(val, ep->elbi_base + reg); |
108 | } | 108 | } |
109 | 109 | ||
110 | static u32 exynos_elb_readl(struct exynos_pcie *exynos_pcie, u32 reg) | 110 | static u32 exynos_elb_readl(struct exynos_pcie *ep, u32 reg) |
111 | { | 111 | { |
112 | return readl(exynos_pcie->elbi_base + reg); | 112 | return readl(ep->elbi_base + reg); |
113 | } | 113 | } |
114 | 114 | ||
115 | static void exynos_phy_writel(struct exynos_pcie *exynos_pcie, u32 val, u32 reg) | 115 | static void exynos_phy_writel(struct exynos_pcie *ep, u32 val, u32 reg) |
116 | { | 116 | { |
117 | writel(val, exynos_pcie->phy_base + reg); | 117 | writel(val, ep->phy_base + reg); |
118 | } | 118 | } |
119 | 119 | ||
120 | static u32 exynos_phy_readl(struct exynos_pcie *exynos_pcie, u32 reg) | 120 | static u32 exynos_phy_readl(struct exynos_pcie *ep, u32 reg) |
121 | { | 121 | { |
122 | return readl(exynos_pcie->phy_base + reg); | 122 | return readl(ep->phy_base + reg); |
123 | } | 123 | } |
124 | 124 | ||
125 | static void exynos_blk_writel(struct exynos_pcie *exynos_pcie, u32 val, u32 reg) | 125 | static void exynos_blk_writel(struct exynos_pcie *ep, u32 val, u32 reg) |
126 | { | 126 | { |
127 | writel(val, exynos_pcie->block_base + reg); | 127 | writel(val, ep->block_base + reg); |
128 | } | 128 | } |
129 | 129 | ||
130 | static u32 exynos_blk_readl(struct exynos_pcie *exynos_pcie, u32 reg) | 130 | static u32 exynos_blk_readl(struct exynos_pcie *ep, u32 reg) |
131 | { | 131 | { |
132 | return readl(exynos_pcie->block_base + reg); | 132 | return readl(ep->block_base + reg); |
133 | } | 133 | } |
134 | 134 | ||
135 | static void exynos_pcie_sideband_dbi_w_mode(struct exynos_pcie *exynos_pcie, | 135 | static void exynos_pcie_sideband_dbi_w_mode(struct exynos_pcie *ep, bool on) |
136 | bool on) | ||
137 | { | 136 | { |
138 | u32 val; | 137 | u32 val; |
139 | 138 | ||
140 | if (on) { | 139 | if (on) { |
141 | val = exynos_elb_readl(exynos_pcie, PCIE_ELBI_SLV_AWMISC); | 140 | val = exynos_elb_readl(ep, PCIE_ELBI_SLV_AWMISC); |
142 | val |= PCIE_ELBI_SLV_DBI_ENABLE; | 141 | val |= PCIE_ELBI_SLV_DBI_ENABLE; |
143 | exynos_elb_writel(exynos_pcie, val, PCIE_ELBI_SLV_AWMISC); | 142 | exynos_elb_writel(ep, val, PCIE_ELBI_SLV_AWMISC); |
144 | } else { | 143 | } else { |
145 | val = exynos_elb_readl(exynos_pcie, PCIE_ELBI_SLV_AWMISC); | 144 | val = exynos_elb_readl(ep, PCIE_ELBI_SLV_AWMISC); |
146 | val &= ~PCIE_ELBI_SLV_DBI_ENABLE; | 145 | val &= ~PCIE_ELBI_SLV_DBI_ENABLE; |
147 | exynos_elb_writel(exynos_pcie, val, PCIE_ELBI_SLV_AWMISC); | 146 | exynos_elb_writel(ep, val, PCIE_ELBI_SLV_AWMISC); |
148 | } | 147 | } |
149 | } | 148 | } |
150 | 149 | ||
151 | static void exynos_pcie_sideband_dbi_r_mode(struct exynos_pcie *exynos_pcie, | 150 | static void exynos_pcie_sideband_dbi_r_mode(struct exynos_pcie *ep, bool on) |
152 | bool on) | ||
153 | { | 151 | { |
154 | u32 val; | 152 | u32 val; |
155 | 153 | ||
156 | if (on) { | 154 | if (on) { |
157 | val = exynos_elb_readl(exynos_pcie, PCIE_ELBI_SLV_ARMISC); | 155 | val = exynos_elb_readl(ep, PCIE_ELBI_SLV_ARMISC); |
158 | val |= PCIE_ELBI_SLV_DBI_ENABLE; | 156 | val |= PCIE_ELBI_SLV_DBI_ENABLE; |
159 | exynos_elb_writel(exynos_pcie, val, PCIE_ELBI_SLV_ARMISC); | 157 | exynos_elb_writel(ep, val, PCIE_ELBI_SLV_ARMISC); |
160 | } else { | 158 | } else { |
161 | val = exynos_elb_readl(exynos_pcie, PCIE_ELBI_SLV_ARMISC); | 159 | val = exynos_elb_readl(ep, PCIE_ELBI_SLV_ARMISC); |
162 | val &= ~PCIE_ELBI_SLV_DBI_ENABLE; | 160 | val &= ~PCIE_ELBI_SLV_DBI_ENABLE; |
163 | exynos_elb_writel(exynos_pcie, val, PCIE_ELBI_SLV_ARMISC); | 161 | exynos_elb_writel(ep, val, PCIE_ELBI_SLV_ARMISC); |
164 | } | 162 | } |
165 | } | 163 | } |
166 | 164 | ||
167 | static void exynos_pcie_assert_core_reset(struct exynos_pcie *exynos_pcie) | 165 | static void exynos_pcie_assert_core_reset(struct exynos_pcie *ep) |
168 | { | 166 | { |
169 | u32 val; | 167 | u32 val; |
170 | 168 | ||
171 | val = exynos_elb_readl(exynos_pcie, PCIE_CORE_RESET); | 169 | val = exynos_elb_readl(ep, PCIE_CORE_RESET); |
172 | val &= ~PCIE_CORE_RESET_ENABLE; | 170 | val &= ~PCIE_CORE_RESET_ENABLE; |
173 | exynos_elb_writel(exynos_pcie, val, PCIE_CORE_RESET); | 171 | exynos_elb_writel(ep, val, PCIE_CORE_RESET); |
174 | exynos_elb_writel(exynos_pcie, 0, PCIE_PWR_RESET); | 172 | exynos_elb_writel(ep, 0, PCIE_PWR_RESET); |
175 | exynos_elb_writel(exynos_pcie, 0, PCIE_STICKY_RESET); | 173 | exynos_elb_writel(ep, 0, PCIE_STICKY_RESET); |
176 | exynos_elb_writel(exynos_pcie, 0, PCIE_NONSTICKY_RESET); | 174 | exynos_elb_writel(ep, 0, PCIE_NONSTICKY_RESET); |
177 | } | 175 | } |
178 | 176 | ||
179 | static void exynos_pcie_deassert_core_reset(struct exynos_pcie *exynos_pcie) | 177 | static void exynos_pcie_deassert_core_reset(struct exynos_pcie *ep) |
180 | { | 178 | { |
181 | u32 val; | 179 | u32 val; |
182 | 180 | ||
183 | val = exynos_elb_readl(exynos_pcie, PCIE_CORE_RESET); | 181 | val = exynos_elb_readl(ep, PCIE_CORE_RESET); |
184 | val |= PCIE_CORE_RESET_ENABLE; | 182 | val |= PCIE_CORE_RESET_ENABLE; |
185 | 183 | ||
186 | exynos_elb_writel(exynos_pcie, val, PCIE_CORE_RESET); | 184 | exynos_elb_writel(ep, val, PCIE_CORE_RESET); |
187 | exynos_elb_writel(exynos_pcie, 1, PCIE_STICKY_RESET); | 185 | exynos_elb_writel(ep, 1, PCIE_STICKY_RESET); |
188 | exynos_elb_writel(exynos_pcie, 1, PCIE_NONSTICKY_RESET); | 186 | exynos_elb_writel(ep, 1, PCIE_NONSTICKY_RESET); |
189 | exynos_elb_writel(exynos_pcie, 1, PCIE_APP_INIT_RESET); | 187 | exynos_elb_writel(ep, 1, PCIE_APP_INIT_RESET); |
190 | exynos_elb_writel(exynos_pcie, 0, PCIE_APP_INIT_RESET); | 188 | exynos_elb_writel(ep, 0, PCIE_APP_INIT_RESET); |
191 | exynos_blk_writel(exynos_pcie, 1, PCIE_PHY_MAC_RESET); | 189 | exynos_blk_writel(ep, 1, PCIE_PHY_MAC_RESET); |
192 | } | 190 | } |
193 | 191 | ||
194 | static void exynos_pcie_assert_phy_reset(struct exynos_pcie *exynos_pcie) | 192 | static void exynos_pcie_assert_phy_reset(struct exynos_pcie *ep) |
195 | { | 193 | { |
196 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_MAC_RESET); | 194 | exynos_blk_writel(ep, 0, PCIE_PHY_MAC_RESET); |
197 | exynos_blk_writel(exynos_pcie, 1, PCIE_PHY_GLOBAL_RESET); | 195 | exynos_blk_writel(ep, 1, PCIE_PHY_GLOBAL_RESET); |
198 | } | 196 | } |
199 | 197 | ||
200 | static void exynos_pcie_deassert_phy_reset(struct exynos_pcie *exynos_pcie) | 198 | static void exynos_pcie_deassert_phy_reset(struct exynos_pcie *ep) |
201 | { | 199 | { |
202 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_GLOBAL_RESET); | 200 | exynos_blk_writel(ep, 0, PCIE_PHY_GLOBAL_RESET); |
203 | exynos_elb_writel(exynos_pcie, 1, PCIE_PWR_RESET); | 201 | exynos_elb_writel(ep, 1, PCIE_PWR_RESET); |
204 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_COMMON_RESET); | 202 | exynos_blk_writel(ep, 0, PCIE_PHY_COMMON_RESET); |
205 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_CMN_REG); | 203 | exynos_blk_writel(ep, 0, PCIE_PHY_CMN_REG); |
206 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_TRSVREG_RESET); | 204 | exynos_blk_writel(ep, 0, PCIE_PHY_TRSVREG_RESET); |
207 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_TRSV_RESET); | 205 | exynos_blk_writel(ep, 0, PCIE_PHY_TRSV_RESET); |
208 | } | 206 | } |
209 | 207 | ||
210 | static void exynos_pcie_power_on_phy(struct exynos_pcie *exynos_pcie) | 208 | static void exynos_pcie_power_on_phy(struct exynos_pcie *ep) |
211 | { | 209 | { |
212 | u32 val; | 210 | u32 val; |
213 | 211 | ||
214 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_COMMON_POWER); | 212 | val = exynos_phy_readl(ep, PCIE_PHY_COMMON_POWER); |
215 | val &= ~PCIE_PHY_COMMON_PD_CMN; | 213 | val &= ~PCIE_PHY_COMMON_PD_CMN; |
216 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_COMMON_POWER); | 214 | exynos_phy_writel(ep, val, PCIE_PHY_COMMON_POWER); |
217 | 215 | ||
218 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV0_POWER); | 216 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV0_POWER); |
219 | val &= ~PCIE_PHY_TRSV0_PD_TSV; | 217 | val &= ~PCIE_PHY_TRSV0_PD_TSV; |
220 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV0_POWER); | 218 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV0_POWER); |
221 | 219 | ||
222 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV1_POWER); | 220 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV1_POWER); |
223 | val &= ~PCIE_PHY_TRSV1_PD_TSV; | 221 | val &= ~PCIE_PHY_TRSV1_PD_TSV; |
224 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV1_POWER); | 222 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV1_POWER); |
225 | 223 | ||
226 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV2_POWER); | 224 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV2_POWER); |
227 | val &= ~PCIE_PHY_TRSV2_PD_TSV; | 225 | val &= ~PCIE_PHY_TRSV2_PD_TSV; |
228 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV2_POWER); | 226 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV2_POWER); |
229 | 227 | ||
230 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV3_POWER); | 228 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV3_POWER); |
231 | val &= ~PCIE_PHY_TRSV3_PD_TSV; | 229 | val &= ~PCIE_PHY_TRSV3_PD_TSV; |
232 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV3_POWER); | 230 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV3_POWER); |
233 | } | 231 | } |
234 | 232 | ||
235 | static void exynos_pcie_power_off_phy(struct exynos_pcie *exynos_pcie) | 233 | static void exynos_pcie_power_off_phy(struct exynos_pcie *ep) |
236 | { | 234 | { |
237 | u32 val; | 235 | u32 val; |
238 | 236 | ||
239 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_COMMON_POWER); | 237 | val = exynos_phy_readl(ep, PCIE_PHY_COMMON_POWER); |
240 | val |= PCIE_PHY_COMMON_PD_CMN; | 238 | val |= PCIE_PHY_COMMON_PD_CMN; |
241 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_COMMON_POWER); | 239 | exynos_phy_writel(ep, val, PCIE_PHY_COMMON_POWER); |
242 | 240 | ||
243 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV0_POWER); | 241 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV0_POWER); |
244 | val |= PCIE_PHY_TRSV0_PD_TSV; | 242 | val |= PCIE_PHY_TRSV0_PD_TSV; |
245 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV0_POWER); | 243 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV0_POWER); |
246 | 244 | ||
247 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV1_POWER); | 245 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV1_POWER); |
248 | val |= PCIE_PHY_TRSV1_PD_TSV; | 246 | val |= PCIE_PHY_TRSV1_PD_TSV; |
249 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV1_POWER); | 247 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV1_POWER); |
250 | 248 | ||
251 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV2_POWER); | 249 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV2_POWER); |
252 | val |= PCIE_PHY_TRSV2_PD_TSV; | 250 | val |= PCIE_PHY_TRSV2_PD_TSV; |
253 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV2_POWER); | 251 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV2_POWER); |
254 | 252 | ||
255 | val = exynos_phy_readl(exynos_pcie, PCIE_PHY_TRSV3_POWER); | 253 | val = exynos_phy_readl(ep, PCIE_PHY_TRSV3_POWER); |
256 | val |= PCIE_PHY_TRSV3_PD_TSV; | 254 | val |= PCIE_PHY_TRSV3_PD_TSV; |
257 | exynos_phy_writel(exynos_pcie, val, PCIE_PHY_TRSV3_POWER); | 255 | exynos_phy_writel(ep, val, PCIE_PHY_TRSV3_POWER); |
258 | } | 256 | } |
259 | 257 | ||
260 | static void exynos_pcie_init_phy(struct exynos_pcie *exynos_pcie) | 258 | static void exynos_pcie_init_phy(struct exynos_pcie *ep) |
261 | { | 259 | { |
262 | /* DCC feedback control off */ | 260 | /* DCC feedback control off */ |
263 | exynos_phy_writel(exynos_pcie, 0x29, PCIE_PHY_DCC_FEEDBACK); | 261 | exynos_phy_writel(ep, 0x29, PCIE_PHY_DCC_FEEDBACK); |
264 | 262 | ||
265 | /* set TX/RX impedance */ | 263 | /* set TX/RX impedance */ |
266 | exynos_phy_writel(exynos_pcie, 0xd5, PCIE_PHY_IMPEDANCE); | 264 | exynos_phy_writel(ep, 0xd5, PCIE_PHY_IMPEDANCE); |
267 | 265 | ||
268 | /* set 50Mhz PHY clock */ | 266 | /* set 50Mhz PHY clock */ |
269 | exynos_phy_writel(exynos_pcie, 0x14, PCIE_PHY_PLL_DIV_0); | 267 | exynos_phy_writel(ep, 0x14, PCIE_PHY_PLL_DIV_0); |
270 | exynos_phy_writel(exynos_pcie, 0x12, PCIE_PHY_PLL_DIV_1); | 268 | exynos_phy_writel(ep, 0x12, PCIE_PHY_PLL_DIV_1); |
271 | 269 | ||
272 | /* set TX Differential output for lane 0 */ | 270 | /* set TX Differential output for lane 0 */ |
273 | exynos_phy_writel(exynos_pcie, 0x7f, PCIE_PHY_TRSV0_DRV_LVL); | 271 | exynos_phy_writel(ep, 0x7f, PCIE_PHY_TRSV0_DRV_LVL); |
274 | 272 | ||
275 | /* set TX Pre-emphasis Level Control for lane 0 to minimum */ | 273 | /* set TX Pre-emphasis Level Control for lane 0 to minimum */ |
276 | exynos_phy_writel(exynos_pcie, 0x0, PCIE_PHY_TRSV0_EMP_LVL); | 274 | exynos_phy_writel(ep, 0x0, PCIE_PHY_TRSV0_EMP_LVL); |
277 | 275 | ||
278 | /* set RX clock and data recovery bandwidth */ | 276 | /* set RX clock and data recovery bandwidth */ |
279 | exynos_phy_writel(exynos_pcie, 0xe7, PCIE_PHY_PLL_BIAS); | 277 | exynos_phy_writel(ep, 0xe7, PCIE_PHY_PLL_BIAS); |
280 | exynos_phy_writel(exynos_pcie, 0x82, PCIE_PHY_TRSV0_RXCDR); | 278 | exynos_phy_writel(ep, 0x82, PCIE_PHY_TRSV0_RXCDR); |
281 | exynos_phy_writel(exynos_pcie, 0x82, PCIE_PHY_TRSV1_RXCDR); | 279 | exynos_phy_writel(ep, 0x82, PCIE_PHY_TRSV1_RXCDR); |
282 | exynos_phy_writel(exynos_pcie, 0x82, PCIE_PHY_TRSV2_RXCDR); | 280 | exynos_phy_writel(ep, 0x82, PCIE_PHY_TRSV2_RXCDR); |
283 | exynos_phy_writel(exynos_pcie, 0x82, PCIE_PHY_TRSV3_RXCDR); | 281 | exynos_phy_writel(ep, 0x82, PCIE_PHY_TRSV3_RXCDR); |
284 | 282 | ||
285 | /* change TX Pre-emphasis Level Control for lanes */ | 283 | /* change TX Pre-emphasis Level Control for lanes */ |
286 | exynos_phy_writel(exynos_pcie, 0x39, PCIE_PHY_TRSV0_EMP_LVL); | 284 | exynos_phy_writel(ep, 0x39, PCIE_PHY_TRSV0_EMP_LVL); |
287 | exynos_phy_writel(exynos_pcie, 0x39, PCIE_PHY_TRSV1_EMP_LVL); | 285 | exynos_phy_writel(ep, 0x39, PCIE_PHY_TRSV1_EMP_LVL); |
288 | exynos_phy_writel(exynos_pcie, 0x39, PCIE_PHY_TRSV2_EMP_LVL); | 286 | exynos_phy_writel(ep, 0x39, PCIE_PHY_TRSV2_EMP_LVL); |
289 | exynos_phy_writel(exynos_pcie, 0x39, PCIE_PHY_TRSV3_EMP_LVL); | 287 | exynos_phy_writel(ep, 0x39, PCIE_PHY_TRSV3_EMP_LVL); |
290 | 288 | ||
291 | /* set LVCC */ | 289 | /* set LVCC */ |
292 | exynos_phy_writel(exynos_pcie, 0x20, PCIE_PHY_TRSV0_LVCC); | 290 | exynos_phy_writel(ep, 0x20, PCIE_PHY_TRSV0_LVCC); |
293 | exynos_phy_writel(exynos_pcie, 0xa0, PCIE_PHY_TRSV1_LVCC); | 291 | exynos_phy_writel(ep, 0xa0, PCIE_PHY_TRSV1_LVCC); |
294 | exynos_phy_writel(exynos_pcie, 0xa0, PCIE_PHY_TRSV2_LVCC); | 292 | exynos_phy_writel(ep, 0xa0, PCIE_PHY_TRSV2_LVCC); |
295 | exynos_phy_writel(exynos_pcie, 0xa0, PCIE_PHY_TRSV3_LVCC); | 293 | exynos_phy_writel(ep, 0xa0, PCIE_PHY_TRSV3_LVCC); |
296 | } | 294 | } |
297 | 295 | ||
298 | static void exynos_pcie_assert_reset(struct exynos_pcie *exynos_pcie) | 296 | static void exynos_pcie_assert_reset(struct exynos_pcie *ep) |
299 | { | 297 | { |
300 | struct pcie_port *pp = &exynos_pcie->pp; | 298 | struct pcie_port *pp = &ep->pp; |
301 | struct device *dev = pp->dev; | 299 | struct device *dev = pp->dev; |
302 | 300 | ||
303 | if (exynos_pcie->reset_gpio >= 0) | 301 | if (ep->reset_gpio >= 0) |
304 | devm_gpio_request_one(dev, exynos_pcie->reset_gpio, | 302 | devm_gpio_request_one(dev, ep->reset_gpio, |
305 | GPIOF_OUT_INIT_HIGH, "RESET"); | 303 | GPIOF_OUT_INIT_HIGH, "RESET"); |
306 | } | 304 | } |
307 | 305 | ||
308 | static int exynos_pcie_establish_link(struct exynos_pcie *exynos_pcie) | 306 | static int exynos_pcie_establish_link(struct exynos_pcie *ep) |
309 | { | 307 | { |
310 | struct pcie_port *pp = &exynos_pcie->pp; | 308 | struct pcie_port *pp = &ep->pp; |
311 | struct device *dev = pp->dev; | 309 | struct device *dev = pp->dev; |
312 | u32 val; | 310 | u32 val; |
313 | 311 | ||
@@ -316,142 +314,142 @@ static int exynos_pcie_establish_link(struct exynos_pcie *exynos_pcie) | |||
316 | return 0; | 314 | return 0; |
317 | } | 315 | } |
318 | 316 | ||
319 | exynos_pcie_assert_core_reset(exynos_pcie); | 317 | exynos_pcie_assert_core_reset(ep); |
320 | exynos_pcie_assert_phy_reset(exynos_pcie); | 318 | exynos_pcie_assert_phy_reset(ep); |
321 | exynos_pcie_deassert_phy_reset(exynos_pcie); | 319 | exynos_pcie_deassert_phy_reset(ep); |
322 | exynos_pcie_power_on_phy(exynos_pcie); | 320 | exynos_pcie_power_on_phy(ep); |
323 | exynos_pcie_init_phy(exynos_pcie); | 321 | exynos_pcie_init_phy(ep); |
324 | 322 | ||
325 | /* pulse for common reset */ | 323 | /* pulse for common reset */ |
326 | exynos_blk_writel(exynos_pcie, 1, PCIE_PHY_COMMON_RESET); | 324 | exynos_blk_writel(ep, 1, PCIE_PHY_COMMON_RESET); |
327 | udelay(500); | 325 | udelay(500); |
328 | exynos_blk_writel(exynos_pcie, 0, PCIE_PHY_COMMON_RESET); | 326 | exynos_blk_writel(ep, 0, PCIE_PHY_COMMON_RESET); |
329 | 327 | ||
330 | exynos_pcie_deassert_core_reset(exynos_pcie); | 328 | exynos_pcie_deassert_core_reset(ep); |
331 | dw_pcie_setup_rc(pp); | 329 | dw_pcie_setup_rc(pp); |
332 | exynos_pcie_assert_reset(exynos_pcie); | 330 | exynos_pcie_assert_reset(ep); |
333 | 331 | ||
334 | /* assert LTSSM enable */ | 332 | /* assert LTSSM enable */ |
335 | exynos_elb_writel(exynos_pcie, PCIE_ELBI_LTSSM_ENABLE, | 333 | exynos_elb_writel(ep, PCIE_ELBI_LTSSM_ENABLE, |
336 | PCIE_APP_LTSSM_ENABLE); | 334 | PCIE_APP_LTSSM_ENABLE); |
337 | 335 | ||
338 | /* check if the link is up or not */ | 336 | /* check if the link is up or not */ |
339 | if (!dw_pcie_wait_for_link(pp)) | 337 | if (!dw_pcie_wait_for_link(pp)) |
340 | return 0; | 338 | return 0; |
341 | 339 | ||
342 | while (exynos_phy_readl(exynos_pcie, PCIE_PHY_PLL_LOCKED) == 0) { | 340 | while (exynos_phy_readl(ep, PCIE_PHY_PLL_LOCKED) == 0) { |
343 | val = exynos_blk_readl(exynos_pcie, PCIE_PHY_PLL_LOCKED); | 341 | val = exynos_blk_readl(ep, PCIE_PHY_PLL_LOCKED); |
344 | dev_info(dev, "PLL Locked: 0x%x\n", val); | 342 | dev_info(dev, "PLL Locked: 0x%x\n", val); |
345 | } | 343 | } |
346 | exynos_pcie_power_off_phy(exynos_pcie); | 344 | exynos_pcie_power_off_phy(ep); |
347 | return -ETIMEDOUT; | 345 | return -ETIMEDOUT; |
348 | } | 346 | } |
349 | 347 | ||
350 | static void exynos_pcie_clear_irq_pulse(struct exynos_pcie *exynos_pcie) | 348 | static void exynos_pcie_clear_irq_pulse(struct exynos_pcie *ep) |
351 | { | 349 | { |
352 | u32 val; | 350 | u32 val; |
353 | 351 | ||
354 | val = exynos_elb_readl(exynos_pcie, PCIE_IRQ_PULSE); | 352 | val = exynos_elb_readl(ep, PCIE_IRQ_PULSE); |
355 | exynos_elb_writel(exynos_pcie, val, PCIE_IRQ_PULSE); | 353 | exynos_elb_writel(ep, val, PCIE_IRQ_PULSE); |
356 | } | 354 | } |
357 | 355 | ||
358 | static void exynos_pcie_enable_irq_pulse(struct exynos_pcie *exynos_pcie) | 356 | static void exynos_pcie_enable_irq_pulse(struct exynos_pcie *ep) |
359 | { | 357 | { |
360 | u32 val; | 358 | u32 val; |
361 | 359 | ||
362 | /* enable INTX interrupt */ | 360 | /* enable INTX interrupt */ |
363 | val = IRQ_INTA_ASSERT | IRQ_INTB_ASSERT | | 361 | val = IRQ_INTA_ASSERT | IRQ_INTB_ASSERT | |
364 | IRQ_INTC_ASSERT | IRQ_INTD_ASSERT; | 362 | IRQ_INTC_ASSERT | IRQ_INTD_ASSERT; |
365 | exynos_elb_writel(exynos_pcie, val, PCIE_IRQ_EN_PULSE); | 363 | exynos_elb_writel(ep, val, PCIE_IRQ_EN_PULSE); |
366 | } | 364 | } |
367 | 365 | ||
368 | static irqreturn_t exynos_pcie_irq_handler(int irq, void *arg) | 366 | static irqreturn_t exynos_pcie_irq_handler(int irq, void *arg) |
369 | { | 367 | { |
370 | struct exynos_pcie *exynos_pcie = arg; | 368 | struct exynos_pcie *ep = arg; |
371 | 369 | ||
372 | exynos_pcie_clear_irq_pulse(exynos_pcie); | 370 | exynos_pcie_clear_irq_pulse(ep); |
373 | return IRQ_HANDLED; | 371 | return IRQ_HANDLED; |
374 | } | 372 | } |
375 | 373 | ||
376 | static irqreturn_t exynos_pcie_msi_irq_handler(int irq, void *arg) | 374 | static irqreturn_t exynos_pcie_msi_irq_handler(int irq, void *arg) |
377 | { | 375 | { |
378 | struct exynos_pcie *exynos_pcie = arg; | 376 | struct exynos_pcie *ep = arg; |
379 | struct pcie_port *pp = &exynos_pcie->pp; | 377 | struct pcie_port *pp = &ep->pp; |
380 | 378 | ||
381 | return dw_handle_msi_irq(pp); | 379 | return dw_handle_msi_irq(pp); |
382 | } | 380 | } |
383 | 381 | ||
384 | static void exynos_pcie_msi_init(struct exynos_pcie *exynos_pcie) | 382 | static void exynos_pcie_msi_init(struct exynos_pcie *ep) |
385 | { | 383 | { |
386 | struct pcie_port *pp = &exynos_pcie->pp; | 384 | struct pcie_port *pp = &ep->pp; |
387 | u32 val; | 385 | u32 val; |
388 | 386 | ||
389 | dw_pcie_msi_init(pp); | 387 | dw_pcie_msi_init(pp); |
390 | 388 | ||
391 | /* enable MSI interrupt */ | 389 | /* enable MSI interrupt */ |
392 | val = exynos_elb_readl(exynos_pcie, PCIE_IRQ_EN_LEVEL); | 390 | val = exynos_elb_readl(ep, PCIE_IRQ_EN_LEVEL); |
393 | val |= IRQ_MSI_ENABLE; | 391 | val |= IRQ_MSI_ENABLE; |
394 | exynos_elb_writel(exynos_pcie, val, PCIE_IRQ_EN_LEVEL); | 392 | exynos_elb_writel(ep, val, PCIE_IRQ_EN_LEVEL); |
395 | } | 393 | } |
396 | 394 | ||
397 | static void exynos_pcie_enable_interrupts(struct exynos_pcie *exynos_pcie) | 395 | static void exynos_pcie_enable_interrupts(struct exynos_pcie *ep) |
398 | { | 396 | { |
399 | exynos_pcie_enable_irq_pulse(exynos_pcie); | 397 | exynos_pcie_enable_irq_pulse(ep); |
400 | 398 | ||
401 | if (IS_ENABLED(CONFIG_PCI_MSI)) | 399 | if (IS_ENABLED(CONFIG_PCI_MSI)) |
402 | exynos_pcie_msi_init(exynos_pcie); | 400 | exynos_pcie_msi_init(ep); |
403 | } | 401 | } |
404 | 402 | ||
405 | static u32 exynos_pcie_readl_rc(struct pcie_port *pp, u32 reg) | 403 | static u32 exynos_pcie_readl_rc(struct pcie_port *pp, u32 reg) |
406 | { | 404 | { |
407 | struct exynos_pcie *exynos_pcie = to_exynos_pcie(pp); | 405 | struct exynos_pcie *ep = to_exynos_pcie(pp); |
408 | u32 val; | 406 | u32 val; |
409 | 407 | ||
410 | exynos_pcie_sideband_dbi_r_mode(exynos_pcie, true); | 408 | exynos_pcie_sideband_dbi_r_mode(ep, true); |
411 | val = readl(pp->dbi_base + reg); | 409 | val = readl(pp->dbi_base + reg); |
412 | exynos_pcie_sideband_dbi_r_mode(exynos_pcie, false); | 410 | exynos_pcie_sideband_dbi_r_mode(ep, false); |
413 | return val; | 411 | return val; |
414 | } | 412 | } |
415 | 413 | ||
416 | static void exynos_pcie_writel_rc(struct pcie_port *pp, u32 reg, u32 val) | 414 | static void exynos_pcie_writel_rc(struct pcie_port *pp, u32 reg, u32 val) |
417 | { | 415 | { |
418 | struct exynos_pcie *exynos_pcie = to_exynos_pcie(pp); | 416 | struct exynos_pcie *ep = to_exynos_pcie(pp); |
419 | 417 | ||
420 | exynos_pcie_sideband_dbi_w_mode(exynos_pcie, true); | 418 | exynos_pcie_sideband_dbi_w_mode(ep, true); |
421 | writel(val, pp->dbi_base + reg); | 419 | writel(val, pp->dbi_base + reg); |
422 | exynos_pcie_sideband_dbi_w_mode(exynos_pcie, false); | 420 | exynos_pcie_sideband_dbi_w_mode(ep, false); |
423 | } | 421 | } |
424 | 422 | ||
425 | static int exynos_pcie_rd_own_conf(struct pcie_port *pp, int where, int size, | 423 | static int exynos_pcie_rd_own_conf(struct pcie_port *pp, int where, int size, |
426 | u32 *val) | 424 | u32 *val) |
427 | { | 425 | { |
428 | struct exynos_pcie *exynos_pcie = to_exynos_pcie(pp); | 426 | struct exynos_pcie *ep = to_exynos_pcie(pp); |
429 | int ret; | 427 | int ret; |
430 | 428 | ||
431 | exynos_pcie_sideband_dbi_r_mode(exynos_pcie, true); | 429 | exynos_pcie_sideband_dbi_r_mode(ep, true); |
432 | ret = dw_pcie_cfg_read(pp->dbi_base + where, size, val); | 430 | ret = dw_pcie_cfg_read(pp->dbi_base + where, size, val); |
433 | exynos_pcie_sideband_dbi_r_mode(exynos_pcie, false); | 431 | exynos_pcie_sideband_dbi_r_mode(ep, false); |
434 | return ret; | 432 | return ret; |
435 | } | 433 | } |
436 | 434 | ||
437 | static int exynos_pcie_wr_own_conf(struct pcie_port *pp, int where, int size, | 435 | static int exynos_pcie_wr_own_conf(struct pcie_port *pp, int where, int size, |
438 | u32 val) | 436 | u32 val) |
439 | { | 437 | { |
440 | struct exynos_pcie *exynos_pcie = to_exynos_pcie(pp); | 438 | struct exynos_pcie *ep = to_exynos_pcie(pp); |
441 | int ret; | 439 | int ret; |
442 | 440 | ||
443 | exynos_pcie_sideband_dbi_w_mode(exynos_pcie, true); | 441 | exynos_pcie_sideband_dbi_w_mode(ep, true); |
444 | ret = dw_pcie_cfg_write(pp->dbi_base + where, size, val); | 442 | ret = dw_pcie_cfg_write(pp->dbi_base + where, size, val); |
445 | exynos_pcie_sideband_dbi_w_mode(exynos_pcie, false); | 443 | exynos_pcie_sideband_dbi_w_mode(ep, false); |
446 | return ret; | 444 | return ret; |
447 | } | 445 | } |
448 | 446 | ||
449 | static int exynos_pcie_link_up(struct pcie_port *pp) | 447 | static int exynos_pcie_link_up(struct pcie_port *pp) |
450 | { | 448 | { |
451 | struct exynos_pcie *exynos_pcie = to_exynos_pcie(pp); | 449 | struct exynos_pcie *ep = to_exynos_pcie(pp); |
452 | u32 val; | 450 | u32 val; |
453 | 451 | ||
454 | val = exynos_elb_readl(exynos_pcie, PCIE_ELBI_RDLH_LINKUP); | 452 | val = exynos_elb_readl(ep, PCIE_ELBI_RDLH_LINKUP); |
455 | if (val == PCIE_ELBI_LTSSM_ENABLE) | 453 | if (val == PCIE_ELBI_LTSSM_ENABLE) |
456 | return 1; | 454 | return 1; |
457 | 455 | ||
@@ -460,10 +458,10 @@ static int exynos_pcie_link_up(struct pcie_port *pp) | |||
460 | 458 | ||
461 | static void exynos_pcie_host_init(struct pcie_port *pp) | 459 | static void exynos_pcie_host_init(struct pcie_port *pp) |
462 | { | 460 | { |
463 | struct exynos_pcie *exynos_pcie = to_exynos_pcie(pp); | 461 | struct exynos_pcie *ep = to_exynos_pcie(pp); |
464 | 462 | ||
465 | exynos_pcie_establish_link(exynos_pcie); | 463 | exynos_pcie_establish_link(ep); |
466 | exynos_pcie_enable_interrupts(exynos_pcie); | 464 | exynos_pcie_enable_interrupts(ep); |
467 | } | 465 | } |
468 | 466 | ||
469 | static struct pcie_host_ops exynos_pcie_host_ops = { | 467 | static struct pcie_host_ops exynos_pcie_host_ops = { |
@@ -475,10 +473,10 @@ static struct pcie_host_ops exynos_pcie_host_ops = { | |||
475 | .host_init = exynos_pcie_host_init, | 473 | .host_init = exynos_pcie_host_init, |
476 | }; | 474 | }; |
477 | 475 | ||
478 | static int __init exynos_add_pcie_port(struct exynos_pcie *exynos_pcie, | 476 | static int __init exynos_add_pcie_port(struct exynos_pcie *ep, |
479 | struct platform_device *pdev) | 477 | struct platform_device *pdev) |
480 | { | 478 | { |
481 | struct pcie_port *pp = &exynos_pcie->pp; | 479 | struct pcie_port *pp = &ep->pp; |
482 | struct device *dev = pp->dev; | 480 | struct device *dev = pp->dev; |
483 | int ret; | 481 | int ret; |
484 | 482 | ||
@@ -488,7 +486,7 @@ static int __init exynos_add_pcie_port(struct exynos_pcie *exynos_pcie, | |||
488 | return -ENODEV; | 486 | return -ENODEV; |
489 | } | 487 | } |
490 | ret = devm_request_irq(dev, pp->irq, exynos_pcie_irq_handler, | 488 | ret = devm_request_irq(dev, pp->irq, exynos_pcie_irq_handler, |
491 | IRQF_SHARED, "exynos-pcie", exynos_pcie); | 489 | IRQF_SHARED, "exynos-pcie", ep); |
492 | if (ret) { | 490 | if (ret) { |
493 | dev_err(dev, "failed to request irq\n"); | 491 | dev_err(dev, "failed to request irq\n"); |
494 | return ret; | 492 | return ret; |
@@ -504,7 +502,7 @@ static int __init exynos_add_pcie_port(struct exynos_pcie *exynos_pcie, | |||
504 | ret = devm_request_irq(dev, pp->msi_irq, | 502 | ret = devm_request_irq(dev, pp->msi_irq, |
505 | exynos_pcie_msi_irq_handler, | 503 | exynos_pcie_msi_irq_handler, |
506 | IRQF_SHARED | IRQF_NO_THREAD, | 504 | IRQF_SHARED | IRQF_NO_THREAD, |
507 | "exynos-pcie", exynos_pcie); | 505 | "exynos-pcie", ep); |
508 | if (ret) { | 506 | if (ret) { |
509 | dev_err(dev, "failed to request msi irq\n"); | 507 | dev_err(dev, "failed to request msi irq\n"); |
510 | return ret; | 508 | return ret; |
@@ -526,7 +524,7 @@ static int __init exynos_add_pcie_port(struct exynos_pcie *exynos_pcie, | |||
526 | static int __init exynos_pcie_probe(struct platform_device *pdev) | 524 | static int __init exynos_pcie_probe(struct platform_device *pdev) |
527 | { | 525 | { |
528 | struct device *dev = &pdev->dev; | 526 | struct device *dev = &pdev->dev; |
529 | struct exynos_pcie *exynos_pcie; | 527 | struct exynos_pcie *ep; |
530 | struct pcie_port *pp; | 528 | struct pcie_port *pp; |
531 | struct device_node *np = dev->of_node; | 529 | struct device_node *np = dev->of_node; |
532 | struct resource *elbi_base; | 530 | struct resource *elbi_base; |
@@ -534,75 +532,75 @@ static int __init exynos_pcie_probe(struct platform_device *pdev) | |||
534 | struct resource *block_base; | 532 | struct resource *block_base; |
535 | int ret; | 533 | int ret; |
536 | 534 | ||
537 | exynos_pcie = devm_kzalloc(dev, sizeof(*exynos_pcie), GFP_KERNEL); | 535 | ep = devm_kzalloc(dev, sizeof(*ep), GFP_KERNEL); |
538 | if (!exynos_pcie) | 536 | if (!ep) |
539 | return -ENOMEM; | 537 | return -ENOMEM; |
540 | 538 | ||
541 | pp = &exynos_pcie->pp; | 539 | pp = &ep->pp; |
542 | pp->dev = dev; | 540 | pp->dev = dev; |
543 | 541 | ||
544 | exynos_pcie->reset_gpio = of_get_named_gpio(np, "reset-gpio", 0); | 542 | ep->reset_gpio = of_get_named_gpio(np, "reset-gpio", 0); |
545 | 543 | ||
546 | exynos_pcie->clk = devm_clk_get(dev, "pcie"); | 544 | ep->clk = devm_clk_get(dev, "pcie"); |
547 | if (IS_ERR(exynos_pcie->clk)) { | 545 | if (IS_ERR(ep->clk)) { |
548 | dev_err(dev, "Failed to get pcie rc clock\n"); | 546 | dev_err(dev, "Failed to get pcie rc clock\n"); |
549 | return PTR_ERR(exynos_pcie->clk); | 547 | return PTR_ERR(ep->clk); |
550 | } | 548 | } |
551 | ret = clk_prepare_enable(exynos_pcie->clk); | 549 | ret = clk_prepare_enable(ep->clk); |
552 | if (ret) | 550 | if (ret) |
553 | return ret; | 551 | return ret; |
554 | 552 | ||
555 | exynos_pcie->bus_clk = devm_clk_get(dev, "pcie_bus"); | 553 | ep->bus_clk = devm_clk_get(dev, "pcie_bus"); |
556 | if (IS_ERR(exynos_pcie->bus_clk)) { | 554 | if (IS_ERR(ep->bus_clk)) { |
557 | dev_err(dev, "Failed to get pcie bus clock\n"); | 555 | dev_err(dev, "Failed to get pcie bus clock\n"); |
558 | ret = PTR_ERR(exynos_pcie->bus_clk); | 556 | ret = PTR_ERR(ep->bus_clk); |
559 | goto fail_clk; | 557 | goto fail_clk; |
560 | } | 558 | } |
561 | ret = clk_prepare_enable(exynos_pcie->bus_clk); | 559 | ret = clk_prepare_enable(ep->bus_clk); |
562 | if (ret) | 560 | if (ret) |
563 | goto fail_clk; | 561 | goto fail_clk; |
564 | 562 | ||
565 | elbi_base = platform_get_resource(pdev, IORESOURCE_MEM, 0); | 563 | elbi_base = platform_get_resource(pdev, IORESOURCE_MEM, 0); |
566 | exynos_pcie->elbi_base = devm_ioremap_resource(dev, elbi_base); | 564 | ep->elbi_base = devm_ioremap_resource(dev, elbi_base); |
567 | if (IS_ERR(exynos_pcie->elbi_base)) { | 565 | if (IS_ERR(ep->elbi_base)) { |
568 | ret = PTR_ERR(exynos_pcie->elbi_base); | 566 | ret = PTR_ERR(ep->elbi_base); |
569 | goto fail_bus_clk; | 567 | goto fail_bus_clk; |
570 | } | 568 | } |
571 | 569 | ||
572 | phy_base = platform_get_resource(pdev, IORESOURCE_MEM, 1); | 570 | phy_base = platform_get_resource(pdev, IORESOURCE_MEM, 1); |
573 | exynos_pcie->phy_base = devm_ioremap_resource(dev, phy_base); | 571 | ep->phy_base = devm_ioremap_resource(dev, phy_base); |
574 | if (IS_ERR(exynos_pcie->phy_base)) { | 572 | if (IS_ERR(ep->phy_base)) { |
575 | ret = PTR_ERR(exynos_pcie->phy_base); | 573 | ret = PTR_ERR(ep->phy_base); |
576 | goto fail_bus_clk; | 574 | goto fail_bus_clk; |
577 | } | 575 | } |
578 | 576 | ||
579 | block_base = platform_get_resource(pdev, IORESOURCE_MEM, 2); | 577 | block_base = platform_get_resource(pdev, IORESOURCE_MEM, 2); |
580 | exynos_pcie->block_base = devm_ioremap_resource(dev, block_base); | 578 | ep->block_base = devm_ioremap_resource(dev, block_base); |
581 | if (IS_ERR(exynos_pcie->block_base)) { | 579 | if (IS_ERR(ep->block_base)) { |
582 | ret = PTR_ERR(exynos_pcie->block_base); | 580 | ret = PTR_ERR(ep->block_base); |
583 | goto fail_bus_clk; | 581 | goto fail_bus_clk; |
584 | } | 582 | } |
585 | 583 | ||
586 | ret = exynos_add_pcie_port(exynos_pcie, pdev); | 584 | ret = exynos_add_pcie_port(ep, pdev); |
587 | if (ret < 0) | 585 | if (ret < 0) |
588 | goto fail_bus_clk; | 586 | goto fail_bus_clk; |
589 | 587 | ||
590 | platform_set_drvdata(pdev, exynos_pcie); | 588 | platform_set_drvdata(pdev, ep); |
591 | return 0; | 589 | return 0; |
592 | 590 | ||
593 | fail_bus_clk: | 591 | fail_bus_clk: |
594 | clk_disable_unprepare(exynos_pcie->bus_clk); | 592 | clk_disable_unprepare(ep->bus_clk); |
595 | fail_clk: | 593 | fail_clk: |
596 | clk_disable_unprepare(exynos_pcie->clk); | 594 | clk_disable_unprepare(ep->clk); |
597 | return ret; | 595 | return ret; |
598 | } | 596 | } |
599 | 597 | ||
600 | static int __exit exynos_pcie_remove(struct platform_device *pdev) | 598 | static int __exit exynos_pcie_remove(struct platform_device *pdev) |
601 | { | 599 | { |
602 | struct exynos_pcie *exynos_pcie = platform_get_drvdata(pdev); | 600 | struct exynos_pcie *ep = platform_get_drvdata(pdev); |
603 | 601 | ||
604 | clk_disable_unprepare(exynos_pcie->bus_clk); | 602 | clk_disable_unprepare(ep->bus_clk); |
605 | clk_disable_unprepare(exynos_pcie->clk); | 603 | clk_disable_unprepare(ep->clk); |
606 | 604 | ||
607 | return 0; | 605 | return 0; |
608 | } | 606 | } |