Diffstat (limited to 'arch/arm/plat-omap/dma.c')
-rw-r--r--	arch/arm/plat-omap/dma.c	492
1 file changed, 78 insertions, 414 deletions
diff --git a/arch/arm/plat-omap/dma.c b/arch/arm/plat-omap/dma.c
index 68eaae324b6a..1d959965ff52 100644
--- a/arch/arm/plat-omap/dma.c
+++ b/arch/arm/plat-omap/dma.c
@@ -29,12 +29,13 @@
 #include <linux/interrupt.h>
 #include <linux/irq.h>
 #include <linux/io.h>
+#include <linux/slab.h>
 
 #include <asm/system.h>
 #include <mach/hardware.h>
-#include <mach/dma.h>
+#include <plat/dma.h>
 
-#include <mach/tc.h>
+#include <plat/tc.h>
 
 #undef DEBUG
 
@@ -47,13 +48,18 @@ enum { DMA_CHAIN_STARTED, DMA_CHAIN_NOTSTARTED };
 #endif
 
 #define OMAP_DMA_ACTIVE			0x01
-#define OMAP_DMA_CCR_EN			(1 << 7)
 #define OMAP2_DMA_CSR_CLEAR_MASK	0xffe
 
 #define OMAP_FUNC_MUX_ARM_BASE		(0xfffe1000 + 0xec)
 
 static int enable_1510_mode;
 
+static struct omap_dma_global_context_registers {
+	u32 dma_irqenable_l0;
+	u32 dma_ocp_sysconfig;
+	u32 dma_gcr;
+} omap_dma_global_context;
+
 struct omap_dma_lch {
 	int next_lch;
 	int dev_id;
@@ -931,6 +937,15 @@ void omap_start_dma(int lch)
 {
 	u32 l;
 
+	/*
+	 * The CPC/CDAC register needs to be initialized to zero
+	 * before starting dma transfer.
+	 */
+	if (cpu_is_omap15xx())
+		dma_write(0, CPC(lch));
+	else
+		dma_write(0, CDAC(lch));
+
 	if (!omap_dma_in_1510_mode() && dma_chan[lch].next_lch != -1) {
 		int next_lch, cur_lch;
 		char dma_chan_link_map[OMAP_DMA4_LOGICAL_DMA_CH_COUNT];
@@ -1114,17 +1129,8 @@ int omap_dma_running(void)
 {
 	int lch;
 
-	/*
-	 * On OMAP1510, internal LCD controller will start the transfer
-	 * when it gets enabled, so assume DMA running if LCD enabled.
-	 */
-	if (cpu_is_omap1510())
-		if (omap_readw(0xfffec000 + 0x00) & (1 << 0))
-			return 1;
-
-	/* Check if LCD DMA is running */
-	if (cpu_is_omap16xx())
-		if (omap_readw(OMAP1610_DMA_LCD_CCR) & OMAP_DMA_CCR_EN)
+	if (cpu_class_is_omap1())
+		if (omap_lcd_dma_running())
 			return 1;
 
 	for (lch = 0; lch < dma_chan_count; lch++)
@@ -1187,7 +1193,7 @@ void omap_dma_unlink_lch(int lch_head, int lch_queue)
 	}
 
 	if ((dma_chan[lch_head].flags & OMAP_DMA_ACTIVE) ||
-	    (dma_chan[lch_head].flags & OMAP_DMA_ACTIVE)) {
+	    (dma_chan[lch_queue].flags & OMAP_DMA_ACTIVE)) {
 		printk(KERN_ERR "omap_dma: You need to stop the DMA channels "
 			"before unlinking\n");
 		dump_stack();
@@ -1246,7 +1252,7 @@ static void create_dma_lch_chain(int lch_head, int lch_queue)
  *			 OMAP_DMA_DYNAMIC_CHAIN
  * @params - Channel parameters
  *
- * @return - Succes : 0
+ * @return - Success : 0
  *	     Failure: -EINVAL/-ENOMEM
  */
 int omap_request_dma_chain(int dev_id, const char *dev_name,
@@ -1874,8 +1880,7 @@ static irqreturn_t omap1_dma_irq_handler(int irq, void *dev_id)
 #define omap1_dma_irq_handler	NULL
 #endif
 
-#if defined(CONFIG_ARCH_OMAP2) || defined(CONFIG_ARCH_OMAP3) || \
-			defined(CONFIG_ARCH_OMAP4)
+#ifdef CONFIG_ARCH_OMAP2PLUS
 
 static int omap2_dma_handle_ch(int ch)
 {
@@ -1984,415 +1989,83 @@ static struct irqaction omap24xx_dma_irq;
 
 /*----------------------------------------------------------------------------*/
 
-static struct lcd_dma_info {
-	spinlock_t lock;
-	int reserved;
-	void (*callback)(u16 status, void *data);
-	void *cb_data;
-
-	int active;
-	unsigned long addr, size;
-	int rotate, data_type, xres, yres;
-	int vxres;
-	int mirror;
-	int xscale, yscale;
-	int ext_ctrl;
-	int src_port;
-	int single_transfer;
-} lcd_dma;
-
-void omap_set_lcd_dma_b1(unsigned long addr, u16 fb_xres, u16 fb_yres,
-			 int data_type)
-{
-	lcd_dma.addr = addr;
-	lcd_dma.data_type = data_type;
-	lcd_dma.xres = fb_xres;
-	lcd_dma.yres = fb_yres;
-}
-EXPORT_SYMBOL(omap_set_lcd_dma_b1);
-
-void omap_set_lcd_dma_src_port(int port)
-{
-	lcd_dma.src_port = port;
-}
-
-void omap_set_lcd_dma_ext_controller(int external)
+void omap_dma_global_context_save(void)
 {
-	lcd_dma.ext_ctrl = external;
+	omap_dma_global_context.dma_irqenable_l0 =
+		dma_read(IRQENABLE_L0);
+	omap_dma_global_context.dma_ocp_sysconfig =
+		dma_read(OCP_SYSCONFIG);
+	omap_dma_global_context.dma_gcr = dma_read(GCR);
 }
-EXPORT_SYMBOL(omap_set_lcd_dma_ext_controller);
 
-void omap_set_lcd_dma_single_transfer(int single)
+void omap_dma_global_context_restore(void)
 {
-	lcd_dma.single_transfer = single;
-}
-EXPORT_SYMBOL(omap_set_lcd_dma_single_transfer);
+	int ch;
 
-void omap_set_lcd_dma_b1_rotation(int rotate)
-{
-	if (omap_dma_in_1510_mode()) {
-		printk(KERN_ERR "DMA rotation is not supported in 1510 mode\n");
-		BUG();
-		return;
-	}
-	lcd_dma.rotate = rotate;
-}
-EXPORT_SYMBOL(omap_set_lcd_dma_b1_rotation);
-
-void omap_set_lcd_dma_b1_mirror(int mirror)
-{
-	if (omap_dma_in_1510_mode()) {
-		printk(KERN_ERR "DMA mirror is not supported in 1510 mode\n");
-		BUG();
-	}
-	lcd_dma.mirror = mirror;
-}
-EXPORT_SYMBOL(omap_set_lcd_dma_b1_mirror);
-
-void omap_set_lcd_dma_b1_vxres(unsigned long vxres)
-{
-	if (omap_dma_in_1510_mode()) {
-		printk(KERN_ERR "DMA virtual resulotion is not supported "
-				"in 1510 mode\n");
-		BUG();
-	}
-	lcd_dma.vxres = vxres;
-}
-EXPORT_SYMBOL(omap_set_lcd_dma_b1_vxres);
-
-void omap_set_lcd_dma_b1_scale(unsigned int xscale, unsigned int yscale)
-{
-	if (omap_dma_in_1510_mode()) {
-		printk(KERN_ERR "DMA scale is not supported in 1510 mode\n");
-		BUG();
-	}
-	lcd_dma.xscale = xscale;
-	lcd_dma.yscale = yscale;
-}
-EXPORT_SYMBOL(omap_set_lcd_dma_b1_scale);
-
-static void set_b1_regs(void)
-{
-	unsigned long top, bottom;
-	int es;
-	u16 w;
-	unsigned long en, fn;
-	long ei, fi;
-	unsigned long vxres;
-	unsigned int xscale, yscale;
-
-	switch (lcd_dma.data_type) {
-	case OMAP_DMA_DATA_TYPE_S8:
-		es = 1;
-		break;
-	case OMAP_DMA_DATA_TYPE_S16:
-		es = 2;
-		break;
-	case OMAP_DMA_DATA_TYPE_S32:
-		es = 4;
-		break;
-	default:
-		BUG();
-		return;
-	}
-
-	vxres = lcd_dma.vxres ? lcd_dma.vxres : lcd_dma.xres;
-	xscale = lcd_dma.xscale ? lcd_dma.xscale : 1;
-	yscale = lcd_dma.yscale ? lcd_dma.yscale : 1;
-	BUG_ON(vxres < lcd_dma.xres);
-
-#define PIXADDR(x, y) (lcd_dma.addr + \
-		((y) * vxres * yscale + (x) * xscale) * es)
-#define PIXSTEP(sx, sy, dx, dy) (PIXADDR(dx, dy) - PIXADDR(sx, sy) - es + 1)
-
-	switch (lcd_dma.rotate) {
-	case 0:
-		if (!lcd_dma.mirror) {
-			top = PIXADDR(0, 0);
-			bottom = PIXADDR(lcd_dma.xres - 1, lcd_dma.yres - 1);
-			/* 1510 DMA requires the bottom address to be 2 more
-			 * than the actual last memory access location. */
-			if (omap_dma_in_1510_mode() &&
-				lcd_dma.data_type == OMAP_DMA_DATA_TYPE_S32)
-				bottom += 2;
-			ei = PIXSTEP(0, 0, 1, 0);
-			fi = PIXSTEP(lcd_dma.xres - 1, 0, 0, 1);
-		} else {
-			top = PIXADDR(lcd_dma.xres - 1, 0);
-			bottom = PIXADDR(0, lcd_dma.yres - 1);
-			ei = PIXSTEP(1, 0, 0, 0);
-			fi = PIXSTEP(0, 0, lcd_dma.xres - 1, 1);
-		}
-		en = lcd_dma.xres;
-		fn = lcd_dma.yres;
-		break;
-	case 90:
-		if (!lcd_dma.mirror) {
-			top = PIXADDR(0, lcd_dma.yres - 1);
-			bottom = PIXADDR(lcd_dma.xres - 1, 0);
-			ei = PIXSTEP(0, 1, 0, 0);
-			fi = PIXSTEP(0, 0, 1, lcd_dma.yres - 1);
-		} else {
-			top = PIXADDR(lcd_dma.xres - 1, lcd_dma.yres - 1);
-			bottom = PIXADDR(0, 0);
-			ei = PIXSTEP(0, 1, 0, 0);
-			fi = PIXSTEP(1, 0, 0, lcd_dma.yres - 1);
-		}
-		en = lcd_dma.yres;
-		fn = lcd_dma.xres;
-		break;
-	case 180:
-		if (!lcd_dma.mirror) {
-			top = PIXADDR(lcd_dma.xres - 1, lcd_dma.yres - 1);
-			bottom = PIXADDR(0, 0);
-			ei = PIXSTEP(1, 0, 0, 0);
-			fi = PIXSTEP(0, 1, lcd_dma.xres - 1, 0);
-		} else {
-			top = PIXADDR(0, lcd_dma.yres - 1);
-			bottom = PIXADDR(lcd_dma.xres - 1, 0);
-			ei = PIXSTEP(0, 0, 1, 0);
-			fi = PIXSTEP(lcd_dma.xres - 1, 1, 0, 0);
-		}
-		en = lcd_dma.xres;
-		fn = lcd_dma.yres;
-		break;
-	case 270:
-		if (!lcd_dma.mirror) {
-			top = PIXADDR(lcd_dma.xres - 1, 0);
-			bottom = PIXADDR(0, lcd_dma.yres - 1);
-			ei = PIXSTEP(0, 0, 0, 1);
-			fi = PIXSTEP(1, lcd_dma.yres - 1, 0, 0);
-		} else {
-			top = PIXADDR(0, 0);
-			bottom = PIXADDR(lcd_dma.xres - 1, lcd_dma.yres - 1);
-			ei = PIXSTEP(0, 0, 0, 1);
-			fi = PIXSTEP(0, lcd_dma.yres - 1, 1, 0);
-		}
-		en = lcd_dma.yres;
-		fn = lcd_dma.xres;
-		break;
-	default:
-		BUG();
-		return;	/* Suppress warning about uninitialized vars */
-	}
-
-	if (omap_dma_in_1510_mode()) {
-		omap_writew(top >> 16, OMAP1510_DMA_LCD_TOP_F1_U);
-		omap_writew(top, OMAP1510_DMA_LCD_TOP_F1_L);
-		omap_writew(bottom >> 16, OMAP1510_DMA_LCD_BOT_F1_U);
-		omap_writew(bottom, OMAP1510_DMA_LCD_BOT_F1_L);
-
-		return;
-	}
-
-	/* 1610 regs */
-	omap_writew(top >> 16, OMAP1610_DMA_LCD_TOP_B1_U);
-	omap_writew(top, OMAP1610_DMA_LCD_TOP_B1_L);
-	omap_writew(bottom >> 16, OMAP1610_DMA_LCD_BOT_B1_U);
-	omap_writew(bottom, OMAP1610_DMA_LCD_BOT_B1_L);
-
-	omap_writew(en, OMAP1610_DMA_LCD_SRC_EN_B1);
-	omap_writew(fn, OMAP1610_DMA_LCD_SRC_FN_B1);
-
-	w = omap_readw(OMAP1610_DMA_LCD_CSDP);
-	w &= ~0x03;
-	w |= lcd_dma.data_type;
-	omap_writew(w, OMAP1610_DMA_LCD_CSDP);
-
-	w = omap_readw(OMAP1610_DMA_LCD_CTRL);
-	/* Always set the source port as SDRAM for now*/
-	w &= ~(0x03 << 6);
-	if (lcd_dma.callback != NULL)
-		w |= 1 << 1;		/* Block interrupt enable */
-	else
-		w &= ~(1 << 1);
-	omap_writew(w, OMAP1610_DMA_LCD_CTRL);
-
-	if (!(lcd_dma.rotate || lcd_dma.mirror ||
-	      lcd_dma.vxres || lcd_dma.xscale || lcd_dma.yscale))
-		return;
-
-	w = omap_readw(OMAP1610_DMA_LCD_CCR);
-	/* Set the double-indexed addressing mode */
-	w |= (0x03 << 12);
-	omap_writew(w, OMAP1610_DMA_LCD_CCR);
-
-	omap_writew(ei, OMAP1610_DMA_LCD_SRC_EI_B1);
-	omap_writew(fi >> 16, OMAP1610_DMA_LCD_SRC_FI_B1_U);
-	omap_writew(fi, OMAP1610_DMA_LCD_SRC_FI_B1_L);
-}
-
-static irqreturn_t lcd_dma_irq_handler(int irq, void *dev_id)
-{
-	u16 w;
-
-	w = omap_readw(OMAP1610_DMA_LCD_CTRL);
-	if (unlikely(!(w & (1 << 3)))) {
-		printk(KERN_WARNING "Spurious LCD DMA IRQ\n");
-		return IRQ_NONE;
-	}
-	/* Ack the IRQ */
-	w |= (1 << 3);
-	omap_writew(w, OMAP1610_DMA_LCD_CTRL);
-	lcd_dma.active = 0;
-	if (lcd_dma.callback != NULL)
-		lcd_dma.callback(w, lcd_dma.cb_data);
-
-	return IRQ_HANDLED;
-}
-
-int omap_request_lcd_dma(void (*callback)(u16 status, void *data),
-			 void *data)
-{
-	spin_lock_irq(&lcd_dma.lock);
-	if (lcd_dma.reserved) {
-		spin_unlock_irq(&lcd_dma.lock);
-		printk(KERN_ERR "LCD DMA channel already reserved\n");
-		BUG();
-		return -EBUSY;
-	}
-	lcd_dma.reserved = 1;
-	spin_unlock_irq(&lcd_dma.lock);
-	lcd_dma.callback = callback;
-	lcd_dma.cb_data = data;
-	lcd_dma.active = 0;
-	lcd_dma.single_transfer = 0;
-	lcd_dma.rotate = 0;
-	lcd_dma.vxres = 0;
-	lcd_dma.mirror = 0;
-	lcd_dma.xscale = 0;
-	lcd_dma.yscale = 0;
-	lcd_dma.ext_ctrl = 0;
-	lcd_dma.src_port = 0;
-
-	return 0;
-}
-EXPORT_SYMBOL(omap_request_lcd_dma);
-
-void omap_free_lcd_dma(void)
-{
-	spin_lock(&lcd_dma.lock);
-	if (!lcd_dma.reserved) {
-		spin_unlock(&lcd_dma.lock);
-		printk(KERN_ERR "LCD DMA is not reserved\n");
-		BUG();
-		return;
-	}
-	if (!enable_1510_mode)
-		omap_writew(omap_readw(OMAP1610_DMA_LCD_CCR) & ~1,
-			    OMAP1610_DMA_LCD_CCR);
-	lcd_dma.reserved = 0;
-	spin_unlock(&lcd_dma.lock);
-}
-EXPORT_SYMBOL(omap_free_lcd_dma);
-
-void omap_enable_lcd_dma(void)
-{
-	u16 w;
+	dma_write(omap_dma_global_context.dma_gcr, GCR);
+	dma_write(omap_dma_global_context.dma_ocp_sysconfig,
+		OCP_SYSCONFIG);
+	dma_write(omap_dma_global_context.dma_irqenable_l0,
+		IRQENABLE_L0);
 
 	/*
-	 * Set the Enable bit only if an external controller is
-	 * connected. Otherwise the OMAP internal controller will
-	 * start the transfer when it gets enabled.
+	 * A bug in ROM code leaves IRQ status for channels 0 and 1 uncleared
+	 * after secure sram context save and restore. Hence we need to
+	 * manually clear those IRQs to avoid spurious interrupts. This
+	 * affects only secure devices.
 	 */
-	if (enable_1510_mode || !lcd_dma.ext_ctrl)
-		return;
-
-	w = omap_readw(OMAP1610_DMA_LCD_CTRL);
-	w |= 1 << 8;
-	omap_writew(w, OMAP1610_DMA_LCD_CTRL);
-
-	lcd_dma.active = 1;
+	if (cpu_is_omap34xx() && (omap_type() != OMAP2_DEVICE_TYPE_GP))
+		dma_write(0x3 , IRQSTATUS_L0);
 
-	w = omap_readw(OMAP1610_DMA_LCD_CCR);
-	w |= 1 << 7;
-	omap_writew(w, OMAP1610_DMA_LCD_CCR);
+	for (ch = 0; ch < dma_chan_count; ch++)
+		if (dma_chan[ch].dev_id != -1)
+			omap_clear_dma(ch);
 }
-EXPORT_SYMBOL(omap_enable_lcd_dma);
-
-void omap_setup_lcd_dma(void)
-{
-	BUG_ON(lcd_dma.active);
-	if (!enable_1510_mode) {
-		/* Set some reasonable defaults */
-		omap_writew(0x5440, OMAP1610_DMA_LCD_CCR);
-		omap_writew(0x9102, OMAP1610_DMA_LCD_CSDP);
-		omap_writew(0x0004, OMAP1610_DMA_LCD_LCH_CTRL);
-	}
-	set_b1_regs();
-	if (!enable_1510_mode) {
-		u16 w;
-
-		w = omap_readw(OMAP1610_DMA_LCD_CCR);
-		/*
-		 * If DMA was already active set the end_prog bit to have
-		 * the programmed register set loaded into the active
-		 * register set.
-		 */
-		w |= 1 << 11;		/* End_prog */
-		if (!lcd_dma.single_transfer)
-			w |= (3 << 8);	/* Auto_init, repeat */
-		omap_writew(w, OMAP1610_DMA_LCD_CCR);
-	}
-}
-EXPORT_SYMBOL(omap_setup_lcd_dma);
-
-void omap_stop_lcd_dma(void)
-{
-	u16 w;
-
-	lcd_dma.active = 0;
-	if (enable_1510_mode || !lcd_dma.ext_ctrl)
-		return;
-
-	w = omap_readw(OMAP1610_DMA_LCD_CCR);
-	w &= ~(1 << 7);
-	omap_writew(w, OMAP1610_DMA_LCD_CCR);
-
-	w = omap_readw(OMAP1610_DMA_LCD_CTRL);
-	w &= ~(1 << 8);
-	omap_writew(w, OMAP1610_DMA_LCD_CTRL);
-}
-EXPORT_SYMBOL(omap_stop_lcd_dma);
 
 /*----------------------------------------------------------------------------*/
 
 static int __init omap_init_dma(void)
 {
+	unsigned long base;
 	int ch, r;
 
 	if (cpu_class_is_omap1()) {
-		omap_dma_base = OMAP1_IO_ADDRESS(OMAP1_DMA_BASE);
+		base = OMAP1_DMA_BASE;
 		dma_lch_count = OMAP1_LOGICAL_DMA_CH_COUNT;
 	} else if (cpu_is_omap24xx()) {
-		omap_dma_base = OMAP2_IO_ADDRESS(OMAP24XX_DMA4_BASE);
+		base = OMAP24XX_DMA4_BASE;
 		dma_lch_count = OMAP_DMA4_LOGICAL_DMA_CH_COUNT;
 	} else if (cpu_is_omap34xx()) {
-		omap_dma_base = OMAP2_IO_ADDRESS(OMAP34XX_DMA4_BASE);
+		base = OMAP34XX_DMA4_BASE;
 		dma_lch_count = OMAP_DMA4_LOGICAL_DMA_CH_COUNT;
 	} else if (cpu_is_omap44xx()) {
-		omap_dma_base = OMAP2_IO_ADDRESS(OMAP44XX_DMA4_BASE);
+		base = OMAP44XX_DMA4_BASE;
 		dma_lch_count = OMAP_DMA4_LOGICAL_DMA_CH_COUNT;
 	} else {
 		pr_err("DMA init failed for unsupported omap\n");
 		return -ENODEV;
 	}
 
+	omap_dma_base = ioremap(base, SZ_4K);
+	BUG_ON(!omap_dma_base);
+
 	if (cpu_class_is_omap2() && omap_dma_reserve_channels
 			&& (omap_dma_reserve_channels <= dma_lch_count))
 		dma_lch_count = omap_dma_reserve_channels;
 
 	dma_chan = kzalloc(sizeof(struct omap_dma_lch) * dma_lch_count,
 				GFP_KERNEL);
-	if (!dma_chan)
-		return -ENOMEM;
+	if (!dma_chan) {
+		r = -ENOMEM;
+		goto out_unmap;
+	}
 
 	if (cpu_class_is_omap2()) {
 		dma_linked_lch = kzalloc(sizeof(struct dma_link_info) *
 					dma_lch_count, GFP_KERNEL);
 		if (!dma_linked_lch) {
-			kfree(dma_chan);
-			return -ENOMEM;
+			r = -ENOMEM;
+			goto out_free;
 		}
 	}
 
@@ -2420,14 +2093,6 @@ static int __init omap_init_dma(void)
 			dma_chan_count = 16;
 		} else
 			dma_chan_count = 9;
-		if (cpu_is_omap16xx()) {
-			u16 w;
-
-			/* this would prevent OMAP sleep */
-			w = omap_readw(OMAP1610_DMA_LCD_CTRL);
-			w &= ~(1 << 8);
-			omap_writew(w, OMAP1610_DMA_LCD_CTRL);
-		}
 	} else if (cpu_class_is_omap2()) {
 		u8 revision = dma_read(REVISION) & 0xff;
 		printk(KERN_INFO "OMAP DMA hardware revision %d.%d\n",
@@ -2438,7 +2103,6 @@ static int __init omap_init_dma(void)
 		return 0;
 	}
 
-	spin_lock_init(&lcd_dma.lock);
 	spin_lock_init(&dma_chan_lock);
 
 	for (ch = 0; ch < dma_chan_count; ch++) {
@@ -2466,7 +2130,7 @@ static int __init omap_init_dma(void)
 				for (i = 0; i < ch; i++)
 					free_irq(omap1_dma_irq[i],
 						 (void *) (i + 1));
-				return r;
+				goto out_free;
 			}
 		}
 	}
@@ -2478,14 +2142,14 @@ static int __init omap_init_dma(void)
 	if (cpu_class_is_omap2()) {
 		int irq;
 		if (cpu_is_omap44xx())
-			irq = INT_44XX_SDMA_IRQ0;
+			irq = OMAP44XX_IRQ_SDMA_0;
 		else
 			irq = INT_24XX_SDMA_IRQ0;
 		setup_irq(irq, &omap24xx_dma_irq);
 	}
 
-	/* Enable smartidle idlemodes and autoidle */
-	if (cpu_is_omap34xx()) {
+	if (cpu_is_omap34xx() || cpu_is_omap44xx()) {
+		/* Enable smartidle idlemodes and autoidle */
 		u32 v = dma_read(OCP_SYSCONFIG);
 		v &= ~(DMA_SYSCONFIG_MIDLEMODE_MASK |
 				DMA_SYSCONFIG_SIDLEMODE_MASK |
@@ -2494,25 +2158,25 @@ static int __init omap_init_dma(void)
 			DMA_SYSCONFIG_SIDLEMODE(DMA_IDLEMODE_SMARTIDLE) |
 			DMA_SYSCONFIG_AUTOIDLE);
 		dma_write(v , OCP_SYSCONFIG);
-	}
-
-
-	/* FIXME: Update LCD DMA to work on 24xx */
-	if (cpu_class_is_omap1()) {
-		r = request_irq(INT_DMA_LCD, lcd_dma_irq_handler, 0,
-				"LCD DMA", NULL);
-		if (r != 0) {
-			int i;
-
-			printk(KERN_ERR "unable to request IRQ for LCD DMA "
-				"(error %d)\n", r);
-			for (i = 0; i < dma_chan_count; i++)
-				free_irq(omap1_dma_irq[i], (void *) (i + 1));
-			return r;
+		/* reserve dma channels 0 and 1 in high security devices */
+		if (cpu_is_omap34xx() &&
+			(omap_type() != OMAP2_DEVICE_TYPE_GP)) {
+			printk(KERN_INFO "Reserving DMA channels 0 and 1 for "
+				"HS ROM code\n");
+			dma_chan[0].dev_id = 0;
+			dma_chan[1].dev_id = 1;
 		}
 	}
 
 	return 0;
+
+out_free:
+	kfree(dma_chan);
+
+out_unmap:
+	iounmap(omap_dma_base);
+
+	return r;
 }
 
 arch_initcall(omap_init_dma);
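
Usage note: the new omap_dma_global_context_save()/omap_dma_global_context_restore() helpers save and restore IRQENABLE_L0, OCP_SYSCONFIG and GCR across a state in which the sDMA module loses its register context. The sketch below is only an illustration of how a caller might bracket such a transition; the surrounding function name (omap3_enter_off_mode) and the assumption that the prototypes are exposed via <plat/dma.h> are not part of this diff.

	#include <plat/dma.h>

	/* Hypothetical platform PM path bracketing an off-mode transition. */
	static void omap3_enter_off_mode(void)
	{
		/* Save IRQENABLE_L0, OCP_SYSCONFIG and GCR before power is cut. */
		omap_dma_global_context_save();

		/* ... enter the low-power state; sDMA register context is lost ... */

		/* Write the saved registers back and clear any channels still set up. */
		omap_dma_global_context_restore();
	}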