- patches.apparmor/remove_suid_new_case_in_2.6.22.diff: Merge fix.
[linux-flexiantxendom0-3.2.10.git] / arch / blackfin / kernel / bfin_dma_5xx.c
1 /*
2  * File:         arch/blackfin/kernel/bfin_dma_5xx.c
3  * Based on:
4  * Author:
5  *
6  * Created:
7  * Description:  This file contains the simple DMA Implementation for Blackfin
8  *
9  * Modified:
10  *               Copyright 2004-2006 Analog Devices Inc.
11  *
12  * Bugs:         Enter bugs at http://blackfin.uclinux.org/
13  *
14  * This program is free software; you can redistribute it and/or modify
15  * it under the terms of the GNU General Public License as published by
16  * the Free Software Foundation; either version 2 of the License, or
17  * (at your option) any later version.
18  *
19  * This program is distributed in the hope that it will be useful,
20  * but WITHOUT ANY WARRANTY; without even the implied warranty of
21  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
22  * GNU General Public License for more details.
23  *
24  * You should have received a copy of the GNU General Public License
25  * along with this program; if not, see the file COPYING, or write
26  * to the Free Software Foundation, Inc.,
27  * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
28  */
29
30 #include <linux/errno.h>
31 #include <linux/module.h>
32 #include <linux/sched.h>
33 #include <linux/interrupt.h>
34 #include <linux/kernel.h>
35 #include <linux/param.h>
36
37 #include <asm/dma.h>
38 #include <asm/cacheflush.h>
39
40 /* Remove unused code not exported by symbol or internally called */
41 #define REMOVE_DEAD_CODE
42
43 /**************************************************************************
44  * Global Variables
45 ***************************************************************************/
46
/* Per-channel software state (status, register pointer, lock, callback). */
static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*
 * MMR base address of each DMA channel's register block, indexed by
 * channel number.  The BF561 has two peripheral DMA controllers plus
 * internal-memory DMA streams; all other supported parts have one
 * controller.  Each entry points at the channel's NEXT_DESC_PTR, the
 * first register of the per-channel block (see struct dma_register).
 */
#if defined (CONFIG_BF561)
static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
	(struct dma_register *) DMA1_0_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_1_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_2_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_3_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_4_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_5_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_6_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_7_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_8_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_9_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_10_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_11_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_0_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_1_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_2_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_3_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_4_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_5_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_6_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_7_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_8_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_9_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_10_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_11_NEXT_DESC_PTR,
	(struct dma_register *) MDMA1_D0_NEXT_DESC_PTR,
	(struct dma_register *) MDMA1_S0_NEXT_DESC_PTR,
	(struct dma_register *) MDMA1_D1_NEXT_DESC_PTR,
	(struct dma_register *) MDMA1_S1_NEXT_DESC_PTR,
	(struct dma_register *) MDMA2_D0_NEXT_DESC_PTR,
	(struct dma_register *) MDMA2_S0_NEXT_DESC_PTR,
	(struct dma_register *) MDMA2_D1_NEXT_DESC_PTR,
	(struct dma_register *) MDMA2_S1_NEXT_DESC_PTR,
	(struct dma_register *) IMDMA_D0_NEXT_DESC_PTR,
	(struct dma_register *) IMDMA_S0_NEXT_DESC_PTR,
	(struct dma_register *) IMDMA_D1_NEXT_DESC_PTR,
	(struct dma_register *) IMDMA_S1_NEXT_DESC_PTR,
};
#else
static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
	(struct dma_register *) DMA0_NEXT_DESC_PTR,
	(struct dma_register *) DMA1_NEXT_DESC_PTR,
	(struct dma_register *) DMA2_NEXT_DESC_PTR,
	(struct dma_register *) DMA3_NEXT_DESC_PTR,
	(struct dma_register *) DMA4_NEXT_DESC_PTR,
	(struct dma_register *) DMA5_NEXT_DESC_PTR,
	(struct dma_register *) DMA6_NEXT_DESC_PTR,
	(struct dma_register *) DMA7_NEXT_DESC_PTR,
#if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
	(struct dma_register *) DMA8_NEXT_DESC_PTR,
	(struct dma_register *) DMA9_NEXT_DESC_PTR,
	(struct dma_register *) DMA10_NEXT_DESC_PTR,
	(struct dma_register *) DMA11_NEXT_DESC_PTR,
#endif
	(struct dma_register *) MDMA_D0_NEXT_DESC_PTR,
	(struct dma_register *) MDMA_S0_NEXT_DESC_PTR,
	(struct dma_register *) MDMA_D1_NEXT_DESC_PTR,
	(struct dma_register *) MDMA_S1_NEXT_DESC_PTR,
};
#endif
109
110 /*------------------------------------------------------------------------------
111  *       Set the Buffer Clear bit in the Configuration register of specific DMA
112  *       channel. This will stop the descriptor based DMA operation.
113  *-----------------------------------------------------------------------------*/
114 static void clear_dma_buffer(unsigned int channel)
115 {
116         dma_ch[channel].regs->cfg |= RESTART;
117         SSYNC();
118         dma_ch[channel].regs->cfg &= ~RESTART;
119         SSYNC();
120 }
121
122 static int __init blackfin_dma_init(void)
123 {
124         int i;
125
126         printk(KERN_INFO "Blackfin DMA Controller\n");
127
128         for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
129                 dma_ch[i].chan_status = DMA_CHANNEL_FREE;
130                 dma_ch[i].regs = base_addr[i];
131                 mutex_init(&(dma_ch[i].dmalock));
132         }
133         /* Mark MEMDMA Channel 0 as requested since we're using it internally */
134         dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
135         dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;
136         return 0;
137 }
138
139 arch_initcall(blackfin_dma_init);
140
141 /*
142  *      Form the channel find the irq number for that channel.
143  */
144 #if !defined(CONFIG_BF561)
145
146 static int bf533_channel2irq(unsigned int channel)
147 {
148         int ret_irq = -1;
149
150         switch (channel) {
151         case CH_PPI:
152                 ret_irq = IRQ_PPI;
153                 break;
154
155 #if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
156         case CH_EMAC_RX:
157                 ret_irq = IRQ_MAC_RX;
158                 break;
159
160         case CH_EMAC_TX:
161                 ret_irq = IRQ_MAC_TX;
162                 break;
163
164         case CH_UART1_RX:
165                 ret_irq = IRQ_UART1_RX;
166                 break;
167
168         case CH_UART1_TX:
169                 ret_irq = IRQ_UART1_TX;
170                 break;
171 #endif
172
173         case CH_SPORT0_RX:
174                 ret_irq = IRQ_SPORT0_RX;
175                 break;
176
177         case CH_SPORT0_TX:
178                 ret_irq = IRQ_SPORT0_TX;
179                 break;
180
181         case CH_SPORT1_RX:
182                 ret_irq = IRQ_SPORT1_RX;
183                 break;
184
185         case CH_SPORT1_TX:
186                 ret_irq = IRQ_SPORT1_TX;
187                 break;
188
189         case CH_SPI:
190                 ret_irq = IRQ_SPI;
191                 break;
192
193         case CH_UART_RX:
194                 ret_irq = IRQ_UART_RX;
195                 break;
196
197         case CH_UART_TX:
198                 ret_irq = IRQ_UART_TX;
199                 break;
200
201         case CH_MEM_STREAM0_SRC:
202         case CH_MEM_STREAM0_DEST:
203                 ret_irq = IRQ_MEM_DMA0;
204                 break;
205
206         case CH_MEM_STREAM1_SRC:
207         case CH_MEM_STREAM1_DEST:
208                 ret_irq = IRQ_MEM_DMA1;
209                 break;
210         }
211         return ret_irq;
212 }
213
214 # define channel2irq(channel) bf533_channel2irq(channel)
215
216 #else
217
218 static int bf561_channel2irq(unsigned int channel)
219 {
220         int ret_irq = -1;
221
222         switch (channel) {
223         case CH_PPI0:
224                 ret_irq = IRQ_PPI0;
225                 break;
226         case CH_PPI1:
227                 ret_irq = IRQ_PPI1;
228                 break;
229         case CH_SPORT0_RX:
230                 ret_irq = IRQ_SPORT0_RX;
231                 break;
232         case CH_SPORT0_TX:
233                 ret_irq = IRQ_SPORT0_TX;
234                 break;
235         case CH_SPORT1_RX:
236                 ret_irq = IRQ_SPORT1_RX;
237                 break;
238         case CH_SPORT1_TX:
239                 ret_irq = IRQ_SPORT1_TX;
240                 break;
241         case CH_SPI:
242                 ret_irq = IRQ_SPI;
243                 break;
244         case CH_UART_RX:
245                 ret_irq = IRQ_UART_RX;
246                 break;
247         case CH_UART_TX:
248                 ret_irq = IRQ_UART_TX;
249                 break;
250
251         case CH_MEM_STREAM0_SRC:
252         case CH_MEM_STREAM0_DEST:
253                 ret_irq = IRQ_MEM_DMA0;
254                 break;
255         case CH_MEM_STREAM1_SRC:
256         case CH_MEM_STREAM1_DEST:
257                 ret_irq = IRQ_MEM_DMA1;
258                 break;
259         case CH_MEM_STREAM2_SRC:
260         case CH_MEM_STREAM2_DEST:
261                 ret_irq = IRQ_MEM_DMA2;
262                 break;
263         case CH_MEM_STREAM3_SRC:
264         case CH_MEM_STREAM3_DEST:
265                 ret_irq = IRQ_MEM_DMA3;
266                 break;
267
268         case CH_IMEM_STREAM0_SRC:
269         case CH_IMEM_STREAM0_DEST:
270                 ret_irq = IRQ_IMEM_DMA0;
271                 break;
272         case CH_IMEM_STREAM1_SRC:
273         case CH_IMEM_STREAM1_DEST:
274                 ret_irq = IRQ_IMEM_DMA1;
275                 break;
276         }
277         return ret_irq;
278 }
279
280 # define channel2irq(channel) bf561_channel2irq(channel)
281
282 #endif
283
284 /*------------------------------------------------------------------------------
285  *      Request the specific DMA channel from the system.
286  *-----------------------------------------------------------------------------*/
287 int request_dma(unsigned int channel, char *device_id)
288 {
289
290         pr_debug("request_dma() : BEGIN \n");
291         mutex_lock(&(dma_ch[channel].dmalock));
292
293         if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
294             || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
295                 mutex_unlock(&(dma_ch[channel].dmalock));
296                 pr_debug("DMA CHANNEL IN USE  \n");
297                 return -EBUSY;
298         } else {
299                 dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
300                 pr_debug("DMA CHANNEL IS ALLOCATED  \n");
301         }
302
303         mutex_unlock(&(dma_ch[channel].dmalock));
304
305         dma_ch[channel].device_id = device_id;
306         dma_ch[channel].irq_callback = NULL;
307
308         /* This is to be enabled by putting a restriction -
309          * you have to request DMA, before doing any operations on
310          * descriptor/channel
311          */
312         pr_debug("request_dma() : END  \n");
313         return channel;
314 }
315 EXPORT_SYMBOL(request_dma);
316
317 int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
318 {
319         int ret_irq = 0;
320
321         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
322                && channel < MAX_BLACKFIN_DMA_CHANNEL));
323
324         if (callback != NULL) {
325                 int ret_val;
326                 ret_irq = channel2irq(channel);
327
328                 dma_ch[channel].data = data;
329
330                 ret_val =
331                     request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
332                                 dma_ch[channel].device_id, data);
333                 if (ret_val) {
334                         printk(KERN_NOTICE
335                                "Request irq in DMA engine failed.\n");
336                         return -EPERM;
337                 }
338                 dma_ch[channel].irq_callback = callback;
339         }
340         return 0;
341 }
342 EXPORT_SYMBOL(set_dma_callback);
343
344 void free_dma(unsigned int channel)
345 {
346         int ret_irq;
347
348         pr_debug("freedma() : BEGIN \n");
349         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
350                && channel < MAX_BLACKFIN_DMA_CHANNEL));
351
352         /* Halt the DMA */
353         disable_dma(channel);
354         clear_dma_buffer(channel);
355
356         if (dma_ch[channel].irq_callback != NULL) {
357                 ret_irq = channel2irq(channel);
358                 free_irq(ret_irq, dma_ch[channel].data);
359         }
360
361         /* Clear the DMA Variable in the Channel */
362         mutex_lock(&(dma_ch[channel].dmalock));
363         dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
364         mutex_unlock(&(dma_ch[channel].dmalock));
365
366         pr_debug("freedma() : END \n");
367 }
368 EXPORT_SYMBOL(free_dma);
369
370 void dma_enable_irq(unsigned int channel)
371 {
372         int ret_irq;
373
374         pr_debug("dma_enable_irq() : BEGIN \n");
375         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
376                && channel < MAX_BLACKFIN_DMA_CHANNEL));
377
378         ret_irq = channel2irq(channel);
379         enable_irq(ret_irq);
380 }
381 EXPORT_SYMBOL(dma_enable_irq);
382
383 void dma_disable_irq(unsigned int channel)
384 {
385         int ret_irq;
386
387         pr_debug("dma_disable_irq() : BEGIN \n");
388         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
389                && channel < MAX_BLACKFIN_DMA_CHANNEL));
390
391         ret_irq = channel2irq(channel);
392         disable_irq(ret_irq);
393 }
394 EXPORT_SYMBOL(dma_disable_irq);
395
396 int dma_channel_active(unsigned int channel)
397 {
398         if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
399                 return 0;
400         } else {
401                 return 1;
402         }
403 }
404 EXPORT_SYMBOL(dma_channel_active);
405
406 /*------------------------------------------------------------------------------
407 *       stop the specific DMA channel.
408 *-----------------------------------------------------------------------------*/
409 void disable_dma(unsigned int channel)
410 {
411         pr_debug("stop_dma() : BEGIN \n");
412
413         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
414                && channel < MAX_BLACKFIN_DMA_CHANNEL));
415
416         dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clean the enable bit */
417         SSYNC();
418         dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
419         /* Needs to be enabled Later */
420         pr_debug("stop_dma() : END \n");
421         return;
422 }
423 EXPORT_SYMBOL(disable_dma);
424
425 void enable_dma(unsigned int channel)
426 {
427         pr_debug("enable_dma() : BEGIN \n");
428
429         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
430                && channel < MAX_BLACKFIN_DMA_CHANNEL));
431
432         dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
433         dma_ch[channel].regs->curr_x_count = 0;
434         dma_ch[channel].regs->curr_y_count = 0;
435
436         dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
437         SSYNC();
438         pr_debug("enable_dma() : END \n");
439         return;
440 }
441 EXPORT_SYMBOL(enable_dma);
442
443 /*------------------------------------------------------------------------------
444 *               Set the Start Address register for the specific DMA channel
445 *               This function can be used for register based DMA,
446 *               to setup the start address
447 *               addr:           Starting address of the DMA Data to be transferred.
448 *-----------------------------------------------------------------------------*/
449 void set_dma_start_addr(unsigned int channel, unsigned long addr)
450 {
451         pr_debug("set_dma_start_addr() : BEGIN \n");
452
453         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
454                && channel < MAX_BLACKFIN_DMA_CHANNEL));
455
456         dma_ch[channel].regs->start_addr = addr;
457         SSYNC();
458         pr_debug("set_dma_start_addr() : END\n");
459 }
460 EXPORT_SYMBOL(set_dma_start_addr);
461
462 void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
463 {
464         pr_debug("set_dma_next_desc_addr() : BEGIN \n");
465
466         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
467                && channel < MAX_BLACKFIN_DMA_CHANNEL));
468
469         dma_ch[channel].regs->next_desc_ptr = addr;
470         SSYNC();
471         pr_debug("set_dma_start_addr() : END\n");
472 }
473 EXPORT_SYMBOL(set_dma_next_desc_addr);
474
475 void set_dma_x_count(unsigned int channel, unsigned short x_count)
476 {
477         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
478                && channel < MAX_BLACKFIN_DMA_CHANNEL));
479
480         dma_ch[channel].regs->x_count = x_count;
481         SSYNC();
482 }
483 EXPORT_SYMBOL(set_dma_x_count);
484
485 void set_dma_y_count(unsigned int channel, unsigned short y_count)
486 {
487         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
488                && channel < MAX_BLACKFIN_DMA_CHANNEL));
489
490         dma_ch[channel].regs->y_count = y_count;
491         SSYNC();
492 }
493 EXPORT_SYMBOL(set_dma_y_count);
494
495 void set_dma_x_modify(unsigned int channel, short x_modify)
496 {
497         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
498                && channel < MAX_BLACKFIN_DMA_CHANNEL));
499
500         dma_ch[channel].regs->x_modify = x_modify;
501         SSYNC();
502 }
503 EXPORT_SYMBOL(set_dma_x_modify);
504
505 void set_dma_y_modify(unsigned int channel, short y_modify)
506 {
507         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
508                && channel < MAX_BLACKFIN_DMA_CHANNEL));
509
510         dma_ch[channel].regs->y_modify = y_modify;
511         SSYNC();
512 }
513 EXPORT_SYMBOL(set_dma_y_modify);
514
515 void set_dma_config(unsigned int channel, unsigned short config)
516 {
517         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
518                && channel < MAX_BLACKFIN_DMA_CHANNEL));
519
520         dma_ch[channel].regs->cfg = config;
521         SSYNC();
522 }
523 EXPORT_SYMBOL(set_dma_config);
524
525 unsigned short
526 set_bfin_dma_config(char direction, char flow_mode,
527                     char intr_mode, char dma_mode, char width)
528 {
529         unsigned short config;
530
531         config =
532             ((direction << 1) | (width << 2) | (dma_mode << 4) |
533              (intr_mode << 6) | (flow_mode << 12) | RESTART);
534         return config;
535 }
536 EXPORT_SYMBOL(set_bfin_dma_config);
537
538 void set_dma_sg(unsigned int channel, struct dmasg * sg, int nr_sg)
539 {
540         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
541                && channel < MAX_BLACKFIN_DMA_CHANNEL));
542
543         dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);
544
545         dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;
546
547         SSYNC();
548 }
549 EXPORT_SYMBOL(set_dma_sg);
550
551 /*------------------------------------------------------------------------------
552  *      Get the DMA status of a specific DMA channel from the system.
553  *-----------------------------------------------------------------------------*/
554 unsigned short get_dma_curr_irqstat(unsigned int channel)
555 {
556         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
557                && channel < MAX_BLACKFIN_DMA_CHANNEL));
558
559         return dma_ch[channel].regs->irq_status;
560 }
561 EXPORT_SYMBOL(get_dma_curr_irqstat);
562
563 /*------------------------------------------------------------------------------
564  *      Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
565  *-----------------------------------------------------------------------------*/
566 void clear_dma_irqstat(unsigned int channel)
567 {
568         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
569                && channel < MAX_BLACKFIN_DMA_CHANNEL));
570         dma_ch[channel].regs->irq_status |= 3;
571 }
572 EXPORT_SYMBOL(clear_dma_irqstat);
573
574 /*------------------------------------------------------------------------------
575  *      Get current DMA xcount of a specific DMA channel from the system.
576  *-----------------------------------------------------------------------------*/
577 unsigned short get_dma_curr_xcount(unsigned int channel)
578 {
579         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
580                && channel < MAX_BLACKFIN_DMA_CHANNEL));
581
582         return dma_ch[channel].regs->curr_x_count;
583 }
584 EXPORT_SYMBOL(get_dma_curr_xcount);
585
586 /*------------------------------------------------------------------------------
587  *      Get current DMA ycount of a specific DMA channel from the system.
588  *-----------------------------------------------------------------------------*/
589 unsigned short get_dma_curr_ycount(unsigned int channel)
590 {
591         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
592                && channel < MAX_BLACKFIN_DMA_CHANNEL));
593
594         return dma_ch[channel].regs->curr_y_count;
595 }
596 EXPORT_SYMBOL(get_dma_curr_ycount);
597
/*
 * Copy @size bytes from @src to @dest using memory DMA stream 0.
 * Runs with interrupts disabled and busy-waits for completion, so it
 * is only suitable for synchronous copies.  Copies backwards when
 * src < dest so overlapping regions are handled.  Returns @dest, or
 * NULL when @size is 0.  Caller must ensure MDMA stream 0 is not in
 * use concurrently (it is reserved in blackfin_dma_init()).
 */
void *_dma_memcpy(void *dest, const void *src, size_t size)
{
	int direction;	/* 1 - address decrease, 0 - address increase */
	int flag_align;	/* 1 - address aligned,  0 - address unaligned */
	int flag_2D;	/* 1 - 2D DMA needed,    0 - 1D DMA needed */
	unsigned long flags;

	/* size_t is unsigned, so this only rejects size == 0 */
	if (size <= 0)
		return NULL;

	local_irq_save(flags);

	/* Write back cached source data so the DMA engine sees it */
	if ((unsigned long)src < memory_end)
		blackfin_dcache_flush_range((unsigned int)src,
					    (unsigned int)(src + size));

	/* Clear stale completion/error status before starting */
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Copy backwards when a forward copy could clobber the source */
	if ((unsigned long)src < (unsigned long)dest)
		direction = 1;
	else
		direction = 0;

	/* 16-bit transfers need both addresses and the length 2-byte aligned */
	if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
	    && ((size % 2) == 0))
		flag_align = 1;
	else
		flag_align = 0;

	/* X_COUNT is 16 bits wide, so copies above 64K use 2D DMA:
	 * 1024-element rows (512 when 16-bit), size >> 10 rows */
	if (size > 0x10000)	/* size > 64K */
		flag_2D = 1;
	else
		flag_2D = 0;

	/* Setup destination and source start address
	 * (for a backward copy, start at the last element) */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
			bfin_write_MDMA_S0_START_ADDR(src + size - 2);
		} else {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
			bfin_write_MDMA_S0_START_ADDR(src + size - 1);
		}
	} else {
		bfin_write_MDMA_D0_START_ADDR(dest);
		bfin_write_MDMA_S0_START_ADDR(src);
	}

	/* Setup destination and source xcount */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(1024 / 2);
			bfin_write_MDMA_S0_X_COUNT(1024 / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(1024);
			bfin_write_MDMA_S0_X_COUNT(1024);
		}
		bfin_write_MDMA_D0_Y_COUNT(size >> 10);
		bfin_write_MDMA_S0_Y_COUNT(size >> 10);
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(size / 2);
			bfin_write_MDMA_S0_X_COUNT(size / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(size);
			bfin_write_MDMA_S0_X_COUNT(size);
		}
	}

	/* Setup destination and source xmodify and ymodify
	 * (stride is +/-2 for 16-bit transfers, +/-1 for 8-bit;
	 * negative when copying backwards) */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(-2);
			bfin_write_MDMA_S0_X_MODIFY(-2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-2);
				bfin_write_MDMA_S0_Y_MODIFY(-2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(-1);
			bfin_write_MDMA_S0_X_MODIFY(-1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-1);
				bfin_write_MDMA_S0_Y_MODIFY(-1);
			}
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(2);
			bfin_write_MDMA_S0_X_MODIFY(2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(2);
				bfin_write_MDMA_S0_Y_MODIFY(2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(1);
			bfin_write_MDMA_S0_X_MODIFY(1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(1);
				bfin_write_MDMA_S0_Y_MODIFY(1);
			}
		}
	}

	/* Enable source DMA, then the destination channel (WNR = write) */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
		}
	}

	/* Busy-wait for the destination channel to signal completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
		;

	bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
				      (DMA_DONE | DMA_ERR));

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);

	/* Discard stale cached destination lines so CPU reads see the
	 * data the DMA just wrote */
	if ((unsigned long)dest < memory_end)
		blackfin_dcache_invalidate_range((unsigned int)dest,
						 (unsigned int)(dest + size));
	local_irq_restore(flags);

	return dest;
}
737
738 void *dma_memcpy(void *dest, const void *src, size_t size)
739 {
740         size_t bulk;
741         size_t rest;
742         void * addr;
743
744         bulk = (size >> 16) << 16;
745         rest = size - bulk;
746         if (bulk)
747                 _dma_memcpy(dest, src, bulk);
748         addr = _dma_memcpy(dest+bulk, src+bulk, rest);
749         return addr;
750 }
751
752 EXPORT_SYMBOL(dma_memcpy);
753
754 void *safe_dma_memcpy(void *dest, const void *src, size_t size)
755 {
756         void *addr;
757         addr = dma_memcpy(dest, src, size);
758         return addr;
759 }
760 EXPORT_SYMBOL(safe_dma_memcpy);
761
/*
 * Write @len bytes from memory @buf to the fixed I/O address @addr
 * using MDMA stream 0.  Disables interrupts and busy-waits until the
 * transfer completes.
 */
void dma_outsb(void __iomem *addr, const void *buf, unsigned short len)
{

	unsigned long flags;

	local_irq_save(flags);

	/* Write back cached source data so the DMA engine sees it */
	blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);

	/* Destination: fixed I/O address (modify = 0) */
	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Source: incrementing memory buffer, 1 byte per transfer */
	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(1);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* 8-bit transfers; WNR marks the destination as the write side */
	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	/* Busy-wait for completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsb);
794
795
/*
 * Read @len bytes from the fixed I/O address @addr into memory @buf
 * using MDMA stream 0.  Disables interrupts and busy-waits until the
 * transfer completes.
 */
void dma_insb(const void __iomem *addr, void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);
	/* Destination: incrementing memory buffer, 1 byte per transfer */
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(1);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Source: fixed I/O address (modify = 0) */
	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	/* Drop stale cached lines so CPU reads see the DMA'd data */
	blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

	/* Busy-wait for completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insb);
826
/*
 * Write @len 16-bit words from memory @buf to the fixed I/O address
 * @addr using MDMA stream 0.  Disables interrupts and busy-waits until
 * the transfer completes.
 */
void dma_outsw(void __iomem *addr, const void  *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	/* Write back cached source data so the DMA engine sees it */
	blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);

	/* Destination: fixed I/O address (modify = 0) */
	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Source: incrementing memory buffer, 2 bytes per transfer */
	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(2);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	/* Busy-wait for completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsw);
858
/*
 * Read @len 16-bit words from the fixed I/O address @addr into memory
 * @buf using MDMA stream 0.  Disables interrupts and busy-waits until
 * the transfer completes.
 */
void dma_insw(const void __iomem *addr, void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	/* Destination: incrementing memory buffer, 2 bytes per transfer */
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(2);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Source: fixed I/O address (modify = 0) */
	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	/* Drop stale cached lines so CPU reads see the DMA'd data */
	blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

	/* Busy-wait for completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insw);
890
/*
 * Write @len 32-bit words from memory @buf to the fixed I/O address
 * @addr using MDMA stream 0.  Disables interrupts and busy-waits until
 * the transfer completes.
 */
void dma_outsl(void __iomem *addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	/* Write back cached source data so the DMA engine sees it */
	blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);

	/* Destination: fixed I/O address (modify = 0) */
	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Source: incrementing memory buffer, 4 bytes per transfer */
	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(4);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	/* Busy-wait for completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsl);
922
/*
 * Read @len 32-bit words from the fixed I/O address @addr into memory
 * @buf using MDMA stream 0.  Disables interrupts and busy-waits until
 * the transfer completes.
 */
void dma_insl(const void __iomem *addr, void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	/* Destination: incrementing memory buffer, 4 bytes per transfer */
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(4);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	/* Source: fixed I/O address (modify = 0) */
	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	/* Drop stale cached lines so CPU reads see the DMA'd data */
	blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

	/* Busy-wait for completion */
	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insl);