/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove unused code not exported by symbol or internally called */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of a specific
 *       DMA channel. This will stop the descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = dma_io_base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{

        pr_debug("request_dma() : BEGIN \n");

#if defined(CONFIG_BF561) && ANOMALY_05000182
        if (channel >= CH_IMEM_STREAM0_DEST && channel <= CH_IMEM_STREAM1_DEST) {
                if (get_cclk() > 500000000) {
                        printk(KERN_WARNING
                               "Request IMDMA failed due to ANOMALY 05000182\n");
                        return -EFAULT;
                }
        }
#endif

        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
                unsigned int per_map;
                per_map = dma_ch[channel].regs->peripheral_map & 0xFFF;
                if (strncmp(device_id, "BFIN_UART", 9) == 0)
                        dma_ch[channel].regs->peripheral_map = per_map |
                                ((channel - CH_UART2_RX + 0xC)<<12);
                else
                        dma_ch[channel].regs->peripheral_map = per_map |
                                ((channel - CH_UART2_RX + 0x6)<<12);
        }
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Note the restriction: a channel must be requested with
         * request_dma() before any other operation is performed on the
         * descriptor/channel.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
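
/*
 * Usage sketch (illustrative only, not part of this file's API): a driver
 * would typically pair request_dma() and set_dma_callback() in its setup
 * path.  The channel (CH_SPORT0_RX), device string and handler below are
 * hypothetical; a real driver uses the channel its peripheral is wired to.
 *
 *	static irqreturn_t my_rx_handler(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_SPORT0_RX);
 *		return IRQ_HANDLED;
 *	}
 *
 *	if (request_dma(CH_SPORT0_RX, "MY_SPORT_RX") < 0)
 *		return -EBUSY;
 *	set_dma_callback(CH_SPORT0_RX, my_rx_handler, dev);
 */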

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
                return 0;
        } else {
                return 1;
        }
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
*       stop the specific DMA channel.
*-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clean the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled Later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);
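
/*
 * Usage sketch (illustrative only): for a simple register-based 1D receive,
 * a driver that has already called request_dma() would program the channel
 * and then enable it roughly as below.  "channel", "buf" and "nbytes" are
 * placeholders, and the config value shown packs direction=1 (DMA writes to
 * memory), flow=0 (stop mode), intr_mode=2 (interrupt on completion),
 * dma_mode=0 (1D), width=0 (8-bit), syncmode=0 -- see set_bfin_dma_config()
 * further down.
 *
 *	set_dma_start_addr(channel, (unsigned long)buf);
 *	set_dma_x_count(channel, nbytes);
 *	set_dma_x_modify(channel, 1);
 *	set_dma_config(channel, set_bfin_dma_config(1, 0, 2, 0, 0, 0));
 *	enable_dma(channel);
 */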

/*------------------------------------------------------------------------------
 *	Set the Start Address register for the specific DMA channel.
 *	This function can be used for register-based DMA to set up the
 *	start address.
 *	addr:	starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
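
/*
 * Sketch of the packing done by set_bfin_dma_config() above (a convenience
 * summary, not a full DMAx_CONFIG register description):
 *
 *	bit  1       direction  (WNR)
 *	bits 3..2    width      (WDSIZE)
 *	bit  4       dma_mode   (DMA2D)
 *	bit  5       syncmode
 *	bits 7..6    intr_mode  (DI_SEL/DI_EN)
 *	bits 14..12  flow_mode  (FLOW)
 *
 * For example, set_bfin_dma_config(1, 0, 2, 0, 1, 0) returns
 * (1 << 1) | (1 << 2) | (2 << 6) = 0x0086, i.e. a 1D, 16-bit, memory-write
 * transfer in stop mode with an interrupt on completion.
 */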

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);
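
/*
 * Usage sketch (illustrative only): for descriptor-based DMA, a driver
 * builds an array of struct dmasg descriptors (see <asm/dma.h>), each
 * describing one buffer, and hands it to set_dma_sg(), which programs the
 * descriptor size field (NDSIZE) and the next-descriptor pointer.  The
 * array and count below are placeholders:
 *
 *	struct dmasg desc[2];	// filled in with addresses, counts, config
 *
 *	set_dma_sg(channel, desc, ARRAY_SIZE(desc));
 *	enable_dma(channel);
 */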

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);
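
/*
 * Usage sketch (illustrative only): a driver that does not register an
 * interrupt callback can poll for completion with the helpers above;
 * "channel" is a placeholder:
 *
 *	while (!(get_dma_curr_irqstat(channel) & DMA_DONE))
 *		cpu_relax();
 *	clear_dma_irqstat(channel);
 */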

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);

#ifdef CONFIG_PM
int blackfin_dma_suspend(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++) {
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
#endif
                if (dma_ch[i].chan_status == DMA_CHANNEL_ENABLED) {
                        printk(KERN_ERR "DMA Channel %d failed to suspend\n", i);
                        return -EBUSY;
                }

                dma_ch[i].saved_peripheral_map = dma_ch[i].regs->peripheral_map;
        }

        return 0;
}

void blackfin_dma_resume(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++)
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++)
#endif
                dma_ch[i].regs->peripheral_map = dma_ch[i].saved_peripheral_map;
}
#endif

static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}
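
/*
 * Worked example of the decomposition above (a sketch, assuming an aligned
 * copy): for size = 128 KiB the transfer runs as 2D DMA with
 * x_count = 1024 / 2 = 512 16-bit elements per row (1024 bytes) and
 * y_count = size >> 10 = 128 rows, which covers the full 128 KiB exactly.
 * Callers go through dma_memcpy() below, which splits requests so that the
 * 2D path only ever sees a multiple of 64 KiB.
 */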

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void * addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest+bulk, src+bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
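
/*
 * Example of the split above (illustrative): for size = 0x14000 (80 KiB),
 * bulk = (size >> 16) << 16 = 0x10000 and rest = 0x4000, so the copy is done
 * as one 64 KiB __dma_memcpy() followed by one 16 KiB __dma_memcpy().
 */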

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsb);
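
/*
 * Usage sketch (illustrative only): the dma_outsb()/dma_insb() family of
 * helpers (8-, 16- and 32-bit variants) streams a buffer to or from a fixed
 * memory-mapped address, such as a peripheral FIFO, using MDMA stream 0.
 * "fifo", "buf" and "nwords" are placeholders; len counts elements, not
 * bytes:
 *
 *	dma_outsw((unsigned long)fifo, buf, nwords);	// write 16-bit words
 *	dma_insw((unsigned long)fifo, buf, nwords);	// read 16-bit words
 */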

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insl);