Blackfin arch: add set_dma_curr_addr DMA API to support sound driver recording function
[powerpc.git] / arch / blackfin / kernel / bfin_dma_5xx.c
1 /*
2  * File:         arch/blackfin/kernel/bfin_dma_5xx.c
3  * Based on:
4  * Author:
5  *
6  * Created:
7  * Description:  This file contains the simple DMA Implementation for Blackfin
8  *
9  * Modified:
10  *               Copyright 2004-2006 Analog Devices Inc.
11  *
12  * Bugs:         Enter bugs at http://blackfin.uclinux.org/
13  *
14  * This program is free software; you can redistribute it and/or modify
15  * it under the terms of the GNU General Public License as published by
16  * the Free Software Foundation; either version 2 of the License, or
17  * (at your option) any later version.
18  *
19  * This program is distributed in the hope that it will be useful,
20  * but WITHOUT ANY WARRANTY; without even the implied warranty of
21  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
22  * GNU General Public License for more details.
23  *
24  * You should have received a copy of the GNU General Public License
25  * along with this program; if not, see the file COPYING, or write
26  * to the Free Software Foundation, Inc.,
27  * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
28  */
29
30 #include <linux/errno.h>
31 #include <linux/module.h>
32 #include <linux/sched.h>
33 #include <linux/interrupt.h>
34 #include <linux/kernel.h>
35 #include <linux/param.h>
36
37 #include <asm/blackfin.h>
38 #include <asm/dma.h>
39 #include <asm/cacheflush.h>
40
41 /* Remove unused code not exported by symbol or internally called */
42 #define REMOVE_DEAD_CODE
43
44 /**************************************************************************
45  * Global Variables
46 ***************************************************************************/
47
48 static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
49
/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of specific DMA
 *       channel. This will stop the descriptor based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        /* Pulse RESTART (set, sync, clear, sync) to flush the channel's
         * internal FIFO/buffer state.  Caller must already own the
         * channel; no locking is done here. */
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}
61
/*
 * One-time DMA controller setup, run at arch_initcall time.
 * Marks every channel free, binds each channel's register block
 * (base_addr[] is provided by the platform DMA header) and
 * initialises the per-channel allocation lock.
 */
static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;
        return 0;
}
78
79 arch_initcall(blackfin_dma_init);
80
81 /*------------------------------------------------------------------------------
82  *      Request the specific DMA channel from the system.
83  *-----------------------------------------------------------------------------*/
84 int request_dma(unsigned int channel, char *device_id)
85 {
86
87         pr_debug("request_dma() : BEGIN \n");
88         mutex_lock(&(dma_ch[channel].dmalock));
89
90         if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
91             || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
92                 mutex_unlock(&(dma_ch[channel].dmalock));
93                 pr_debug("DMA CHANNEL IN USE  \n");
94                 return -EBUSY;
95         } else {
96                 dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
97                 pr_debug("DMA CHANNEL IS ALLOCATED  \n");
98         }
99
100         mutex_unlock(&(dma_ch[channel].dmalock));
101
102         dma_ch[channel].device_id = device_id;
103         dma_ch[channel].irq_callback = NULL;
104
105         /* This is to be enabled by putting a restriction -
106          * you have to request DMA, before doing any operations on
107          * descriptor/channel
108          */
109         pr_debug("request_dma() : END  \n");
110         return channel;
111 }
112 EXPORT_SYMBOL(request_dma);
113
114 int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
115 {
116         int ret_irq = 0;
117
118         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
119                && channel < MAX_BLACKFIN_DMA_CHANNEL));
120
121         if (callback != NULL) {
122                 int ret_val;
123                 ret_irq = channel2irq(channel);
124
125                 dma_ch[channel].data = data;
126
127                 ret_val =
128                     request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
129                                 dma_ch[channel].device_id, data);
130                 if (ret_val) {
131                         printk(KERN_NOTICE
132                                "Request irq in DMA engine failed.\n");
133                         return -EPERM;
134                 }
135                 dma_ch[channel].irq_callback = callback;
136         }
137         return 0;
138 }
139 EXPORT_SYMBOL(set_dma_callback);
140
141 void free_dma(unsigned int channel)
142 {
143         int ret_irq;
144
145         pr_debug("freedma() : BEGIN \n");
146         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
147                && channel < MAX_BLACKFIN_DMA_CHANNEL));
148
149         /* Halt the DMA */
150         disable_dma(channel);
151         clear_dma_buffer(channel);
152
153         if (dma_ch[channel].irq_callback != NULL) {
154                 ret_irq = channel2irq(channel);
155                 free_irq(ret_irq, dma_ch[channel].data);
156         }
157
158         /* Clear the DMA Variable in the Channel */
159         mutex_lock(&(dma_ch[channel].dmalock));
160         dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
161         mutex_unlock(&(dma_ch[channel].dmalock));
162
163         pr_debug("freedma() : END \n");
164 }
165 EXPORT_SYMBOL(free_dma);
166
167 void dma_enable_irq(unsigned int channel)
168 {
169         int ret_irq;
170
171         pr_debug("dma_enable_irq() : BEGIN \n");
172         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
173                && channel < MAX_BLACKFIN_DMA_CHANNEL));
174
175         ret_irq = channel2irq(channel);
176         enable_irq(ret_irq);
177 }
178 EXPORT_SYMBOL(dma_enable_irq);
179
180 void dma_disable_irq(unsigned int channel)
181 {
182         int ret_irq;
183
184         pr_debug("dma_disable_irq() : BEGIN \n");
185         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
186                && channel < MAX_BLACKFIN_DMA_CHANNEL));
187
188         ret_irq = channel2irq(channel);
189         disable_irq(ret_irq);
190 }
191 EXPORT_SYMBOL(dma_disable_irq);
192
193 int dma_channel_active(unsigned int channel)
194 {
195         if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
196                 return 0;
197         } else {
198                 return 1;
199         }
200 }
201 EXPORT_SYMBOL(dma_channel_active);
202
203 /*------------------------------------------------------------------------------
204 *       stop the specific DMA channel.
205 *-----------------------------------------------------------------------------*/
206 void disable_dma(unsigned int channel)
207 {
208         pr_debug("stop_dma() : BEGIN \n");
209
210         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
211                && channel < MAX_BLACKFIN_DMA_CHANNEL));
212
213         dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clean the enable bit */
214         SSYNC();
215         dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
216         /* Needs to be enabled Later */
217         pr_debug("stop_dma() : END \n");
218         return;
219 }
220 EXPORT_SYMBOL(disable_dma);
221
222 void enable_dma(unsigned int channel)
223 {
224         pr_debug("enable_dma() : BEGIN \n");
225
226         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
227                && channel < MAX_BLACKFIN_DMA_CHANNEL));
228
229         dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
230         dma_ch[channel].regs->curr_x_count = 0;
231         dma_ch[channel].regs->curr_y_count = 0;
232
233         dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
234         SSYNC();
235         pr_debug("enable_dma() : END \n");
236         return;
237 }
238 EXPORT_SYMBOL(enable_dma);
239
240 /*------------------------------------------------------------------------------
241 *               Set the Start Address register for the specific DMA channel
242 *               This function can be used for register based DMA,
243 *               to setup the start address
244 *               addr:           Starting address of the DMA Data to be transferred.
245 *-----------------------------------------------------------------------------*/
246 void set_dma_start_addr(unsigned int channel, unsigned long addr)
247 {
248         pr_debug("set_dma_start_addr() : BEGIN \n");
249
250         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
251                && channel < MAX_BLACKFIN_DMA_CHANNEL));
252
253         dma_ch[channel].regs->start_addr = addr;
254         SSYNC();
255         pr_debug("set_dma_start_addr() : END\n");
256 }
257 EXPORT_SYMBOL(set_dma_start_addr);
258
259 void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
260 {
261         pr_debug("set_dma_next_desc_addr() : BEGIN \n");
262
263         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
264                && channel < MAX_BLACKFIN_DMA_CHANNEL));
265
266         dma_ch[channel].regs->next_desc_ptr = addr;
267         SSYNC();
268         pr_debug("set_dma_start_addr() : END\n");
269 }
270 EXPORT_SYMBOL(set_dma_next_desc_addr);
271
272 void set_dma_x_count(unsigned int channel, unsigned short x_count)
273 {
274         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
275                && channel < MAX_BLACKFIN_DMA_CHANNEL));
276
277         dma_ch[channel].regs->x_count = x_count;
278         SSYNC();
279 }
280 EXPORT_SYMBOL(set_dma_x_count);
281
282 void set_dma_y_count(unsigned int channel, unsigned short y_count)
283 {
284         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
285                && channel < MAX_BLACKFIN_DMA_CHANNEL));
286
287         dma_ch[channel].regs->y_count = y_count;
288         SSYNC();
289 }
290 EXPORT_SYMBOL(set_dma_y_count);
291
292 void set_dma_x_modify(unsigned int channel, short x_modify)
293 {
294         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
295                && channel < MAX_BLACKFIN_DMA_CHANNEL));
296
297         dma_ch[channel].regs->x_modify = x_modify;
298         SSYNC();
299 }
300 EXPORT_SYMBOL(set_dma_x_modify);
301
302 void set_dma_y_modify(unsigned int channel, short y_modify)
303 {
304         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
305                && channel < MAX_BLACKFIN_DMA_CHANNEL));
306
307         dma_ch[channel].regs->y_modify = y_modify;
308         SSYNC();
309 }
310 EXPORT_SYMBOL(set_dma_y_modify);
311
312 void set_dma_config(unsigned int channel, unsigned short config)
313 {
314         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
315                && channel < MAX_BLACKFIN_DMA_CHANNEL));
316
317         dma_ch[channel].regs->cfg = config;
318         SSYNC();
319 }
320 EXPORT_SYMBOL(set_dma_config);
321
322 unsigned short
323 set_bfin_dma_config(char direction, char flow_mode,
324                     char intr_mode, char dma_mode, char width)
325 {
326         unsigned short config;
327
328         config =
329             ((direction << 1) | (width << 2) | (dma_mode << 4) |
330              (intr_mode << 6) | (flow_mode << 12) | RESTART);
331         return config;
332 }
333 EXPORT_SYMBOL(set_bfin_dma_config);
334
335 void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
336 {
337         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
338                && channel < MAX_BLACKFIN_DMA_CHANNEL));
339
340         dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);
341
342         dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;
343
344         SSYNC();
345 }
346 EXPORT_SYMBOL(set_dma_sg);
347
348 void set_dma_curr_addr(unsigned int channel, unsigned long addr)
349 {
350         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
351                && channel < MAX_BLACKFIN_DMA_CHANNEL));
352
353         dma_ch[channel].regs->curr_addr_ptr = addr;
354         SSYNC();
355 }
356 EXPORT_SYMBOL(set_dma_curr_addr);
357
358 /*------------------------------------------------------------------------------
359  *      Get the DMA status of a specific DMA channel from the system.
360  *-----------------------------------------------------------------------------*/
361 unsigned short get_dma_curr_irqstat(unsigned int channel)
362 {
363         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
364                && channel < MAX_BLACKFIN_DMA_CHANNEL));
365
366         return dma_ch[channel].regs->irq_status;
367 }
368 EXPORT_SYMBOL(get_dma_curr_irqstat);
369
370 /*------------------------------------------------------------------------------
371  *      Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
372  *-----------------------------------------------------------------------------*/
373 void clear_dma_irqstat(unsigned int channel)
374 {
375         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
376                && channel < MAX_BLACKFIN_DMA_CHANNEL));
377         dma_ch[channel].regs->irq_status |= 3;
378 }
379 EXPORT_SYMBOL(clear_dma_irqstat);
380
381 /*------------------------------------------------------------------------------
382  *      Get current DMA xcount of a specific DMA channel from the system.
383  *-----------------------------------------------------------------------------*/
384 unsigned short get_dma_curr_xcount(unsigned int channel)
385 {
386         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
387                && channel < MAX_BLACKFIN_DMA_CHANNEL));
388
389         return dma_ch[channel].regs->curr_x_count;
390 }
391 EXPORT_SYMBOL(get_dma_curr_xcount);
392
393 /*------------------------------------------------------------------------------
394  *      Get current DMA ycount of a specific DMA channel from the system.
395  *-----------------------------------------------------------------------------*/
396 unsigned short get_dma_curr_ycount(unsigned int channel)
397 {
398         BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
399                && channel < MAX_BLACKFIN_DMA_CHANNEL));
400
401         return dma_ch[channel].regs->curr_y_count;
402 }
403 EXPORT_SYMBOL(get_dma_curr_ycount);
404
/*
 * __dma_memcpy - copy 'size' bytes from 'src' to 'dest' with MEMDMA
 * stream 0, busy-waiting for completion with interrupts disabled.
 *
 * Picks 16-bit transfers when both addresses and the length are 2-byte
 * aligned (8-bit otherwise), switches to 2D DMA above 64K (the count
 * registers are 16 bits wide), and copies top-down when dest is above
 * src so overlapping regions behave like memmove().  Returns dest, or
 * NULL for a zero size.
 *
 * NOTE(review): the 2D path uses y_count = size >> 10 with a 1K inner
 * row, which assumes size is a multiple of 1024; dma_memcpy() only
 * passes 64K multiples here -- confirm before reusing elsewhere.
 */
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        /* size_t is unsigned, so this effectively checks size == 0 */
        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        /* Push cached source data to memory so the DMA engine reads the
         * current contents (only for cacheable DRAM addresses). */
        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        /* Clear stale completion/error status (write-1-to-clear bits). */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                /* Descending copy: start at the last element (2 bytes
                 * back for 16-bit transfers, 1 for 8-bit). */
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                /* 2D: 1K-byte inner rows, size/1K outer iterations. */
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify:
         * +/-2 for 16-bit element strides, +/-1 for 8-bit, negative
         * when copying top-down. */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA, then destination (WNR = memory write,
         * DI_EN = interrupt/status on completion). */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        /* Busy-wait for the destination channel to finish. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        /* Discard stale destination cache lines so subsequent CPU reads
         * observe the DMA'd data. */
        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));
        local_irq_restore(flags);

        return dest;
}
544
545 void *dma_memcpy(void *dest, const void *src, size_t size)
546 {
547         size_t bulk;
548         size_t rest;
549         void * addr;
550
551         bulk = (size >> 16) << 16;
552         rest = size - bulk;
553         if (bulk)
554                 __dma_memcpy(dest, src, bulk);
555         addr = __dma_memcpy(dest+bulk, src+bulk, rest);
556         return addr;
557 }
558 EXPORT_SYMBOL(dma_memcpy);
559
560 void *safe_dma_memcpy(void *dest, const void *src, size_t size)
561 {
562         void *addr;
563         addr = dma_memcpy(dest, src, size);
564         return addr;
565 }
566 EXPORT_SYMBOL(safe_dma_memcpy);
567
/*
 * dma_outsb - write 'len' bytes from 'buf' to the fixed device address
 * 'addr' via MEMDMA stream 0.  Busy-waits for completion with local
 * interrupts disabled, so only suitable for short transfers.
 */
void dma_outsb(void __iomem *addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* Push the source bytes out of the data cache before DMA reads them. */
        blackfin_dcache_flush_range((unsigned int)buf, (unsigned int)(buf) + len);

        /* Destination = device register; modify 0 keeps the address fixed. */
        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* Source = memory buffer, advancing one byte per transfer. */
        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* 8-bit word size; enable source first, then destination. */
        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        /* Spin until the destination channel signals completion. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        /* Acknowledge (W1C) and tear the channels back down. */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsb);
599
600
/*
 * dma_insb - read 'len' bytes from the fixed device address 'addr'
 * into 'buf' via MEMDMA stream 0.  Busy-waits for completion with
 * local interrupts disabled.
 */
void dma_insb(const void __iomem *addr, void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);
        /* Destination = memory buffer, advancing one byte per transfer. */
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* Source = device register; modify 0 keeps the address fixed. */
        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* 8-bit word size; enable source first, then destination. */
        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        /* NOTE(review): the buffer is invalidated while DMA may still be
         * in flight, before the completion wait below -- verify no CPU
         * access can refill these lines in between. */
        blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

        /* Spin until the destination channel signals completion. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        /* Acknowledge (W1C) and tear the channels back down. */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insb);
631
/*
 * dma_outsw - write 'len' 16-bit words from 'buf' to the fixed device
 * address 'addr' via MEMDMA stream 0.  Busy-waits for completion with
 * local interrupts disabled.
 * NOTE(review): the cache flush covers 'len' bytes, not len*2 -- the
 * count/length units look inconsistent with the 2-byte stride; confirm
 * against callers.
 */
void dma_outsw(void __iomem *addr, const void  *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* Push the source data out of the data cache before DMA reads it. */
        blackfin_dcache_flush_range((unsigned int)buf, (unsigned int)(buf) + len);

        /* Destination = device register; modify 0 keeps the address fixed. */
        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* Source = memory buffer, advancing 2 bytes per transfer. */
        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* 16-bit word size; enable source first, then destination. */
        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        /* Spin until the destination channel signals completion. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        /* Acknowledge (W1C) and tear the channels back down. */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsw);
663
/*
 * dma_insw - read 'len' 16-bit words from the fixed device address
 * 'addr' into 'buf' via MEMDMA stream 0.  Busy-waits for completion
 * with local interrupts disabled.
 * NOTE(review): cache invalidation covers 'len' bytes despite the
 * 2-byte stride, and happens before the completion wait -- verify both.
 */
void dma_insw(const void __iomem *addr, void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* Destination = memory buffer, advancing 2 bytes per transfer. */
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* Source = device register; modify 0 keeps the address fixed. */
        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* 16-bit word size; enable source first, then destination. */
        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

        /* Spin until the destination channel signals completion. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        /* Acknowledge (W1C) and tear the channels back down. */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insw);
695
/*
 * dma_outsl - write 'len' 32-bit words from 'buf' to the fixed device
 * address 'addr' via MEMDMA stream 0.  Busy-waits for completion with
 * local interrupts disabled.
 * NOTE(review): the cache flush covers 'len' bytes, not len*4 --
 * confirm the length units against callers.
 */
void dma_outsl(void __iomem *addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* Push the source data out of the data cache before DMA reads it. */
        blackfin_dcache_flush_range((unsigned int)buf, (unsigned int)(buf) + len);

        /* Destination = device register; modify 0 keeps the address fixed. */
        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* Source = memory buffer, advancing 4 bytes per transfer. */
        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* 32-bit word size; enable source first, then destination. */
        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        /* Spin until the destination channel signals completion. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        /* Acknowledge (W1C) and tear the channels back down. */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_outsl);
727
/*
 * dma_insl - read 'len' 32-bit words from the fixed device address
 * 'addr' into 'buf' via MEMDMA stream 0.  Busy-waits for completion
 * with local interrupts disabled.
 * NOTE(review): cache invalidation covers 'len' bytes despite the
 * 4-byte stride, and happens before the completion wait -- verify both.
 */
void dma_insl(const void __iomem *addr, void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* Destination = memory buffer, advancing 4 bytes per transfer. */
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* Source = device register; modify 0 keeps the address fixed. */
        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        /* 32-bit word size; enable source first, then destination. */
        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

        /* Spin until the destination channel signals completion. */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        /* Acknowledge (W1C) and tear the channels back down. */
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);

}
EXPORT_SYMBOL(dma_insl);