2 * Author: Pete Popov <ppopov@mvista.com>
4 * 2000 (c) MontaVista, Software, Inc. This file is licensed under
5 * the terms of the GNU General Public License version 2. This program
6 * is licensed "as is" without any warranty of any kind, whether express
9 * Data structures specific to the IBM PowerPC 405 on-chip DMA controller
14 #ifndef __ASMPPC_405_DMA_H
15 #define __ASMPPC_405_DMA_H
17 #include <linux/types.h>
/* Uncomment to enable verbose debug printk output from the helpers below. */
19 /* #define DEBUG_405DMA */
/*
 * Size in bytes of the region backing one scatter/gather list
 * (the sgl_list_info_t bookkeeping header followed by the descriptor
 * array); used below as the end-of-list bound in add_405gp_dma_sgl().
 */
24 #define SGL_LIST_SIZE 4096
25 /* #define PCI_ALLOC_IS_NONCONSISTENT */
/* Number of DMA channels on the 405GP on-chip controller. */
27 #define MAX_405GP_DMA_CHANNELS 4
29 /* The maximum address that we can perform a DMA transfer to on this platform */
30 /* Doesn't really apply... */
31 #define MAX_DMA_ADDRESS 0xFFFFFFFF
/* Required by the generic kernel DMA code; meaningless on this platform. */
33 extern unsigned long ISA_DMA_THRESHOLD;
/* Software status codes returned by the DMA API functions in this file. */
40 * Function return status codes
41 * These values are used to indicate whether or not the function
42 * call was successful, or a bad/invalid parameter was passed.
44 #define DMA_STATUS_GOOD 0
45 #define DMA_STATUS_BAD_CHANNEL 1
46 #define DMA_STATUS_BAD_HANDLE 2
47 #define DMA_STATUS_BAD_MODE 3
48 #define DMA_STATUS_NULL_POINTER 4
49 #define DMA_STATUS_OUT_OF_MEMORY 5
50 #define DMA_STATUS_SGL_LIST_EMPTY 6
51 #define DMA_STATUS_GENERAL_ERROR 7
/*
 * NOTE(review): despite sharing the DMA_STATUS_ prefix with the API return
 * codes above, these are a separate namespace: decoded hardware state from
 * the DMA Status Register, not function return values.
 */
55 * These indicate status as returned from the DMA Status Register.
57 #define DMA_STATUS_NO_ERROR 0
58 #define DMA_STATUS_CS 1 /* Count Status */
59 #define DMA_STATUS_TS 2 /* Transfer Status */
60 #define DMA_STATUS_DMA_ERROR 3 /* DMA Error Occurred */
61 #define DMA_STATUS_DMA_BUSY 4 /* The channel is busy */
/*
 * Transfer-mode values, OR-able directly into the channel control register.
 * These expand DMA_TD / SET_DMA_TM() / TM_* which are #defined further down
 * in this file; that is fine because macro expansion happens at the point
 * of use, after all definitions are visible.
 */
66 * These modes are defined in a way that makes it possible to
67 * simply "or" in the value in the control register.
69 #define DMA_MODE_READ DMA_TD /* Peripheral to Memory */
70 #define DMA_MODE_WRITE 0 /* Memory to Peripheral */
71 #define DMA_MODE_MM (SET_DMA_TM(TM_S_MM)) /* memory to memory */
73 /* Device-paced memory to memory, */
74 /* device is at source address */
75 #define DMA_MODE_MM_DEVATSRC (DMA_TD | SET_DMA_TM(TM_D_MM))
77 /* Device-paced memory to memory, */
78 /* device is at destination address */
79 #define DMA_MODE_MM_DEVATDST (SET_DMA_TM(TM_D_MM))
/* Three polarity bits (request, acknowledge, end-of-transfer) per channel. */
83 * DMA Polarity Configuration Register
85 #define DMAReq0_ActiveLow (1<<31)
86 #define DMAAck0_ActiveLow (1<<30)
87 #define EOT0_ActiveLow (1<<29) /* End of Transfer */
89 #define DMAReq1_ActiveLow (1<<28)
90 #define DMAAck1_ActiveLow (1<<27)
91 #define EOT1_ActiveLow (1<<26)
93 #define DMAReq2_ActiveLow (1<<25)
94 #define DMAAck2_ActiveLow (1<<24)
95 #define EOT2_ActiveLow (1<<23)
97 #define DMAReq3_ActiveLow (1<<22)
98 #define DMAAck3_ActiveLow (1<<21)
99 #define EOT3_ActiveLow (1<<20)
102 * DMA Sleep Mode Register
104 #define SLEEP_MODE_ENABLE (1<<21)
/*
 * NOTE(review): (1<<31) left-shifts into the sign bit of a 32-bit signed
 * int, which is undefined behavior in strict ISO C; the conventional safe
 * spelling is (1u<<31). Applies to every bit-31 constant in this file.
 */
108 * DMA Status Register
110 #define DMA_CS0 (1<<31) /* Terminal Count has been reached */
111 #define DMA_CS1 (1<<30)
112 #define DMA_CS2 (1<<29)
113 #define DMA_CS3 (1<<28)
115 #define DMA_TS0 (1<<27) /* End of Transfer has been requested */
116 #define DMA_TS1 (1<<26)
117 #define DMA_TS2 (1<<25)
118 #define DMA_TS3 (1<<24)
120 #define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
121 #define DMA_CH1_ERR (1<<22)
122 #define DMA_CH2_ERR (1<<21)
123 #define DMA_CH3_ERR (1<<20)
125 #define DMA_IN_DMA_REQ0 (1<<19) /* Internal DMA Request is pending */
126 #define DMA_IN_DMA_REQ1 (1<<18)
127 #define DMA_IN_DMA_REQ2 (1<<17)
128 #define DMA_IN_DMA_REQ3 (1<<16)
130 #define DMA_EXT_DMA_REQ0 (1<<15) /* External DMA Request is pending */
131 #define DMA_EXT_DMA_REQ1 (1<<14)
132 #define DMA_EXT_DMA_REQ2 (1<<13)
133 #define DMA_EXT_DMA_REQ3 (1<<12)
135 #define DMA_CH0_BUSY (1<<11) /* DMA Channel 0 Busy */
136 #define DMA_CH1_BUSY (1<<10)
137 #define DMA_CH2_BUSY (1<<9)
138 #define DMA_CH3_BUSY (1<<8)
140 #define DMA_SG0 (1<<7) /* DMA Channel 0 Scatter/Gather in progress */
141 #define DMA_SG1 (1<<6)
142 #define DMA_SG2 (1<<5)
143 #define DMA_SG3 (1<<4)
/*
 * Bit-field accessors for the per-channel control registers (DMACR0-3).
 * Convention: SET_DMA_x(v) positions a value into the field,
 * GET_DMA_x(reg) extracts it, and DMA_x_MASK covers the whole field.
 */
148 * DMA Channel Control Registers
150 #define DMA_CH_ENABLE (1<<31) /* DMA Channel Enable */
151 #define SET_DMA_CH_ENABLE(x) (((x)&0x1)<<31)
152 #define GET_DMA_CH_ENABLE(x) (((x)&DMA_CH_ENABLE)>>31)
154 #define DMA_CIE_ENABLE (1<<30) /* DMA Channel Interrupt Enable */
155 #define SET_DMA_CIE_ENABLE(x) (((x)&0x1)<<30)
156 #define GET_DMA_CIE_ENABLE(x) (((x)&DMA_CIE_ENABLE)>>30)
/* Transfer Direction; set = peripheral-to-memory (see DMA_MODE_READ above). */
158 #define DMA_TD (1<<29)
159 #define SET_DMA_TD(x) (((x)&0x1)<<29)
160 #define GET_DMA_TD(x) (((x)&DMA_TD)>>29)
162 #define DMA_PL (1<<28) /* Peripheral Location */
163 #define SET_DMA_PL(x) (((x)&0x1)<<28)
164 #define GET_DMA_PL(x) (((x)&DMA_PL)>>28)
166 #define EXTERNAL_PERIPHERAL 0
167 #define INTERNAL_PERIPHERAL 1
170 #define SET_DMA_PW(x) (((x)&0x3)<<26) /* Peripheral Width */
171 #define DMA_PW_MASK SET_DMA_PW(3)
176 #define GET_DMA_PW(x) (((x)&DMA_PW_MASK)>>26)
178 #define DMA_DAI (1<<25) /* Destination Address Increment */
179 #define SET_DMA_DAI(x) (((x)&0x1)<<25)
181 #define DMA_SAI (1<<24) /* Source Address Increment */
182 #define SET_DMA_SAI(x) (((x)&0x1)<<24)
184 #define DMA_BEN (1<<23) /* Buffer Enable */
185 #define SET_DMA_BEN(x) (((x)&0x1)<<23)
187 #define SET_DMA_TM(x) (((x)&0x3)<<21) /* Transfer Mode */
188 #define DMA_TM_MASK SET_DMA_TM(3)
189 #define TM_PERIPHERAL 0 /* Peripheral */
190 #define TM_RESERVED 1 /* Reserved */
191 #define TM_S_MM 2 /* Memory to Memory */
192 #define TM_D_MM 3 /* Device Paced Memory to Memory */
193 #define GET_DMA_TM(x) (((x)&DMA_TM_MASK)>>21)
195 #define SET_DMA_PSC(x) (((x)&0x3)<<19) /* Peripheral Setup Cycles */
196 #define DMA_PSC_MASK SET_DMA_PSC(3)
197 #define GET_DMA_PSC(x) (((x)&DMA_PSC_MASK)>>19)
199 #define SET_DMA_PWC(x) (((x)&0x3F)<<13) /* Peripheral Wait Cycles */
200 #define DMA_PWC_MASK SET_DMA_PWC(0x3F)
201 #define GET_DMA_PWC(x) (((x)&DMA_PWC_MASK)>>13)
203 #define SET_DMA_PHC(x) (((x)&0x7)<<10) /* Peripheral Hold Cycles */
204 #define DMA_PHC_MASK SET_DMA_PHC(0x7)
205 #define GET_DMA_PHC(x) (((x)&DMA_PHC_MASK)>>10)
207 #define DMA_ETD_OUTPUT (1<<9) /* EOT pin is a TC output */
208 #define SET_DMA_ETD(x) (((x)&0x1)<<9)
/* Terminal Count Enable. */
210 #define DMA_TCE_ENABLE (1<<8)
211 #define SET_DMA_TCE(x) (((x)&0x1)<<8)
213 #define SET_DMA_PRIORITY(x) (((x)&0x3)<<6) /* DMA Channel Priority */
214 #define DMA_PRIORITY_MASK SET_DMA_PRIORITY(3)
215 #define PRIORITY_LOW 0
216 #define PRIORITY_MID_LOW 1
217 #define PRIORITY_MID_HIGH 2
218 #define PRIORITY_HIGH 3
219 #define GET_DMA_PRIORITY(x) (((x)&DMA_PRIORITY_MASK)>>6)
221 #define SET_DMA_PREFETCH(x) (((x)&0x3)<<4) /* Memory Read Prefetch */
222 #define DMA_PREFETCH_MASK SET_DMA_PREFETCH(3)
223 #define PREFETCH_1 0 /* Prefetch 1 Double Word */
226 #define GET_DMA_PREFETCH(x) (((x)&DMA_PREFETCH_MASK)>>4)
228 #define DMA_PCE (1<<3) /* Parity Check Enable */
229 #define SET_DMA_PCE(x) (((x)&0x1)<<3)
230 #define GET_DMA_PCE(x) (((x)&DMA_PCE)>>3)
232 #define DMA_DEC (1<<2) /* Address Decrement */
233 #define SET_DMA_DEC(x) (((x)&0x1)<<2)
234 #define GET_DMA_DEC(x) (((x)&DMA_DEC)>>2)
/*
 * Scatter/gather command register: writes to the SSGn start bits take
 * effect only when the corresponding mask-enable bit is also set, which
 * allows one channel to be started/stopped without touching the others.
 */
237 * DMA SG Command Register
239 #define SSG0_ENABLE (1<<31) /* Start Scatter Gather */
240 #define SSG1_ENABLE (1<<30)
241 #define SSG2_ENABLE (1<<29)
242 #define SSG3_ENABLE (1<<28)
243 #define SSG0_MASK_ENABLE (1<<15) /* Enable writing to SSG0 bit */
244 #define SSG1_MASK_ENABLE (1<<14)
245 #define SSG2_MASK_ENABLE (1<<13)
246 #define SSG3_MASK_ENABLE (1<<12)
/* Bits stored in the control_count word of each in-memory sgl descriptor. */
250 * DMA Scatter/Gather Descriptor Bit fields
252 #define SG_LINK (1<<31) /* Link */
253 #define SG_TCI_ENABLE (1<<29) /* Enable Terminal Count Interrupt */
254 #define SG_ETI_ENABLE (1<<28) /* Enable End of Transfer Interrupt */
255 #define SG_ERI_ENABLE (1<<27) /* Enable Error Interrupt */
256 #define SG_COUNT_MASK 0xFFFF /* Count Field */
/* Opaque handle to a scatter/gather list; really a pointer to its
 * sgl_list_info_t, cast to/from uint32_t by the sgl functions below. */
261 typedef uint32_t sgl_handle_t;
/*
 * NOTE(review): this listing is elided — the struct headers are not
 * visible here. From the accessors below, the following fields appear to
 * belong (in order) to ppc_dma_ch_t (per-channel software state), the
 * in-memory ppc_sgl_t descriptor, and sgl_list_info_t (list bookkeeping).
 * Confirm against the full header before relying on the grouping.
 */
266 * Valid polarity settings:
283 unsigned int polarity;
285 char buffer_enable; /* Boolean: buffer enable */
286 char tce_enable; /* Boolean: terminal count enable */
287 char etd_output; /* Boolean: eot pin is a tc output */
288 char pce; /* Boolean: parity check enable */
291 * Peripheral location:
292 * INTERNAL_PERIPHERAL (UART0 on the 405GP)
293 * EXTERNAL_PERIPHERAL
295 char pl; /* internal/external peripheral */
298 * Valid pwidth settings:
306 char dai; /* Boolean: dst address increment */
307 char sai; /* Boolean: src address increment */
310 * Valid psc settings: 0-3
312 unsigned int psc; /* Peripheral Setup Cycles */
315 * Valid pwc settings:
318 unsigned int pwc; /* Peripheral Wait Cycles */
321 * Valid phc settings:
324 unsigned int phc; /* Peripheral Hold Cycles */
327 * Valid cp (channel priority) settings:
333 unsigned int cp; /* channel priority */
336 * Valid pf (memory read prefetch) settings:
342 unsigned int pf; /* memory read prefetch */
345 * Boolean: channel interrupt enable
346 * NOTE: for sgl transfers, only the last descriptor will be setup to
/* shift converts a byte count to a transfer count for this bus width. */
351 char shift; /* easy access to byte_count shift, based on */
352 /* the width of the channel */
354 uint32_t control; /* channel control word */
357 /* These variabled are used ONLY in single dma transfers */
358 unsigned int mode; /* transfer mode */
/* ppc_sgl_t descriptor word: SG_* flag bits plus the transfer count. */
368 uint32_t control_count;
376 uint32_t control; /* channel ctrl word; loaded from each descrptr */
377 uint32_t sgl_control; /* LK, TCI, ETI, and ERI bits in sgl descriptor */
378 dma_addr_t dma_addr; /* dma (physical) address of this list */
386 unsigned int *src_addr;
387 unsigned int *dst_addr;
388 dma_addr_t dma_src_addr;
389 dma_addr_t dma_dst_addr;
/* Per-channel state table, defined in ppc405_dma.c; indexed by channel. */
393 extern ppc_dma_ch_t dma_channels[];
397 * DMA API inline functions
398 * These functions are implemented here as inline functions for
399 * performance reasons.
/* Return the raw contents of the DMA Status Register (DMASR). */
403 static __inline__ int get_405gp_dma_status(void)
405 return (mfdcr(DCRN_DMASR));
/*
 * Start a previously configured single (non-sgl) DMA transfer on channel
 * dmanr. For DMA_MODE_READ/WRITE the saved p_dma_ch->addr is programmed
 * into the destination/source address register of the channel; the unused
 * side is zeroed. Mode bits are merged into the control register and the
 * channel enable bit is set. Returns a DMA_STATUS_* code.
 *
 * NOTE(review): passing NULL to mtdcr() where an integer 0 register value
 * is meant — works by accident, should be 0.
 */
409 static __inline__ int enable_405gp_dma(unsigned int dmanr)
411 unsigned int control;
412 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
415 if (dmanr >= MAX_405GP_DMA_CHANNELS) {
416 printk("enable_dma: bad channel: %d\n", dmanr);
417 return DMA_STATUS_BAD_CHANNEL;
424 if (p_dma_ch->mode == DMA_MODE_READ) {
425 /* peripheral to memory */
426 mtdcr(DCRN_DMASA0, NULL);
427 mtdcr(DCRN_DMADA0, p_dma_ch->addr);
429 else if (p_dma_ch->mode == DMA_MODE_WRITE) {
430 /* memory to peripheral */
431 mtdcr(DCRN_DMASA0, p_dma_ch->addr);
432 mtdcr(DCRN_DMADA0, NULL);
434 /* for other xfer modes, the addresses are already set */
435 control = mfdcr(DCRN_DMACR0);
436 control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */
437 control |= (p_dma_ch->mode | DMA_CH_ENABLE);
438 mtdcr(DCRN_DMACR0, control);
/* Channel 1: same sequence against the channel-1 DCRs. */
441 if (p_dma_ch->mode == DMA_MODE_READ) {
442 mtdcr(DCRN_DMASA1, NULL);
443 mtdcr(DCRN_DMADA1, p_dma_ch->addr);
444 } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
445 mtdcr(DCRN_DMASA1, p_dma_ch->addr);
446 mtdcr(DCRN_DMADA1, NULL);
448 control = mfdcr(DCRN_DMACR1);
449 control &= ~(DMA_TM_MASK | DMA_TD);
450 control |= (p_dma_ch->mode | DMA_CH_ENABLE);
451 mtdcr(DCRN_DMACR1, control);
/* Channel 2. */
454 if (p_dma_ch->mode == DMA_MODE_READ) {
455 mtdcr(DCRN_DMASA2, NULL);
456 mtdcr(DCRN_DMADA2, p_dma_ch->addr);
457 } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
458 mtdcr(DCRN_DMASA2, p_dma_ch->addr);
459 mtdcr(DCRN_DMADA2, NULL);
461 control = mfdcr(DCRN_DMACR2);
462 control &= ~(DMA_TM_MASK | DMA_TD);
463 control |= (p_dma_ch->mode | DMA_CH_ENABLE);
464 mtdcr(DCRN_DMACR2, control);
/* Channel 3. */
467 if (p_dma_ch->mode == DMA_MODE_READ) {
468 mtdcr(DCRN_DMASA3, NULL);
469 mtdcr(DCRN_DMADA3, p_dma_ch->addr);
470 } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
471 mtdcr(DCRN_DMASA3, p_dma_ch->addr);
472 mtdcr(DCRN_DMADA3, NULL);
474 control = mfdcr(DCRN_DMACR3);
475 control &= ~(DMA_TM_MASK | DMA_TD);
476 control |= (p_dma_ch->mode | DMA_CH_ENABLE);
477 mtdcr(DCRN_DMACR3, control);
480 return DMA_STATUS_BAD_CHANNEL;
482 return DMA_STATUS_GOOD;
/*
 * Stop channel dmanr by clearing its DMA_CH_ENABLE bit with a
 * read-modify-write of the channel control register; other control
 * bits are preserved. Invalid channels just log a message (void return).
 */
487 static __inline__ void disable_405gp_dma(unsigned int dmanr)
489 unsigned int control;
493 control = mfdcr(DCRN_DMACR0);
494 control &= ~DMA_CH_ENABLE;
495 mtdcr(DCRN_DMACR0, control);
498 control = mfdcr(DCRN_DMACR1);
499 control &= ~DMA_CH_ENABLE;
500 mtdcr(DCRN_DMACR1, control);
503 control = mfdcr(DCRN_DMACR2);
504 control &= ~DMA_CH_ENABLE;
505 mtdcr(DCRN_DMACR2, control);
508 control = mfdcr(DCRN_DMACR3);
509 control &= ~DMA_CH_ENABLE;
510 mtdcr(DCRN_DMACR3, control);
514 printk("disable_dma: bad channel: %d\n", dmanr);
522 * Sets the dma mode for single DMA transfers only.
523 * For scatter/gather transfers, the mode is passed to the
524 * alloc_dma_handle() function as one of the parameters.
526 * The mode is simply saved and used later. This allows
527 * the driver to call set_dma_mode() and set_dma_addr() in
530 * Valid mode values are:
532 * DMA_MODE_READ peripheral to memory
533 * DMA_MODE_WRITE memory to peripheral
534 * DMA_MODE_MM memory to memory
535 * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src
536 * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst
/* Returns DMA_STATUS_GOOD, or BAD_MODE / BAD_CHANNEL on invalid input. */
538 static __inline__ int set_405gp_dma_mode(unsigned int dmanr, unsigned int mode)
540 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
547 case DMA_MODE_MM_DEVATSRC:
548 case DMA_MODE_MM_DEVATDST:
551 printk("set_dma_mode: bad mode 0x%x\n", mode);
552 return DMA_STATUS_BAD_MODE;
/* NOTE(review): channel validated after p_dma_ch is computed above;
 * harmless (pointer arithmetic only) but the check could come first. */
554 if (dmanr >= MAX_405GP_DMA_CHANNELS) {
555 printk("set_dma_mode: bad channel 0x%x\n", dmanr);
556 return DMA_STATUS_BAD_CHANNEL;
/* Save only; the mode is programmed into hardware by enable_405gp_dma(). */
560 p_dma_ch->mode = mode;
561 return DMA_STATUS_GOOD;
567 * Sets the DMA Count register. Note that 'count' is in bytes.
568 * However, the DMA Count register counts the number of "transfers",
569 * where each transfer is equal to the bus width. Thus, count
570 * MUST be a multiple of the bus width.
572 static __inline__ void
573 set_405gp_dma_count(unsigned int dmanr, unsigned int count)
575 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
/* Debug-only alignment check: warn if count is not a multiple of pwidth. */
580 switch(p_dma_ch->pwidth) {
596 printk("set_dma_count: invalid bus width: 0x%x\n",
601 printk("Warning: set_dma_count count 0x%x bus width %d\n",
602 count, p_dma_ch->pwidth);
/* Convert byte count to hardware transfer count via the cached shift. */
606 count = count >> p_dma_ch->shift;
609 mtdcr(DCRN_DMACT0, count);
612 mtdcr(DCRN_DMACT1, count);
615 mtdcr(DCRN_DMACT2, count);
618 mtdcr(DCRN_DMACT3, count);
622 printk("set_dma_count: bad channel: %d\n", dmanr);
630 * Returns the number of bytes left to be transfered.
631 * After a DMA transfer, this should return zero.
632 * Reading this while a DMA transfer is still in progress will return
633 * unpredictable results.
635 static __inline__ int get_405gp_dma_residue(unsigned int dmanr)
638 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
642 count = mfdcr(DCRN_DMACT0);
645 count = mfdcr(DCRN_DMACT1);
648 count = mfdcr(DCRN_DMACT2);
651 count = mfdcr(DCRN_DMACT3);
655 printk("get_dma_residue: bad channel: %d\n", dmanr);
/* Convert the hardware transfer count back to bytes. */
660 return (count << p_dma_ch->shift);
666 * Sets the DMA address for a memory to peripheral or peripheral
667 * to memory transfer. The address is just saved in the channel
668 * structure for now and used later in enable_dma().
670 static __inline__ void set_405gp_dma_addr(unsigned int dmanr, dma_addr_t addr)
672 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
/* Debug-only check that addr is aligned to the channel's bus width. */
676 switch(p_dma_ch->pwidth) {
680 if ((unsigned)addr & 0x1)
684 if ((unsigned)addr & 0x3)
688 if ((unsigned)addr & 0x7)
692 printk("set_dma_addr: invalid bus width: 0x%x\n",
697 printk("Warning: set_dma_addr addr 0x%x bus width %d\n",
698 addr, p_dma_ch->pwidth);
702 /* save dma address and program it later after we know the xfer mode */
703 p_dma_ch->addr = addr;
710 * Sets both DMA addresses for a memory to memory transfer.
711 * For memory to peripheral or peripheral to memory transfers
712 * the function set_dma_addr() should be used instead.
714 static __inline__ void
715 set_405gp_dma_addr2(unsigned int dmanr, dma_addr_t src_dma_addr,
716 dma_addr_t dst_dma_addr)
720 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
/* Debug-only check that both addresses match the bus-width alignment. */
722 switch(p_dma_ch->pwidth) {
726 if (((unsigned)src_dma_addr & 0x1) ||
727 ((unsigned)dst_dma_addr & 0x1)
732 if (((unsigned)src_dma_addr & 0x3) ||
733 ((unsigned)dst_dma_addr & 0x3)
738 if (((unsigned)src_dma_addr & 0x7) ||
739 ((unsigned)dst_dma_addr & 0x7)
744 printk("set_dma_addr2: invalid bus width: 0x%x\n",
749 printk("Warning: set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
750 src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
/* Unlike set_dma_addr(), these are written straight to the channel DCRs. */
756 mtdcr(DCRN_DMASA0, src_dma_addr);
757 mtdcr(DCRN_DMADA0, dst_dma_addr);
760 mtdcr(DCRN_DMASA1, src_dma_addr);
761 mtdcr(DCRN_DMADA1, dst_dma_addr);
764 mtdcr(DCRN_DMASA2, src_dma_addr);
765 mtdcr(DCRN_DMADA2, dst_dma_addr);
768 mtdcr(DCRN_DMASA3, src_dma_addr);
769 mtdcr(DCRN_DMADA3, dst_dma_addr);
773 printk("set_dma_addr2: bad channel: %d\n", dmanr);
781 * Enables the channel interrupt.
783 * If performing a scatter/gatter transfer, this function
784 * MUST be called before calling alloc_dma_handle() and building
785 * the sgl list. Otherwise, interrupts will not be enabled, if
786 * they were previously disabled.
788 static __inline__ int
789 enable_405gp_dma_interrupt(unsigned int dmanr)
791 unsigned int control;
/* NOTE(review): dma_channels[dmanr] is indexed and int_enable written
 * before dmanr is range-checked (the bad-channel path comes later). */
792 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
794 p_dma_ch->int_enable = TRUE;
/* Set the Channel Interrupt Enable bit in the channel's control DCR. */
797 control = mfdcr(DCRN_DMACR0);
798 control|= DMA_CIE_ENABLE; /* Channel Interrupt Enable */
799 mtdcr(DCRN_DMACR0, control);
802 control = mfdcr(DCRN_DMACR1);
803 control|= DMA_CIE_ENABLE;
804 mtdcr(DCRN_DMACR1, control);
807 control = mfdcr(DCRN_DMACR2);
808 control|= DMA_CIE_ENABLE;
809 mtdcr(DCRN_DMACR2, control);
812 control = mfdcr(DCRN_DMACR3);
813 control|= DMA_CIE_ENABLE;
814 mtdcr(DCRN_DMACR3, control);
818 printk("enable_dma_interrupt: bad channel: %d\n", dmanr);
820 return DMA_STATUS_BAD_CHANNEL;
822 return DMA_STATUS_GOOD;
828 * Disables the channel interrupt.
830 * If performing a scatter/gatter transfer, this function
831 * MUST be called before calling alloc_dma_handle() and building
832 * the sgl list. Otherwise, interrupts will not be disabled, if
833 * they were previously enabled.
835 static __inline__ int
836 disable_405gp_dma_interrupt(unsigned int dmanr)
838 unsigned int control;
839 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
/* BUG(review): this function DISABLES the interrupt, so this should be
 * FALSE — copy/paste from enable_405gp_dma_interrupt(). */
841 p_dma_ch->int_enable = TRUE;
844 control = mfdcr(DCRN_DMACR0);
845 control &= ~DMA_CIE_ENABLE; /* Channel Interrupt Enable */
846 mtdcr(DCRN_DMACR0, control);
849 control = mfdcr(DCRN_DMACR1);
850 control &= ~DMA_CIE_ENABLE;
851 mtdcr(DCRN_DMACR1, control);
854 control = mfdcr(DCRN_DMACR2);
855 control &= ~DMA_CIE_ENABLE;
856 mtdcr(DCRN_DMACR2, control);
859 control = mfdcr(DCRN_DMACR3);
860 control &= ~DMA_CIE_ENABLE;
861 mtdcr(DCRN_DMACR3, control);
/* NOTE(review): copy/paste — message should say "disable_dma_interrupt". */
865 printk("enable_dma_interrupt: bad channel: %d\n", dmanr);
867 return DMA_STATUS_BAD_CHANNEL;
869 return DMA_STATUS_GOOD;
#ifdef DCRNCAP_DMA_SG
876 * Add a new sgl descriptor to the end of a scatter/gather list
877 * which was created by alloc_dma_handle().
879 * For a memory to memory transfer, both dma addresses must be
880 * valid. For a peripheral to memory transfer, one of the addresses
881 * must be set to NULL, depending on the direction of the transfer:
882 * memory to peripheral: set dst_addr to NULL,
883 * peripheral to memory: set src_addr to NULL.
885 static __inline__ int
886 add_405gp_dma_sgl(sgl_handle_t handle, dma_addr_t src_addr, dma_addr_t dst_addr,
889 sgl_list_info_t *psgl = (sgl_list_info_t *)handle;
890 ppc_dma_ch_t *p_dma_ch;
894 printk("add_dma_sgl: null handle\n");
896 return DMA_STATUS_BAD_HANDLE;
900 if (psgl->dmanr >= MAX_405GP_DMA_CHANNELS) {
901 printk("add_dma_sgl error: psgl->dmanr == %d\n", psgl->dmanr);
902 return DMA_STATUS_BAD_CHANNEL;
906 p_dma_ch = &dma_channels[psgl->dmanr];
/* Debug-only: one combined alignment check for both addresses and count. */
911 unsigned int aligned = (unsigned)src_addr | (unsigned)dst_addr | count;
912 switch(p_dma_ch->pwidth) {
928 printk("add_dma_sgl: invalid bus width: 0x%x\n",
930 return DMA_STATUS_GENERAL_ERROR;
933 printk("Alignment warning: add_dma_sgl src 0x%x dst 0x%x count 0x%x bus width var %d\n",
934 src_addr, dst_addr, count, p_dma_ch->pwidth);
/* Refuse to append past the end of the fixed SGL_LIST_SIZE region. */
939 if ((unsigned)(psgl->ptail + 1) >= ((unsigned)psgl + SGL_LIST_SIZE)) {
941 printk("sgl handle out of memory \n");
943 return DMA_STATUS_OUT_OF_MEMORY;
/* Empty list: first descriptor lives right after the bookkeeping header. */
948 psgl->phead = (ppc_sgl_t *)
949 ((unsigned)psgl + sizeof(sgl_list_info_t));
950 psgl->ptail = psgl->phead;
/* Link the previous tail to the new descriptor (bus address). */
952 psgl->ptail->next = virt_to_bus(psgl->ptail + 1);
/* Fill in the new tail; count is converted to transfers via shift. */
956 psgl->ptail->control = psgl->control;
957 psgl->ptail->src_addr = src_addr;
958 psgl->ptail->dst_addr = dst_addr;
959 psgl->ptail->control_count = (count >> p_dma_ch->shift) |
961 psgl->ptail->next = (uint32_t)NULL;
963 return DMA_STATUS_GOOD;
969 * Enable (start) the DMA described by the sgl handle.
971 static __inline__ void enable_405gp_dma_sgl(sgl_handle_t handle)
973 sgl_list_info_t *psgl = (sgl_list_info_t *)handle;
974 ppc_dma_ch_t *p_dma_ch;
979 printk("enable_dma_sgl: null handle\n");
981 } else if (psgl->dmanr > (MAX_405GP_DMA_CHANNELS - 1)) {
982 printk("enable_dma_sgl: bad channel in handle %d\n",
985 } else if (!psgl->phead) {
986 printk("enable_dma_sgl: sg list empty\n");
991 p_dma_ch = &dma_channels[psgl->dmanr];
/* Terminate the chain, then point the channel's SG address register at
 * the head descriptor and set the per-channel start bit in ASGC. */
992 psgl->ptail->control_count &= ~SG_LINK; /* make this the last dscrptr */
993 sg_command = mfdcr(DCRN_ASGC);
995 switch(psgl->dmanr) {
997 mtdcr(DCRN_ASG0, virt_to_bus(psgl->phead));
998 sg_command |= SSG0_ENABLE;
1001 mtdcr(DCRN_ASG1, virt_to_bus(psgl->phead));
1002 sg_command |= SSG1_ENABLE;
1005 mtdcr(DCRN_ASG2, virt_to_bus(psgl->phead));
1006 sg_command |= SSG2_ENABLE;
1009 mtdcr(DCRN_ASG3, virt_to_bus(psgl->phead));
1010 sg_command |= SSG3_ENABLE;
1014 printk("enable_dma_sgl: bad channel: %d\n", psgl->dmanr);
/* DEBUG_405DMA-only: dump every descriptor in the chain. */
1019 printk("\n\nenable_dma_sgl at dma_addr 0x%x\n",
1020 virt_to_bus(psgl->phead));
1022 ppc_sgl_t *pnext, *sgl_addr;
1024 pnext = psgl->phead;
1026 printk("dma descriptor at 0x%x, dma addr 0x%x\n",
1027 (unsigned)pnext, (unsigned)virt_to_bus(pnext));
1028 printk("control 0x%x src 0x%x dst 0x%x c_count 0x%x, next 0x%x\n",
1029 (unsigned)pnext->control, (unsigned)pnext->src_addr,
1030 (unsigned)pnext->dst_addr,
1031 (unsigned)pnext->control_count, (unsigned)pnext->next);
/* BUG(review): a cast is not an lvalue — "(unsigned)pnext = ..." is not
 * valid C; should be pnext = (ppc_sgl_t *)bus_to_virt(pnext->next). */
1033 (unsigned)pnext = bus_to_virt(pnext->next);
1035 printk("sg_command 0x%x\n", sg_command);
1039 #ifdef PCI_ALLOC_IS_NONCONSISTENT
1041 * This is temporary only, until pci_alloc_consistent() really does
1042 * return "consistent" memory.
1044 flush_dcache_range((unsigned)handle, (unsigned)handle + SGL_LIST_SIZE);
1047 mtdcr(DCRN_ASGC, sg_command); /* start transfer */
1053 * Halt an active scatter/gather DMA operation.
1055 static __inline__ void disable_405gp_dma_sgl(sgl_handle_t handle)
1057 sgl_list_info_t *psgl = (sgl_list_info_t *)handle;
1058 uint32_t sg_command;
/* NOTE(review): copy/paste — the three messages below should say
 * "disable_dma_sgl", not "enable_dma_sgl". */
1062 printk("enable_dma_sgl: null handle\n");
1064 } else if (psgl->dmanr > (MAX_405GP_DMA_CHANNELS - 1)) {
1065 printk("enable_dma_sgl: bad channel in handle %d\n",
/* Clear the channel's start bit in the SG command register. */
1070 sg_command = mfdcr(DCRN_ASGC);
1071 switch(psgl->dmanr) {
1073 sg_command &= ~SSG0_ENABLE;
1076 sg_command &= ~SSG1_ENABLE;
1079 sg_command &= ~SSG2_ENABLE;
1082 sg_command &= ~SSG3_ENABLE;
1086 printk("enable_dma_sgl: bad channel: %d\n", psgl->dmanr);
1090 mtdcr(DCRN_ASGC, sg_command); /* stop transfer */
1096 * Returns number of bytes left to be transferred from the entire sgl list.
1097 * *src_addr and *dst_addr get set to the source/destination address of
1098 * the sgl descriptor where the DMA stopped.
1100 * An sgl transfer must NOT be active when this function is called.
1102 static __inline__ int
1103 get_405gp_dma_sgl_residue(sgl_handle_t handle, dma_addr_t *src_addr,
1104 dma_addr_t *dst_addr)
1106 sgl_list_info_t *psgl = (sgl_list_info_t *)handle;
1107 ppc_dma_ch_t *p_dma_ch;
1108 ppc_sgl_t *pnext, *sgl_addr;
1109 uint32_t count_left;
1113 printk("get_dma_sgl_residue: null handle\n");
1114 return DMA_STATUS_BAD_HANDLE;
1115 } else if (psgl->dmanr > (MAX_405GP_DMA_CHANNELS - 1)) {
1116 printk("get_dma_sgl_residue: bad channel in handle %d\n",
1118 return DMA_STATUS_BAD_CHANNEL;
/* Read where the channel stopped (current descriptor bus address) and
 * the remaining transfer count for that descriptor. */
1122 switch(psgl->dmanr) {
1124 sgl_addr = (ppc_sgl_t *)bus_to_virt(mfdcr(DCRN_ASG0));
1125 count_left = mfdcr(DCRN_DMACT0);
1128 sgl_addr = (ppc_sgl_t *)bus_to_virt(mfdcr(DCRN_ASG1));
1129 count_left = mfdcr(DCRN_DMACT1);
1132 sgl_addr = (ppc_sgl_t *)bus_to_virt(mfdcr(DCRN_ASG2));
1133 count_left = mfdcr(DCRN_DMACT2);
1136 sgl_addr = (ppc_sgl_t *)bus_to_virt(mfdcr(DCRN_ASG3));
1137 count_left = mfdcr(DCRN_DMACT3);
1141 printk("get_dma_sgl_residue: bad channel: %d\n", psgl->dmanr);
1148 printk("get_dma_sgl_residue: sgl addr register is null\n");
/* Walk the list (bounded by SGL_LIST_SIZE) to find the descriptor the
 * hardware stopped on. */
1153 pnext = psgl->phead;
1155 ((unsigned)pnext < ((unsigned)psgl + SGL_LIST_SIZE) &&
1156 (pnext != sgl_addr))
1161 if (pnext == sgl_addr) { /* found the sgl descriptor */
1163 *src_addr = pnext->src_addr;
1164 *dst_addr = pnext->dst_addr;
1167 * Now search the remaining descriptors and add their count.
1168 * We already have the remaining count from this descriptor in
1173 while ((pnext != psgl->ptail) &&
1174 ((unsigned)pnext < ((unsigned)psgl + SGL_LIST_SIZE))
1176 count_left += pnext->control_count & SG_COUNT_MASK;
1179 if (pnext != psgl->ptail) { /* should never happen */
1181 printk("get_dma_sgl_residue error (1) psgl->ptail 0x%x handle 0x%x\n",
1182 (unsigned int)psgl->ptail,
1183 (unsigned int)handle);
/* Convert accumulated transfer count back to bytes. */
1189 p_dma_ch = &dma_channels[psgl->dmanr];
1190 return (count_left << p_dma_ch->shift); /* count in bytes */
1193 /* this shouldn't happen */
1195 printk("get_dma_sgl_residue, unable to match current address 0x%x, handle 0x%x\n",
1196 (unsigned int)sgl_addr, (unsigned int)handle);
1203 *src_addr = (dma_addr_t)NULL;
1204 *dst_addr = (dma_addr_t)NULL;
1212 * Returns the address(es) of the buffer(s) contained in the head element of
1213 * the scatter/gather list. The element is removed from the scatter/gather
1214 * list and the next element becomes the head.
1216 * This function should only be called when the DMA is not active.
1218 static __inline__ int
1219 delete_405gp_dma_sgl_element(sgl_handle_t handle, dma_addr_t *src_dma_addr,
1220 dma_addr_t *dst_dma_addr)
1222 sgl_list_info_t *psgl = (sgl_list_info_t *)handle;
1226 printk("delete_sgl_element: null handle\n");
1227 return DMA_STATUS_BAD_HANDLE;
1228 } else if (psgl->dmanr > (MAX_405GP_DMA_CHANNELS - 1)) {
1229 printk("delete_sgl_element: bad channel in handle %d\n",
1231 return DMA_STATUS_BAD_CHANNEL;
1237 printk("delete_sgl_element: sgl list empty\n");
/* Empty list: report NULL buffers to the caller and a distinct status. */
1239 *src_dma_addr = (dma_addr_t)NULL;
1240 *dst_dma_addr = (dma_addr_t)NULL;
1241 return DMA_STATUS_SGL_LIST_EMPTY;
/* Hand the head element's buffer addresses back before unlinking it. */
1244 *src_dma_addr = (dma_addr_t)psgl->phead->src_addr;
1245 *dst_dma_addr = (dma_addr_t)psgl->phead->dst_addr;
1247 if (psgl->phead == psgl->ptail) {
1248 /* last descriptor on the list */
1255 return DMA_STATUS_GOOD;
#endif /* DCRNCAP_DMA_SG */
/* Out-of-line portions of the DMA API; see ppc405_dma.c for definitions. */
1261 * The rest of the DMA API, in ppc405_dma.c
1263 extern int hw_init_dma_channel(unsigned int, ppc_dma_ch_t *);
1264 extern int get_channel_config(unsigned int, ppc_dma_ch_t *);
1265 extern int set_channel_priority(unsigned int, unsigned int);
1266 extern unsigned int get_peripheral_width(unsigned int);
1267 extern int alloc_dma_handle(sgl_handle_t *, unsigned int, unsigned int);
1268 extern void free_dma_handle(sgl_handle_t);
1271 #endif /* __KERNEL__ */