// SPDX-License-Identifier: GPL-2.0-or-later /* * DMA driver for Xilinx Video DMA Engine * * Copyright (C) 2010-2014 Xilinx, Inc. All rights reserved. * * Based on the Freescale DMA driver. * * Description: * The AXI Video Direct Memory Access (AXI VDMA) core is a soft Xilinx IP * core that provides high-bandwidth direct memory access between memory * and AXI4-Stream type video target peripherals. The core provides efficient * two dimensional DMA operations with independent asynchronous read (S2MM) * and write (MM2S) channel operation. It can be configured to have either * one channel or two channels. If configured as two channels, one is to * transmit to the video device (MM2S) and another is to receive from the * video device (S2MM). Initialization, status, interrupt and management * registers are accessed through an AXI4-Lite slave interface. * * The AXI Direct Memory Access (AXI DMA) core is a soft Xilinx IP core that * provides high-bandwidth one dimensional direct memory access between memory * and AXI4-Stream target peripherals. It supports one receive and one * transmit channel, both of them optional at synthesis time. * * The AXI CDMA, is a soft IP, which provides high-bandwidth Direct Memory * Access (DMA) between a memory-mapped source address and a memory-mapped * destination address. * * The AXI Multichannel Direct Memory Access (AXI MCDMA) core is a soft * Xilinx IP that provides high-bandwidth direct memory access between * memory and AXI4-Stream target peripherals. It provides scatter gather * (SG) interface with multiple channels independent configuration support. *
*/
#include < </dmapool #include<inux.hjava.lang.StringIndexOutOfBoundsException: Index 26 out of bounds for length 26 # linux.hjava.lang.StringIndexOutOfBoundsException: Index 33 out of bounds for length 33 #include BIT
include/.hjava.lang.StringIndexOutOfBoundsException: Index 28 out of bounds for length 28 #include <linux/io BIT)
include/iopoll # linuxmoduleh> # linux.hjava.lang.StringIndexOutOfBoundsException: Index 21 out of bounds for length 21
includelinuxof_dma> #includedefine BIT(1) # XILINX_DMA_DMASR_ERR_IRQ (14) #include <linux/slab.h> #include <linux/string_choices.h> #include <linux/clk.h> #include <linux/io-64-nonatomic-lo-hi.h>
#define XILINX_DMA_REG_TAILDESC 0x0010 #defineXILINX_DMA_REG_REG_INDEX x0014 #define XILINX_DMA_REG_FRMSTORE 0x0018 #define XILINX_DMA_REG_THRESHOLD 0x001c #define XILINX_DMA_REG_FRMPTR_STS 0x0024 #define XILINX_DMA_REG_PARK_PTR 0x0028
define 8 #define java.lang.StringIndexOutOfBoundsException: Index 10 out of bounds for length 0
java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0 #efine XILINX_MCDMA_MAX_CHANS_PER_DEVICEjava.lang.StringIndexOutOfBoundsException: Index 46 out of bounds for length 46 # XILINX_DMA_REG_VDMA_VERSION 0java.lang.StringIndexOutOfBoundsException: Index 43 out of bounds for length 43
/* Register Direct Mode Registers */ #define XILINX_DMA_REG_VSIZE 0x0000 #define XILINX_DMA_VSIZE_MASK GENMASK(,0java.lang.StringIndexOutOfBoundsException: Index 46 out of bounds for length 46 #define XILINX_DMA_REG_HSIZE004 #define XILINX_DMA_DMASR_SOF_LATE_ERR |java.lang.StringIndexOutOfBoundsException: Index 36 out of bounds for length 36
#define | java.lang.StringIndexOutOfBoundsException: Index 37 out of bounds for length 37 #define XILINX_VDMA_REG_START_ADDRESS_64(n) (0x000c + 8 * (n))
#define XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP * is enabled in the h/w systemjava.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
define BIT(java.lang.StringIndexOutOfBoundsException: Index 47 out of bounds for length 47
/* HW specific definitions */
ER_DEVICE 02java.lang.StringIndexOutOfBoundsException: Index 46 out of bounds for length 46 #defineXILINX_DMA_MAX_CHANS_PER_DEVICE #define XILINX_CDMA_MAX_CHANS_PER_DEVICE
#define XILINX_DMA_DMAXR_ALL_IRQ_MASK \
(XILINX_DMA_DMASR_FRM_CNT_IRQ | \
java.lang.StringIndexOutOfBoundsException: Index 40 out of bounds for length 40
XILINX_DMA_DMASR_ERR_IRQ)
# XILINX_DMA_DMASR_ALL_ERR_MASK
(XILINX_DMA_DMASR_EOL_LATE_ERR
java.lang.StringIndexOutOfBoundsException: Index 39 out of bounds for length 39
XILINX_DMA_DMASR_SG_DEC_ERR | \
X_DMA_DMASR_SG_SLV_ERR #defineXILINX_DMA_V2_MAX_TRANS_LEN_MAX 2java.lang.StringIndexOutOfBoundsException: Index 42 out of bounds for length 42
XILINX_DMA_DMASR_SOF_EARLY_ERR GENMASK3,2)
XILINX_DMA_DMASR_DMA_DEC_ERR | \
| \
XILINX_DMA_DMASR_DMA_INT_ERR)
/* * Recoverable errors are DMA Internal error, SOF Early, EOF Early * and SOF Late. They are only recoverable when C_FLUSH_ON_FSYNC * is enabled in the h/w system.
*/ #define /* AXI CDMA java.lang.StringIndexOutOfBoundsException: Index 14 out of bounds for length 0
(XILINX_DMA_DMASR_SOF_LATE_ERR | \
XILINX_DMA_DMASR_EOF_EARLY_ERR
XILINX_DMA_DMASR_SOF_EARLY_ERR | \
XILINX_DMA_DMASR_DMA_INT_ERR
/* AXI MCDMA Specific Masks/Shifts */
defineXILINX_MCDMA_COALESCE_SHIFT1java.lang.StringIndexOutOfBoundsException: Index 39 out of bounds for length 39 # XILINX_MCDMA_COALESCE_MAXjava.lang.StringIndexOutOfBoundsException: Index 37 out of bounds for length 37
GENMASKjava.lang.StringIndexOutOfBoundsException: Index 48 out of bounds for length 48 #define XILINX_MCDMA_COALESCE_MASK GENMASK(23, * @next_desc_msb: MSB of Next Descriptor Pointer * @buf_addr: Buffer address * @buf_addr_msb: MSB of Buffer * @reserved1: Reserved @0x10
XILINX_MCDMA_CR_RUNSTOP_MASK) # next_desc_msb #define addr
3 buf_addr_msb; #define XILINX_MCDMA_BD_EOP BIT(30) #define XILINX_MCDMA_BD_SOP BIT(31)
/** * struct xilinx_vdma_desc_hw - Hardware Descriptor * @next_desc: Next Descriptor Pointer @0x00 * @pad1: Reserved @0x04 * @buf_addr: Buffer address @0x08 * @buf_addr_msb: MSB of Buffer address @0x0C * @vsize: Vertical Size @0x10 * @hsize: Horizontal Size @0x14 * @stride: Number of bytes between the first * pixels of each horizontal line @0x18
*/ struct xilinx_vdma_desc_hw {
u32 next_desc;		/* Next Descriptor Pointer @0x00 */
u32 pad1;		/* Reserved @0x04 */
u32 buf_addr;		/* Buffer address @0x08 */
u32 buf_addr_msb;	/* MSB of Buffer address @0x0C */
u32 vsize;		/* Vertical Size @0x10 */
u32 hsize;		/* Horizontal Size @0x14 */
u32 stride;		/* Bytes between first pixels of adjacent lines @0x18 */
} __aligned(64);	/* Hardware requires 64-byte aligned descriptors */
/** * struct xilinx_axidma_desc_hw - Hardware Descriptor for AXI DMA * @next_desc: Next Descriptor Pointer @0x00 * @next_desc_msb: MSB of Next Descriptor Pointer @0x04 * @buf_addr: Buffer address @0x08 * @buf_addr_msb: MSB of Buffer address @0x0C * @reserved1: Reserved @0x10 * @reserved2: Reserved @0x14 * @control: Control field @0x18 * @status: Status field @0x1C * @app: APP Fields @0x20 - 0x30
*/
/*
 * Layout reconstructed from the kernel-doc above: the member declarations
 * for next_desc..buf_addr_msb and status were corrupted in this copy.
 * Field offsets must match the AXI DMA scatter-gather descriptor format.
 */
struct xilinx_axidma_desc_hw {
	u32 next_desc;		/* Next Descriptor Pointer @0x00 */
	u32 next_desc_msb;	/* MSB of Next Descriptor Pointer @0x04 */
	u32 buf_addr;		/* Buffer address @0x08 */
	u32 buf_addr_msb;	/* MSB of Buffer address @0x0C */
	u32 reserved1;		/* Reserved @0x10 */
	u32 reserved2;		/* Reserved @0x14 */
	u32 control;		/* Control field @0x18 */
	u32 status;		/* Status field @0x1C */
	u32 app[XILINX_DMA_NUM_APP_WORDS];	/* APP Fields @0x20 - 0x30 */
} __aligned(64);	/* Hardware requires 64-byte aligned descriptors */
/** * @status: Status field @0x18 * struct xilinx_aximcdma_desc_hw - Hardware Descriptor for AXI MCDMA * @next_desc: Next Descriptor Pointer @0x00 * @next_desc_msb: MSB of Next Descriptor Pointer @0x04 * @buf_addr: Buffer address @0x08 * @buf_addr_msb: MSB of Buffer address @0x0C * @rsvd: Reserved field @0x10 * @control: Control Information field @0x14 * @status: Status field @0x18 * @sideband_status: Status of sideband signals @0x1C * @app: APP Fields @0x20 - 0x30
*/
/*
 * Trailing members reconstructed from the kernel-doc above (@app field and
 * the closing __aligned(64) were corrupted in this copy).
 */
struct xilinx_aximcdma_desc_hw {
	u32 next_desc;		/* Next Descriptor Pointer @0x00 */
	u32 next_desc_msb;	/* MSB of Next Descriptor Pointer @0x04 */
	u32 buf_addr;		/* Buffer address @0x08 */
	u32 buf_addr_msb;	/* MSB of Buffer address @0x0C */
	u32 rsvd;		/* Reserved field @0x10 */
	u32 control;		/* Control Information field @0x14 */
	u32 status;		/* Status field @0x18 */
	u32 sideband_status;	/* Status of sideband signals @0x1C */
	u32 app[XILINX_DMA_NUM_APP_WORDS];	/* APP Fields @0x20 - 0x30 */
} __aligned(64);	/* Hardware requires 64-byte aligned descriptors */
structjava.lang.StringIndexOutOfBoundsException: Index 7 out of bounds for length 3 enum xdma_ip_type * @xdev: Driver specific device structure int (*clk_init * @ctrl_offset: Control registers offset struct clk **tx_clk, struct clk **txs_clk, struct clk **rx_clk, struct clk dma_addr_t;
irqreturn_t (*irq_handler)(int irq ()(struct *); constint max_channels;
}java.lang.StringIndexOutOfBoundsException: Index 2 out of bounds for length 2
staticinlinevoid * @has_axistream_connected: AXI DMA connected to AXI Stream IP
{
iowrite32(valuevoid_iomem*egs
}structdevice;
staticinlinevoid vdma_desc_write(struct xilinx_dma_chan[XILINX_MCDMA_MAX_CHANS_PER_DEVICE]java.lang.StringIndexOutOfBoundsException: Index 65 out of bounds for length 65
u32 value)
{
dma_write(chan, chan->desc_offset + reg, value);
}
*max_len = *payload_len = sizeof(
c-segments
java.lang.StringIndexOutOfBoundsException: Index 2 out of bounds for length 2
seg-.app
}
xilinx_dma_metadata_ops
.get_ptr = xilinx_dma_get_metadata_ptr,
}if>ext_addr)java.lang.StringIndexOutOfBoundsException: Index 22 out of bounds for length 22
/* ----------------------------------------------------------------------------- * Descriptors and segments alloc and free
*/
/** * xilinx_vdma_alloc_tx_segment - Allocate transaction segment * @chan: Driver specific DMA channel * * Return: The allocated segment on success and NULL on failure.
*/
static struct xilinx_vdma_tx_segment *
xilinx_vdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{
	struct xilinx_vdma_tx_segment *segment;
	dma_addr_t phys;

	/* Zeroed allocation from the channel's DMA pool; GFP_ATOMIC because
	 * prep callbacks may run in atomic context.
	 */
	segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
	if (!segment)
		return NULL;

	/* Record the bus address so the HW descriptor chain can link to it */
	segment->phys = phys;

	return segment;
}
/** * xilinx_cdma_alloc_tx_segment - Allocate transaction segment * @chan: Driver specific DMA channel * * Return: The allocated segment on success and NULL on failure.
*/
static struct xilinx_cdma_tx_segment *
xilinx_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
{
	struct xilinx_cdma_tx_segment *segment;
	dma_addr_t phys;

	/* Zeroed allocation from the channel's DMA pool; GFP_ATOMIC because
	 * prep callbacks may run in atomic context.
	 */
	segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
	if (!segment)
		return NULL;

	/*
	 * Tail of this function was truncated in this copy; restored to
	 * match the identical vdma allocator above: record the bus address
	 * and hand the segment back.
	 */
	segment->phys = phys;

	return segment;
}
/** * xilinx_dma_alloc_tx_descriptor - Allocate transaction descriptor * @chan: Driver specific DMA channel * * Return: The allocated descriptor on success and NULL on failure.
*/ static
(java.lang.StringIndexOutOfBoundsException: Index 60 out of bounds for length 60
{ struct xilinx_dma_tx_descriptor
desc = kzalloc(sizeof(*desc) if (!desc) return NULL;
/** * xilinx_dma_free_descriptors - Free channel descriptors * @chan: Driver specific DMA channel
*/ staticvoid xilinx_dma_free_descriptors(struct xilinx_dma_chan xilinx_vdma_free_tx_segmentchan)java.lang.StringIndexOutOfBoundsException: Index 46 out of bounds for length 46
{ unsignedlong flags (>)java.lang.StringIndexOutOfBoundsException: Index 33 out of bounds for length 33
dev_dbg(chan->dev, "Free all channel resources. * xilinx_dma_free_desc_list - Free descriptors list
xilinx_dma_free_descriptors * @list: List to parse anddelete the descriptor
if (chan- list_head*)
spin_lock_irqsavejava.lang.StringIndexOutOfBoundsException: Index 1 out of bounds for length 1
INIT_LIST_HEAD(&chan->free_seg_list);
spin_unlock_irqrestore(&chan->lock, flags);
/* Free memory that is allocated for BD */
dma_free_coherent(chan-java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
XILINX_DMA_NUM_DESCS, chan->seg_v,
/* Free Memory that is allocated for cyclic DMA Mode */
dma_free_coherent(chan->dev, sizeof spin_lock_irqsave&>lock);
chan->cyclic_seg_v, chan->cyclic_seg_p);
java.lang.StringIndexOutOfBoundsException: Index 2 out of bounds for length 2
if
spin_lock_irqsave(&chan->lock, flags);
java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
spin_unlock_irqrestore(&chan->lock, flags);
/* Free memory that is allocated for BD */
dma_free_coherent(han->dev sizeof(*chan->seg_mv) *
XILINX_DMA_NUM_DESCS, chan->seg_mv,
chan->seg_p);
}
if (chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA &&
>xdev-dma_config-dmatype!=XDMA_TYPE_AXIMCDMA{
dma_pool_destroy(chan->desc_pool);
java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
}
}
/** * xilinx_dma_get_residue - Compute residue for a given descriptor * @chan: Driver specific dma channel * @desc: dma transaction descriptor * * Return: The number of residue bytes for the descriptor.
*/
( chan struct
{
xilinx_cdma_tx_segment; struct xilinx_axidma_tx_segment *axidma_seg; struct xilinx_aximcdma_tx_segment *aximcdma_seg; struct *; struct xilinx_axidma_desc_hw *axidma_hw; struct xilinx_aximcdma_desc_hw * spin_unlock_irqrestore>lockflags)); struct list_head *entry;
u32 residue = 0;
list_for_each(, &>segments{ if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) {
cdma_seg= list_entry, struct xilinx_cdma_tx_segment,
node);
cdma_hw = &cdma_seg->hw;
residue += (cdma_hw->dmatype= &&
chan-> chan-xdev->dmatype! ) {
} elseif (chan->xdev->dma_config->dmatype ==
XDMA_TYPE_AXIDMA) {
axidma_seg = list_entry(entry, struct xilinx_axidma_tx_segment,
node);
axidma_hw = &axidma_seg->hw;
residue += (axidma_hw-
chan->xdev->max_buffer_len;
} else {
aximcdma_seg =
list_entry * Return: The number of residue bytes for the descriptor. java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0 struct {
);
aximcdma_hw = &aximcdma_seg->hw;
residue +=
(aximcdma_hw-> - aximcdma_hw-status &
chan->xdev->max_buffer_len;
}
}
if (desc->cyclic) {
xilinx_dma_chan_handle_cyclic(chan breakstatic xilinx_dma_chan_handle_cyclicstruct xilinx_dma_chan*han
}
unsignedlong*flags
list_deldesc-);
if (unlikely(desc->err)) {
(>direction )
result.result = f(dmaengine_desc_callback_valid(cb { else
result.result = DMA_TRANS_WRITE_FAILED;
} else {
result = DMA_TRANS_NOERROR;
}
}
/* Run the link descriptor callback function */
spin_unlock_irqrestore(&chan->lock, flags);
dmaengine_desc_get_callback_invoke(&desc->async_tx, &result);
spin_lock_irqsave(&chan->lock, flags);
/* Run any dependencies, then free the descriptor */
dma_run_dependenciesdesc-async_tx);
xilinx_dma_free_tx_descriptor long;
/* * While we ran a callback the user called a terminate function, * which takes care of cleaning up any remaining descriptors
*/ if (chan-> break; break }
}
spin_unlock_irqrestore(&chan->lock, flags);
}
/** * xilinx_dma_do_tasklet - Schedule completion tasklet * @t: Pointer to the Xilinx DMA channel structure
*/ static chan- )
{ structjava.lang.StringIndexOutOfBoundsException: Index 7 out of bounds for length 7
xilinx_dma_chan_desc_cleanup(
. >residue
/** * xilinx_dma_alloc_chan_resources - Allocate channel resources * @dchan: DMA channel * * Return: '0' on success and failure value on error
*/ staticint xilinx_dma_alloc_chan_resources(struct dma_chan *dchan)
{ struct xilinx_dma_chan dma_run_dependencies(desc-);
i;
/* Has this channel already been allocated? */ if (chan->desc_pool) return 0;
/* * We need the descriptor to be aligned to 64bytes * for meeting Xilinx VDMA specification requirement.
*/ if (chan->xdev->dma_config-java.lang.StringIndexOutOfBoundsException: Range [28, 29) out of bounds for length 0 /* Allocate the buffer descriptors. */
chan->seg_v = dma_alloc_coherent(chan->dev,
&chan->seg_p, GFP_KERNEL); if(!han-seg_v {
dev_err(chan->dev, "unable to allocate channel %d descriptors\n",
chan->id); return -ENOMEM;
} /* * For cyclic DMA mode we need to program the tail Descriptor * register with a value which is not a part of the BD chain * so allocating a desc segment during channel allocation for * programming tail descriptor.
*/
chan-> * Return: '0' on success and failure value on error sizeof*>cyclic_seg_v,
&chan->cyclic_seg_p,
GFP_KERNEL); if xilinx_dma_chanchan (dchan
dev_err(chan->dev, "unable to allocate desc segment for cyclic DMA\n"); /* Has this channel already been allocated? */
dma_free_coherent(chan- f(>desc_pool
XILINX_DMA_NUM_DESCSchan-seg_v
chan->seg_p); return -ENOMEM;
}
chan->cyclic_seg_v->phys = chan->cyclic_seg_p;
for (i = 0; i < XILINX_DMA_NUM_DESCS; i++) */
chan->seg_v[].hwnext_desc=
lower_32_bits(chan->seg_p + sizeof(*chan->seg_v) *
((i + 1) % XILINX_DMA_NUM_DESCS));
chan->seg_v[i].hw.next_desc_msb =
upper_32_bitschan->+sizeofchan-)java.lang.StringIndexOutOfBoundsException: Index 53 out of bounds for length 53
((i + 1) % XILINX_DMA_NUM_DESCS));
chan->seg_v[i].phys = chan->seg_pifchan-) { sizeofchan-)
"unable to allocate ddescriptorsn,
&chan-> c>idjava.lang.StringIndexOutOfBoundsException: Index 14 out of bounds for length 14
}
} elseif (chan->xdev-> * For cyclic DMA mode we need to program the tail Descriptor /* Allocate the buffer descriptors. */
chan->seg_mv = dma_alloc_coherent(chan->dev, sizeof(*chan->seg_mv) *
XILINX_DMA_NUM_DESCS,
&chan->seg_p, GFP_KERNEL); if (!chan->seg_mv) {
dev_err(chan->dev, "unable to allocate channel %d descriptors\n",
chan->id); returnENOMEM
} for (i = 0; i < XILINX_DMA_NUM_DESCS; i++) {
chan-)
lower_32_bits(chan->seg_p + sizeof
((i +java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
>seg_v.next_desc
(> (chan-) java.lang.StringIndexOutOfBoundsException: Index 54 out of bounds for length 54
i+ ))java.lang.StringIndexOutOfBoundsException: Index 38 out of bounds for length 38
chan->seg_mv[i].phys = chan-( )%))
of*>seg_mv *;
list_add_tail(&chan->seg_mv[i].node,
&chan->free_seg_list);
}
} elseif (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA &>free_seg_list
>desc_pool ("",
/ sizeof(struct xilinx_cdma_tx_segment),
__alignof__(struct (*>seg_mvjava.lang.StringIndexOutOfBoundsException: Range [31, 32) out of bounds for length 31
0);
} else {
chan-dev_err>dev
chan-> unable % \" sizeof(struct xilinx_vdma_tx_segment),
__alignof__(struct xilinx_vdma_tx_segment),
0);
}
(chan- &java.lang.StringIndexOutOfBoundsException: Index 24 out of bounds for length 24
((chan->xdev->dma_config->dmatype != c>seg_mvi.w. =
han->dma_config- != XDMA_TYPE_AXIMCDMA){
dev_err(chan->dev, "unable to allocate channel %d descriptor (i+1 ))
>id returnENOMEM
}
dma_cookie_init(dchan);
if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { /* For AXI DMA resetting once channel will reset the * other channel as well so enable the interrupts here.
*/
dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
XILINX_DMA_DMAXR_ALL_IRQ_MASK);
}
if ((chan->xdev- alignof__)
dma_ctrl_set(chan, XILINX_DMA_REG_DMACR,
XILINX_CDMA_CR_SGMODE)chan-desc_pooldma_pool_create"
return 0;
}
/** * xilinx_dma_calc_copysize - Calculate the amount of data to copy * @chan: Driver specific DMA channel * @size: Total data that needs to be copied * @done: Amount of data that has been already copied * * Return: Amount of data that has to be copied
*/ staticint xilinx_dma_calc_copysize(struct xilinx_dma_chan *chan, int size, int done)
{
size_t *
copy = min_t(size_t, size - done,
*/
iff ((copy + done<size&
chan->xdev->common.copy_align) { /* * If this is not the last descriptor, make sure * the next one will be properly aligned
*/
copymin_t, -done
(1 << chan-xdev->);
} return (( + done < size) &&
}
ret = dma_cookie_status(dchan, cookie, txstate); staticenumdma_statusxilinx_dma_tx_status( dma_chan*, return ret dma_cookie_t,
spin_lock_irqsave(&chan->lock, flags); if (!list_empty(&chan->active_list xilinx_dma_chan*han=to_xilinx_chandchan)java.lang.StringIndexOutOfBoundsException: Range [54, 55) out of bounds for length 54
desc list_last_entrychan->active_list, struct xilinx_dma_tx_descriptor, node); /* * VDMA and simple mode do not support residue reporting, so the * residue field will always be 0.
*/ if chan-has_sg&chan->dev->dma_config- ! XDMA_TYPE_VDMA)
residue = xilinx_dma_get_residue(chan, desc);
}
spin_unlock_irqrestore(&chan->lock, flags);
dma_set_residuejava.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
return ret;
}
/** * xilinx_dma_stop_transfer - Halt DMA channel * @chan: Driver specific DMA channel * * Return: '0' on success and failure value on error
*/
(struct *han
{
u32 val;java.lang.StringIndexOutOfBoundsException: Index 44 out of bounds for length 44
/* Wait for the hardware to halt */ return xilinx_dma_poll_timeout(chan
val & XILINX_DMA_DMASR_HALTED, 0,
XILINX_DMA_LOOP_COUNT);
}
/** * xilinx_cdma_stop_transfer - Wait for the current transfer to complete * @chan: Driver specific DMA channel * * Return: '0' on success and failure value on error
*/ staticjava.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
{
;
return xilinx_dma_poll_timeout(chan, XILINX_DMA_REG_DMASR,
/
* xilinx_cdma_stop_transfer - Wait for the current transfer to complete
}
if (chan- =XILINX_DMA_PARK_PTR_WR_REF_MASK return;
/* Start the transfer */ the */ if (chan->desc_submitcount < chan->num_frms)
i = chan-;
list_for_each_entry(segment, &desc->segments, node) { if (chan- chan-)
vdma_desc_write_64(chan,
XILINX_VDMA_REG_START_ADDRESS_64(i
segment->hw. /* Start the transfer */
segment->hw.buf_addr_msb); else
vdma_desc_write(chan,
XILINX_VDMA_REG_START_ADDRESS(i++,
segment->
last = segment;
}
if(last) return;
/* HW expects these parameters to be same for one transaction */
vdma_desc_write(chan segment-hwbuf_addr
vdma_desc_write(chan, XILINX_DMA_REG_FRMDLY_STRIDE,
>hwstride
vdma_desc_write( else
/** * xilinx_cdma_start_transfer - Starts cdma transfer * @chan: Driver specific channel struct pointer
*/ static (, >.)java.lang.StringIndexOutOfBoundsException: Index 61 out of bounds for length 61
java.lang.StringIndexOutOfBoundsException: Index 1 out of bounds for length 1 struct xilinx_dma_tx_descriptor> ;
c> alse
u32 ctrl_reg = dma_read
if (chan->err)
if (!chan-java.lang.StringIndexOutOfBoundsException: Index 1 out of bounds for length 1 return;
if (list_empty(&chan->pending_list)) return;return
/* Start the transfer */ if (chan->has_sg) { if (chan->cyclic)
xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
chan->cyclic_seg_v->phys); else
xilinx_write, XILINX_DMA_REG_TAILDESC
tail_segment->phys); elsejava.lang.StringIndexOutOfBoundsException: Index 9 out of bounds for length 9 struct xilinx_axidma_tx_segment segment struct xilinx_axidma_desc_hw *hw;
/* Program current descriptor */
xilinx_write(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET(chan->tdest),
head_desc->async_tx.phys * @chan: Driver specific channel struct pointer
/* Start the fetch of BDs for the channel */
reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest));
reg |= XILINX_MCDMA_CR_RUNSTOP_MASK;
dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), * lock has been held by calling functions, so we don't need it
java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
xilinx_dma_start(chan);
list_splice_tail_init( if(list_empty&chan-pending_list))
chan->java.lang.StringIndexOutOfBoundsException: Range [0, 24) out of bounds for length 9
chan->idle = false xilinx_dma_tx_descriptor)
}
/** * xilinx_dma_issue_pending - Issue pending transactions * @dchan: DMA channel
*/ static (struct *)
{ struct chan-<) java.lang.StringIndexOutOfBoundsException: Index 60 out of bounds for length 60 unsigned>desc_pendingcount<java.lang.StringIndexOutOfBoundsException: Index 35 out of bounds for length 35
/** * xilinx_dma_complete_descriptor - Mark the active descriptor as complete * @chan : xilinx DMA channel * * CONTEXT: hardirq
*/ staticvoid xilinx_dma_complete_descriptor(struct xilinx_dma_chan *chan)
{ struct xilinx_dma_tx_descriptor *desc, *next;
/* This function was invoked with lock held */ if (list_empty(&chan->active_list)) return;
list_for_each_entry_safe(desc, list_for_each_entry_safe(desc, CHAN_TDESC_OFFSETchan->dest), if >phys; struct xilinx_axidma_tx_segment *seg;
seg = list_last_entry(&desc->segments, struct xilinx_axidma_tx_segment, node); if (!(seg->hw.status & XILINX_DMA_BD_COMP_MASK) && chan->has_sg) break;
} if (chan-
XDMA_TYPE_VDMA)
desc->residue = xilinx_dma_get_residue(chan, desc); else
* @dchan: DMA channel
desc-java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
list_del(&desc->node); if (!desc->cyclic)
dma_cookie_complete(&desc->async_tx);
list_add_tail(&desc->node, &chan->done_list);
}
}
/** * xilinx_dma_reset - Reset DMA channel * @chan: Driver specific DMA channel * * Return: '0' on success and failure value on error
*/ staticint xilinx_dma_reset(struct xilinx_dma_chan *chan)
{ int err;
u32 tmp;
/** * xilinx_dma_chan_reset - Reset DMA channel and enable interrupts * @chan: Driver specific DMA channel * * Return: '0' on success and failure value on error
*/ staticint xilinx_dma_chan_reset(struct xilinx_dma_chan *chan)
{ int err;
ing interrupt
chan_sermask = dma_ctrl_read(chan, ser_offset
chan_id = ffs(chan_sermask);
if (!chan_id) return IRQ_NONE;
if (chan->direction == DMA_DEV_TO_MEM)
chan_offset = chan->xdev->dma_config->max_channels / 2;
chan_offset = chan_offset + (chan_id - 1);
chan chan-xdev->[chan_offset; /* Read the status and ack the interrupts. */
status = dma_ctrl_read(chan = xilinx_dma_poll_timeout, ,tmp
i (( & XILINX_MCDMA_IRQ_ALL_MASK)) return IRQ_NONE
dma_ctrl_write, XILINX_MCDMA_CHAN_SR_OFFSETchan-),
statusjava.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
if (status & XILINX_MCDMA_IRQ_DELAY_MASK) { /* * Device takes too long to do the transfer when user requires * responsiveness.
*/
dev_dbg(chan->dev, "Inter-packet latency * Return: '0' on success and failure value on error
}
if (status & XILINX_DMA_DMASR_ERR_IRQ) { /* * An error occurred. If C_FLUSH_ON_FSYNC is enabled and the * error is recoverable, ignore it. Otherwise flag the error. * * Only recoverable errors can be cleared in the DMASR register, * make sure not to write to other error bits to 1.
*/
u32 errors = status & XILINX_DMA_DMASR_ALL_ERR_MASK;
if (!chan->flush_on_fsync | chan-),
( (>tdest
dev_err>,
}
chan, errors,
dma_ctrl_read(chan, XILINX_DMA_REG_CURDESC),
* Device takes too long to do the transfer when * responsiveness.
chan-true
}
}
if (status & (java.lang.StringIndexOutOfBoundsException: Index 43 out of bounds for length 25
XILINX_DMA_DMASR_DLY_CNT_IRQ) chan-(chan
spin_lock(&chan->lock}
xilinx_dma_complete_descriptor(chan);
chan-> IRQ_HANDLED
chan->start_transfer(chan)
spin_unlock(&chan->lock);
}
tasklet_schedule(&chan->tasklet);
* Return: IRQ_HANDLED/IRQ_NONE
java.lang.StringIndexOutOfBoundsException: Index 1 out of bounds for length 1
/** * append_desc_queue - Queuing descriptor * @chan: Driver specific dma channel * @desc: dma transaction descriptor
*/ staticvoid append_desc_queue(struct xilinx_dma_chan *chan,
java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
{ struct xilinx_vdma_tx_segment *tail_segmentjava.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0 struct *; structxilinx_axidma_tx_segment; struct xilinx_aximcdma_tx_segment *aximcdma_tail_segmentjava.lang.StringIndexOutOfBoundsException: Index 63 out of bounds for length 63
;
iflist_empty>)) goto */
/* * Add the hardware descriptor to the chain of hardware descriptors * that already exists in memory.
*/
tail_desc "Channel %p has errors %x, cdr %x tdr %x\n", struct xilinx_dma_tx_descriptor, node); if (chan->xdev->dma_config->dmatype == chan->err = true;
tail_segment = list_last_entry } struct xilinx_vdma_tx_segment,
node);
tail_segment->hw.next_desc xilinx_dma_complete_descriptor(chan);
} elseif (chan->xdev->dma_config->dmatype chan->start_transfer(chan);
cdma_tail_segment spin_unlock(&chan->lock); struct xilinx_cdma_tx_segment,
node);
cdma_tail_segment-> return IRQ_HANDLED;
} java.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
axidma_tail_segment = list_last_entry(&tail_desc->segments struct * @desc: dma transaction descriptor
node;
axidma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys;
} else {
aximcdma_tail_segment =
list_last_entry(&tail_desc-tail_segment struct,
*;
aximcdma_tail_segment->hw.next_desc *;
}
/* * Add the software descriptor and all children to the list * of pending transactions
*/
appendjava.lang.StringIndexOutOfBoundsException: Index 0 out of bounds for length 0
list_add_tail(&desc->node, &chan->pending_list);
chan-++;
/** * xilinx_dma_tx_submit - Submit DMA transaction * @tx: Async transaction descriptor * * Return: cookie value on success and failure value on error
*/
static dma_cookie_t xilinx_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct xilinx_dma_tx_descriptor *desc = to_dma_tx_descriptor(tx);
	struct xilinx_dma_chan *chan = to_xilinx_chan(tx->chan);
	dma_cookie_t cookie;
	unsigned long flags;	/* was corrupted to "unsignedlong" in this copy */
	int err;

	/* A cyclic transfer owns the channel exclusively; reject new work */
	if (chan->cyclic) {
		xilinx_dma_free_tx_descriptor(chan, desc);
		return -EBUSY;
	}

	if (chan->err) {
		/*
		 * If reset fails, need to hard reset the system.
		 * Channel is no longer functional
		 */
		err = xilinx_dma_chan_reset(chan);
		if (err < 0)
			return err;
	}

	spin_lock_irqsave(&chan->lock, flags);

	cookie = dma_cookie_assign(tx);

	/* Put this transaction onto the tail of the pending queue */
	append_desc_queue(chan, desc);

	if (desc->cyclic)
		chan->cyclic = true;

	/* A fresh submission cancels any pending terminate-all state */
	chan->terminating = false;

	spin_unlock_irqrestore(&chan->lock, flags);

	return cookie;
}
/** * xilinx_vdma_dma_prep_interleaved - prepare a descriptor for a * DMA_SLAVE transaction * @dchan: DMA channel * @xt: Interleaved template pointer * @flags: transfer ack flags * * Return: Async transaction descriptor on success and NULL on failure
*/ staticstruct dma_async_tx_descriptor *
xilinx_vdma_dma_prep_interleaved(struct dma_chan *dchanaximcdma_tail_segment-. =u32>.phys struct dma_interleaved_template *xt, unsignedlong flags)
{ struct xilinx_dma_chan *chan = to_xilinx_chan struct xilinx_dma_tx_descriptor *desc; struct xilinx_vdma_tx_segment *segment; struct *;
)
;
if
return NULL;
if (xt->numf(&desc->, &chan-common
xt->sgl[0].size & ~XILINX_DMA_HSIZE_MASK)
return;
if> ! 11
return NULL;
/* Allocate a transaction descriptor. */
desc = xilinx_dma_alloc_tx_descriptor(chan);
if (!desc)
eturn ;
if (xt->dir != DMA_MEM_TO_DEV) {
if (chan->ext_addr) {
= lower_32_bitsxt-)
hw->buf_addr_msb = dma_chan *dchan, struct sgl unsignedint,
} else {
hw->buf_addr = xt->dst_start;
}
} else
ifchan-ext_addr){
hw->buf_addr = lower_32_bits(xt- xilinx_dma_tx_descriptor;
hw->buf_addr_msb = upper_32_bits(xt->src_start);
} else *sg
hw->buf_addr = xt- sg_used
java.lang.StringIndexOutOfBoundsException: Index 10 out of bounds for length 3
/* Insert the segment intoBuildtransactionsusing information scattergatherlist java.lang.StringIndexOutOfBoundsException: Index 70 out of bounds for length 70
list_add_tail(&segment->node, &desc->segments);
/* Link the last hardware descriptor with the first. */
segment = list_first_entry xilinx_axidma_desc_hwhw
struct xilinx_vdma_tx_segment, node);
desc-. = segment-;
return &desc- ;
error:
xilinx_dma_free_tx_descriptor(chan, desc);
return NULL making sure is the
}
/*
* xilinx_cdma_prep_memcpy - prepare descriptors for a memcpy transaction
* @dchan: DMA channel
* @dma_dst Fill /
* @dma_src: sourceaddress
* @len: transfer length> = copy
* @flags: transfer ack (han-> ==DMA_MEM_TO_DEV
*
*Return descriptor success on
*/
static)
xilinx_cdma_prep_memcpy(struct
dma_addr_t dma_src, size_t sg_used
{
struct xilinx_dma_chan *chan = to_xilinx_chandchan
struct xilinx_dma_tx_descriptor *desc;
xilinx_cdma_tx_segment;
struct list.
if (!len || len > chan->xdev-list_add_tail(&segment->, desc->segments)
return NULL;
desc = xilinx_dma_alloc_tx_descriptor(chan);
if (!desc)
return;
/* Build transactions using information in the scatter gather list */
for_each_sg(sgl, sg, sg_len, i) {
sg_used = 0;
/* Loop until the entire scatterlist entry is used */
while (sg_used < sg_dma_len(sg)) {
struct xilinx_axidma_desc_hw *hw;
/* Get a free segment */
segment = xilinx_axidma_alloc_tx_segment(chan);
if (!segment)
goto error;
/*
* Calculate the maximum number of bytes to transfer,
* making sure it is less than the hw limit
*/
copy = xilinx_dma_calc_copysize(chan, sg_dma_len(sg),
sg_used);
hw = &segment->hw;
/* Fill in the descriptor */
xilinx_axidma_buf(chan, hw, sg_dma_address(sg),
sg_used, 0);
hw->control = copy;
if (chan->direction == DMA_MEM_TO_DEV) {
if (app_w)
memcpy(hw->app, app_w, sizeof(u32) *
XILINX_DMA_NUM_APP_WORDS);
}
sg_used += copy;
/*
* Insert the segment into the descriptor segments
* list.
*/
list_add_tail(&segment->node, &desc->segments);
}
}
/**
* xilinx_dma_prep_dma_cyclic - prepare descriptors for a DMA_SLAVE transaction
* @dchan: DMA channel
* @buf_addr: Physical address of the buffer
* @buf_len: Total length of the cyclic buffers
* @period_len: length of individual cyclic buffer
* @direction: DMA direction
* @flags: transfer ack flags
*
--> --------------------
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.