dmaengine: move drivers to dma_transfer_direction
Fix up the usage of DMA direction by introducing dma_transfer_direction; this
patch moves drivers/dma/* to use the new enum.

Cc: Jassi Brar <jaswinder.singh@linaro.org>
Cc: Russell King <rmk+kernel@arm.linux.org.uk>
Cc: Viresh Kumar <viresh.kumar@st.com>
Cc: Linus Walleij <linus.walleij@linaro.org>
Cc: Nicolas Ferre <nicolas.ferre@atmel.com>
Cc: Mika Westerberg <mika.westerberg@iki.fi>
Cc: H Hartley Sweeten <hartleys@visionengravers.com>
Cc: Li Yang <leoli@freescale.com>
Cc: Zhang Wei <zw@zh-kernel.org>
Cc: Sascha Hauer <s.hauer@pengutronix.de>
Cc: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Cc: Shawn Guo <shawn.guo@freescale.com>
Cc: Yong Wang <yong.y.wang@intel.com>
Cc: Tomoya MORINAGA <tomoya-linux@dsn.lapis-semi.com>
Cc: Boojin Kim <boojin.kim@samsung.com>
Cc: Barry Song <Baohua.Song@csr.com>
Acked-by: Mika Westerberg <mika.westerberg@iki.fi>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Viresh Kumar <viresh.kumar@st.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Vinod Koul <vinod.koul@linux.intel.com>
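For orientation, here is a minimal standalone sketch of the old-to-new direction
mapping this conversion applies. The two enums mirror the kernel's
dma_data_direction (DMA mapping API) and the newly introduced
dma_transfer_direction (dmaengine slave API); they are re-declared here only so
the sketch compiles on its own, and to_transfer_direction() is a hypothetical
helper, not part of this patch.

/*
 * Illustrative sketch only, not part of this patch: the old -> new
 * direction mapping applied by this conversion.
 */
enum dma_data_direction {               /* DMA mapping API: memory's view */
        DMA_BIDIRECTIONAL = 0,
        DMA_TO_DEVICE = 1,
        DMA_FROM_DEVICE = 2,
        DMA_NONE = 3,
};

enum dma_transfer_direction {           /* dmaengine slave API: transfer's view */
        DMA_MEM_TO_MEM,
        DMA_MEM_TO_DEV,                 /* replaces DMA_TO_DEVICE in slave calls */
        DMA_DEV_TO_MEM,                 /* replaces DMA_FROM_DEVICE in slave calls */
        DMA_DEV_TO_DEV,
};

/* Hypothetical helper: map a slave-capable dma_data_direction onto the new enum. */
static inline int to_transfer_direction(enum dma_data_direction dir,
                                        enum dma_transfer_direction *out)
{
        switch (dir) {
        case DMA_TO_DEVICE:             /* memory -> peripheral */
                *out = DMA_MEM_TO_DEV;
                return 0;
        case DMA_FROM_DEVICE:           /* peripheral -> memory */
                *out = DMA_DEV_TO_MEM;
                return 0;
        default:
                return -1;              /* no slave-transfer equivalent */
        }
}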
@@ -377,7 +377,7 @@ static void mxs_dma_free_chan_resources(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *mxs_dma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
-		unsigned int sg_len, enum dma_data_direction direction,
+		unsigned int sg_len, enum dma_transfer_direction direction,
 		unsigned long append)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
@@ -450,7 +450,7 @@ static struct dma_async_tx_descriptor *mxs_dma_prep_slave_sg(
 		ccw->bits |= CCW_CHAIN;
 		ccw->bits |= CCW_HALT_ON_TERM;
 		ccw->bits |= CCW_TERM_FLUSH;
-		ccw->bits |= BF_CCW(direction == DMA_FROM_DEVICE ?
+		ccw->bits |= BF_CCW(direction == DMA_DEV_TO_MEM ?
 				MXS_DMA_CMD_WRITE : MXS_DMA_CMD_READ,
 				COMMAND);
 
@@ -472,7 +472,7 @@ err_out:
 
 static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_data_direction direction)
+		size_t period_len, enum dma_transfer_direction direction)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
 	struct mxs_dma_engine *mxs_dma = mxs_chan->mxs_dma;
@@ -515,7 +515,7 @@ static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic(
 		ccw->bits |= CCW_IRQ;
 		ccw->bits |= CCW_HALT_ON_TERM;
 		ccw->bits |= CCW_TERM_FLUSH;
-		ccw->bits |= BF_CCW(direction == DMA_FROM_DEVICE ?
+		ccw->bits |= BF_CCW(direction == DMA_DEV_TO_MEM ?
 				MXS_DMA_CMD_WRITE : MXS_DMA_CMD_READ, COMMAND);
 
 		dma_addr += period_len;
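For context, a hedged caller-side sketch of what the change means for a client
driver: the channel, scatterlist, and flags below are placeholders, and the
prep callback is invoked through the five-argument prototype as it stood around
this series (later kernels altered the hook's signature).

#include <linux/errno.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical client snippet, not from this patch: requesting a
 * peripheral-to-memory slave transfer after the enum switch.  Before this
 * series the same call passed DMA_FROM_DEVICE; now it passes DMA_DEV_TO_MEM.
 */
static int example_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
                            unsigned int sg_len)
{
        struct dma_async_tx_descriptor *desc;

        desc = chan->device->device_prep_slave_sg(chan, sgl, sg_len,
                                                  DMA_DEV_TO_MEM, 0);
        if (!desc)
                return -ENOMEM;

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
}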