Commit 099a9a9

ADESTM authored and vinodkoul committed
dmaengine: stm32-dma: add device_pause/device_resume support
At any time, a DMA transfer can be suspended to be restarted later, before the
end of the DMA transfer.

In order to restart from the point where the transfer was stopped, DMA_SxNDTR
has to be read after disabling the channel (by clearing the EN bit in the
DMA_SxCR register) to know the number of data items already collected.
Peripheral and/or memory addresses have to be updated in order to adjust the
address pointers. The SxNDTR register has to be updated with the remaining
number of data items to be transferred (the value read when the channel was
disabled). Then the channel can be re-enabled to resume the transfer from the
point at which it was suspended.

If the channel was configured in circular or double-buffer mode, that mode
must be disabled before re-enabling the channel, to be able to reconfigure the
SxNDTR register, and re-activated on the next Transfer Complete interrupt,
where the channel is disabled by hardware. This is because, on resume,
re-writing the SxNDTR register value updates the internal hardware auto-reload
data counter, which would otherwise truncate all transfers following a
pause/resume sequence.

Signed-off-by: Amelie Delaunay <amelie.delaunay@foss.st.com>
Link: https://lore.kernel.org/r/20220505115611.38845-5-amelie.delaunay@foss.st.com
Signed-off-by: Vinod Koul <vkoul@kernel.org>
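With these callbacks in place, a dmaengine client reaches this support through
the generic dmaengine_pause()/dmaengine_resume() helpers, which dispatch to the
device_pause/device_resume hooks registered below. A minimal client-side sketch
(channel request, slave configuration, and descriptor submission omitted;
"chan" and "cookie" are assumed to come from that setup):

    struct dma_tx_state state;
    enum dma_status status;
    int ret;

    /* stm32_dma_pause() clears EN; the channel is reported DMA_PAUSED
     * once the pause has been accepted by the hardware. */
    ret = dmaengine_pause(chan);
    if (!ret) {
        /* Residue reporting keeps working across a pause. */
        status = dmaengine_tx_status(chan, cookie, &state);
        if (status == DMA_PAUSED)
            pr_info("paused with %u bytes remaining\n", state.residue);

        /* Restart from the point where the transfer was suspended. */
        dmaengine_resume(chan);
    }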
1 parent baa1424 commit 099a9a9

1 file changed

Lines changed: 234 additions & 13 deletions

File tree

drivers/dma/stm32-dma.c

@@ -208,6 +208,7 @@ struct stm32_dma_chan {
 	u32 threshold;
 	u32 mem_burst;
 	u32 mem_width;
+	enum dma_status status;
 };
 
 struct stm32_dma_device {
@@ -485,6 +486,7 @@ static void stm32_dma_stop(struct stm32_dma_chan *chan)
 	}
 
 	chan->busy = false;
+	chan->status = DMA_COMPLETE;
 }
 
 static int stm32_dma_terminate_all(struct dma_chan *c)
@@ -595,11 +597,11 @@ static void stm32_dma_start_transfer(struct stm32_dma_chan *chan)
 	stm32_dma_dump_reg(chan);
 
 	/* Start DMA */
+	chan->busy = true;
+	chan->status = DMA_IN_PROGRESS;
 	reg->dma_scr |= STM32_DMA_SCR_EN;
 	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);
 
-	chan->busy = true;
-
 	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
 }
 
@@ -627,6 +629,95 @@ static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan)
 	}
 }
 
+static void stm32_dma_handle_chan_paused(struct stm32_dma_chan *chan)
+{
+	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
+	u32 dma_scr;
+
+	/*
+	 * Read and store current remaining data items and peripheral/memory addresses to be
+	 * updated on resume
+	 */
+	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
+	/*
+	 * Transfer can be paused while between a previous resume and reconfiguration on transfer
+	 * complete. If transfer is cyclic and CIRC and DBM have been deactivated for resume, need
+	 * to set it here in SCR backup to ensure a good reconfiguration on transfer complete.
+	 */
+	if (chan->desc && chan->desc->cyclic) {
+		if (chan->desc->num_sgs == 1)
+			dma_scr |= STM32_DMA_SCR_CIRC;
+		else
+			dma_scr |= STM32_DMA_SCR_DBM;
+	}
+	chan->chan_reg.dma_scr = dma_scr;
+
+	/*
+	 * Need to temporarily deactivate CIRC/DBM until next Transfer Complete interrupt, otherwise
+	 * on resume NDTR autoreload value will be wrong (lower than the initial period length)
+	 */
+	if (chan->desc && chan->desc->cyclic) {
+		dma_scr &= ~(STM32_DMA_SCR_DBM | STM32_DMA_SCR_CIRC);
+		stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
+	}
+
+	chan->chan_reg.dma_sndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));
+
+	dev_dbg(chan2dev(chan), "vchan %pK: paused\n", &chan->vchan);
+}
+
+static void stm32_dma_post_resume_reconfigure(struct stm32_dma_chan *chan)
+{
+	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
+	struct stm32_dma_sg_req *sg_req;
+	u32 dma_scr, status, id;
+
+	id = chan->id;
+	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
+
+	/* Clear interrupt status if it is there */
+	status = stm32_dma_irq_status(chan);
+	if (status)
+		stm32_dma_irq_clear(chan, status);
+
+	if (!chan->next_sg)
+		sg_req = &chan->desc->sg_req[chan->desc->num_sgs - 1];
+	else
+		sg_req = &chan->desc->sg_req[chan->next_sg - 1];
+
+	/* Reconfigure NDTR with the initial value */
+	stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), sg_req->chan_reg.dma_sndtr);
+
+	/* Restore SPAR */
+	stm32_dma_write(dmadev, STM32_DMA_SPAR(id), sg_req->chan_reg.dma_spar);
+
+	/* Restore SM0AR/SM1AR whatever DBM/CT as they may have been modified */
+	stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), sg_req->chan_reg.dma_sm0ar);
+	stm32_dma_write(dmadev, STM32_DMA_SM1AR(id), sg_req->chan_reg.dma_sm1ar);
+
+	/* Reactivate CIRC/DBM if needed */
+	if (chan->chan_reg.dma_scr & STM32_DMA_SCR_DBM) {
+		dma_scr |= STM32_DMA_SCR_DBM;
+		/* Restore CT */
+		if (chan->chan_reg.dma_scr & STM32_DMA_SCR_CT)
+			dma_scr &= ~STM32_DMA_SCR_CT;
+		else
+			dma_scr |= STM32_DMA_SCR_CT;
+	} else if (chan->chan_reg.dma_scr & STM32_DMA_SCR_CIRC) {
+		dma_scr |= STM32_DMA_SCR_CIRC;
+	}
+	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
+
+	stm32_dma_configure_next_sg(chan);
+
+	stm32_dma_dump_reg(chan);
+
+	dma_scr |= STM32_DMA_SCR_EN;
+	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
+
+	dev_dbg(chan2dev(chan), "vchan %pK: reconfigured after pause/resume\n", &chan->vchan);
+}
+
 static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan, u32 scr)
 {
 	if (!chan->desc)
@@ -635,10 +726,14 @@ static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan, u32 scr)
 	if (chan->desc->cyclic) {
 		vchan_cyclic_callback(&chan->desc->vdesc);
 		stm32_dma_sg_inc(chan);
-		if (scr & STM32_DMA_SCR_DBM)
+		/* cyclic while CIRC/DBM disable => post resume reconfiguration needed */
+		if (!(scr & (STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM)))
+			stm32_dma_post_resume_reconfigure(chan);
+		else if (scr & STM32_DMA_SCR_DBM)
 			stm32_dma_configure_next_sg(chan);
 	} else {
 		chan->busy = false;
+		chan->status = DMA_COMPLETE;
 		if (chan->next_sg == chan->desc->num_sgs) {
 			vchan_cookie_complete(&chan->desc->vdesc);
 			chan->desc = NULL;
@@ -679,8 +774,12 @@ static irqreturn_t stm32_dma_chan_irq(int irq, void *devid)
 
 	if (status & STM32_DMA_TCI) {
 		stm32_dma_irq_clear(chan, STM32_DMA_TCI);
-		if (scr & STM32_DMA_SCR_TCIE)
-			stm32_dma_handle_chan_done(chan, scr);
+		if (scr & STM32_DMA_SCR_TCIE) {
+			if (chan->status == DMA_PAUSED && !(scr & STM32_DMA_SCR_EN))
+				stm32_dma_handle_chan_paused(chan);
+			else
+				stm32_dma_handle_chan_done(chan, scr);
+		}
 		status &= ~STM32_DMA_TCI;
 	}
 
@@ -715,6 +814,107 @@ static void stm32_dma_issue_pending(struct dma_chan *c)
 	spin_unlock_irqrestore(&chan->vchan.lock, flags);
 }
 
+static int stm32_dma_pause(struct dma_chan *c)
+{
+	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
+	unsigned long flags;
+	int ret;
+
+	if (chan->status != DMA_IN_PROGRESS)
+		return -EPERM;
+
+	spin_lock_irqsave(&chan->vchan.lock, flags);
+	ret = stm32_dma_disable_chan(chan);
+	/*
+	 * A transfer complete flag is set to indicate the end of transfer due to the stream
+	 * interruption, so wait for interrupt
+	 */
+	if (!ret)
+		chan->status = DMA_PAUSED;
+	spin_unlock_irqrestore(&chan->vchan.lock, flags);
+
+	return ret;
+}
+
+static int stm32_dma_resume(struct dma_chan *c)
+{
+	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
+	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
+	struct stm32_dma_chan_reg chan_reg = chan->chan_reg;
+	u32 id = chan->id, scr, ndtr, offset, spar, sm0ar, sm1ar;
+	struct stm32_dma_sg_req *sg_req;
+	unsigned long flags;
+
+	if (chan->status != DMA_PAUSED)
+		return -EPERM;
+
+	scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
+	if (WARN_ON(scr & STM32_DMA_SCR_EN))
+		return -EPERM;
+
+	spin_lock_irqsave(&chan->vchan.lock, flags);
+
+	/* sg_req[prev_sg] contains original ndtr, sm0ar and sm1ar before pausing the transfer */
+	if (!chan->next_sg)
+		sg_req = &chan->desc->sg_req[chan->desc->num_sgs - 1];
+	else
+		sg_req = &chan->desc->sg_req[chan->next_sg - 1];
+
+	ndtr = sg_req->chan_reg.dma_sndtr;
+	offset = (ndtr - chan_reg.dma_sndtr) << STM32_DMA_SCR_PSIZE_GET(chan_reg.dma_scr);
+	spar = sg_req->chan_reg.dma_spar;
+	sm0ar = sg_req->chan_reg.dma_sm0ar;
+	sm1ar = sg_req->chan_reg.dma_sm1ar;
+
+	/*
+	 * The peripheral and/or memory addresses have to be updated in order to adjust the
+	 * address pointers. Need to check increment.
+	 */
+	if (chan_reg.dma_scr & STM32_DMA_SCR_PINC)
+		stm32_dma_write(dmadev, STM32_DMA_SPAR(id), spar + offset);
+	else
+		stm32_dma_write(dmadev, STM32_DMA_SPAR(id), spar);
+
+	if (!(chan_reg.dma_scr & STM32_DMA_SCR_MINC))
+		offset = 0;
+
+	/*
+	 * In case of DBM, the current target could be SM1AR.
+	 * Need to temporarily deactivate CIRC/DBM to finish the current transfer, so
+	 * SM0AR becomes the current target and must be updated with SM1AR + offset if CT=1.
+	 */
+	if ((chan_reg.dma_scr & STM32_DMA_SCR_DBM) && (chan_reg.dma_scr & STM32_DMA_SCR_CT))
+		stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), sm1ar + offset);
+	else
+		stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), sm0ar + offset);
+
+	/* NDTR must be restored otherwise internal HW counter won't be correctly reset */
+	stm32_dma_write(dmadev, STM32_DMA_SNDTR(id), chan_reg.dma_sndtr);
+
+	/*
+	 * Need to temporarily deactivate CIRC/DBM until next Transfer Complete interrupt,
+	 * otherwise NDTR autoreload value will be wrong (lower than the initial period length)
+	 */
+	if (chan_reg.dma_scr & (STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM))
+		chan_reg.dma_scr &= ~(STM32_DMA_SCR_CIRC | STM32_DMA_SCR_DBM);
+
+	if (chan_reg.dma_scr & STM32_DMA_SCR_DBM)
+		stm32_dma_configure_next_sg(chan);
+
+	stm32_dma_dump_reg(chan);
+
+	/* The stream may then be re-enabled to restart transfer from the point it was stopped */
+	chan->status = DMA_IN_PROGRESS;
+	chan_reg.dma_scr |= STM32_DMA_SCR_EN;
+	stm32_dma_write(dmadev, STM32_DMA_SCR(id), chan_reg.dma_scr);
+
+	spin_unlock_irqrestore(&chan->vchan.lock, flags);
+
+	dev_dbg(chan2dev(chan), "vchan %pK: resumed\n", &chan->vchan);
+
+	return 0;
+}
+
 static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
 				    enum dma_transfer_direction direction,
 				    enum dma_slave_buswidth *buswidth,
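The address rewinding in stm32_dma_resume() above follows directly from the
procedure in the commit message: the number of items already transferred is
the period's initial NDTR minus the NDTR latched at pause time, converted to
bytes by shifting with the programmed peripheral data size (PSIZE). A
standalone sketch of that arithmetic, with invented example values:

    #include <stdio.h>

    int main(void)
    {
        unsigned int ndtr_initial = 1024; /* items programmed for the period */
        unsigned int ndtr_at_pause = 640; /* items still pending at pause */
        unsigned int psize_shift = 1;     /* PSIZE = 16-bit => shift by 1 */

        /* Mirrors the driver: offset = (ndtr - chan_reg.dma_sndtr)
         *                          << STM32_DMA_SCR_PSIZE_GET(chan_reg.dma_scr) */
        unsigned int offset = (ndtr_initial - ndtr_at_pause) << psize_shift;

        /* 384 half-words done => pointers advance by 768 bytes, applied to
         * SPAR only if PINC is set and to SM0AR only if MINC is set. */
        printf("byte offset to apply on resume: %u\n", offset);
        return 0;
    }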
@@ -982,10 +1182,12 @@ static struct dma_async_tx_descriptor *stm32_dma_prep_dma_cyclic(
 	}
 
 	/* Enable Circular mode or double buffer mode */
-	if (buf_len == period_len)
+	if (buf_len == period_len) {
 		chan->chan_reg.dma_scr |= STM32_DMA_SCR_CIRC;
-	else
+	} else {
 		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM;
+		chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_CT;
+	}
 
 	/* Clear periph ctrl if client set it */
 	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;
@@ -1095,24 +1297,36 @@ static bool stm32_dma_is_current_sg(struct stm32_dma_chan *chan)
 {
 	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
 	struct stm32_dma_sg_req *sg_req;
-	u32 dma_scr, dma_smar, id;
+	u32 dma_scr, dma_smar, id, period_len;
 
 	id = chan->id;
 	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
 
+	/* In cyclic CIRC but not DBM, CT is not used */
 	if (!(dma_scr & STM32_DMA_SCR_DBM))
 		return true;
 
 	sg_req = &chan->desc->sg_req[chan->next_sg];
+	period_len = sg_req->len;
 
+	/* DBM - take care of a previous pause/resume not yet post reconfigured */
 	if (dma_scr & STM32_DMA_SCR_CT) {
 		dma_smar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(id));
-		return (dma_smar == sg_req->chan_reg.dma_sm0ar);
+		/*
+		 * If transfer has been pause/resumed,
+		 * SM0AR is in the range of [SM0AR:SM0AR+period_len]
+		 */
+		return (dma_smar >= sg_req->chan_reg.dma_sm0ar &&
+			dma_smar < sg_req->chan_reg.dma_sm0ar + period_len);
 	}
 
 	dma_smar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(id));
-
-	return (dma_smar == sg_req->chan_reg.dma_sm1ar);
+	/*
+	 * If transfer has been pause/resumed,
+	 * SM1AR is in the range of [SM1AR:SM1AR+period_len]
+	 */
+	return (dma_smar >= sg_req->chan_reg.dma_sm1ar &&
+		dma_smar < sg_req->chan_reg.dma_sm1ar + period_len);
 }
 
 static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan,
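The widened comparison in stm32_dma_is_current_sg() exists because, after a
pause/resume that has not yet been post-reconfigured, the hardware memory
address no longer sits exactly at the period start but somewhere inside the
current period. A standalone sketch of the predicate, with invented addresses:

    #include <stdbool.h>
    #include <stdio.h>

    /* Mirrors the driver's test: the live SMxAR must fall within
     * [period_start, period_start + period_len). */
    static bool in_current_period(unsigned long smar, unsigned long start,
                                  unsigned long period_len)
    {
        return smar >= start && smar < start + period_len;
    }

    int main(void)
    {
        /* Resumed mid-period: 0x20000300 lies inside [0x20000000, 0x20000400) */
        printf("%d\n", in_current_period(0x20000300UL, 0x20000000UL, 0x400UL));
        /* The next period's start address does not */
        printf("%d\n", in_current_period(0x20000400UL, 0x20000000UL, 0x400UL));
        return 0;
    }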
@@ -1152,7 +1366,7 @@ static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan,
 
 	residue = stm32_dma_get_remaining_bytes(chan);
 
-	if (!stm32_dma_is_current_sg(chan)) {
+	if (chan->desc->cyclic && !stm32_dma_is_current_sg(chan)) {
 		n_sg++;
 		if (n_sg == chan->desc->num_sgs)
 			n_sg = 0;
@@ -1192,7 +1406,12 @@ static enum dma_status stm32_dma_tx_status(struct dma_chan *c,
 	u32 residue = 0;
 
 	status = dma_cookie_status(c, cookie, state);
-	if (status == DMA_COMPLETE || !state)
+	if (status == DMA_COMPLETE)
+		return status;
+
+	status = chan->status;
+
+	if (!state)
 		return status;
 
 	spin_lock_irqsave(&chan->vchan.lock, flags);
@@ -1381,6 +1600,8 @@ static int stm32_dma_probe(struct platform_device *pdev)
 	dd->device_prep_slave_sg = stm32_dma_prep_slave_sg;
 	dd->device_prep_dma_cyclic = stm32_dma_prep_dma_cyclic;
 	dd->device_config = stm32_dma_slave_config;
+	dd->device_pause = stm32_dma_pause;
+	dd->device_resume = stm32_dma_resume;
 	dd->device_terminate_all = stm32_dma_terminate_all;
 	dd->device_synchronize = stm32_dma_synchronize;
 	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
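Because device_pause and device_resume are now populated, the dmaengine core
also advertises the capability to clients through dma_get_slave_caps(), whose
cmd_pause/cmd_resume fields are derived from the presence of these callbacks.
A hedged client-side sketch ("chan" is assumed to be an already-requested
channel):

    struct dma_slave_caps caps;

    if (!dma_get_slave_caps(chan, &caps) && caps.cmd_pause && caps.cmd_resume)
        pr_info("stm32-dma channel supports pause/resume\n");
    else
        pr_info("no pause/resume; fall back to terminate and reconfigure\n");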
