35 #if defined( LDMA_PRESENT ) && ( LDMA_COUNT == 1 ) 
   53 #if defined( LDMA_IRQ_HANDLER_TEMPLATE ) 
   58 void LDMA_IRQHandler(
void)
 
   62   uint32_t pending = LDMA_IntGetEnabled();
 
   65   while (pending & LDMA_IF_ERROR)
 
   72     uint32_t mask = 0x1 << ch;
 
   90 void LDMA_DeInit(
void)
 
   92   NVIC_DisableIRQ(LDMA_IRQn);
 
  112 void LDMA_EnableChannelRequest(
int ch, 
bool enable)
 
  114   EFM_ASSERT(ch < DMA_CHAN_COUNT);
 
  137 void LDMA_Init(
const LDMA_Init_t *init)
 
  139   EFM_ASSERT(init != NULL);
 
  140   EFM_ASSERT(!((init->ldmaInitCtrlNumFixed << _LDMA_CTRL_NUMFIXED_SHIFT)
 
  141                & ~_LDMA_CTRL_NUMFIXED_MASK));
 
  142   EFM_ASSERT(!((init->ldmaInitCtrlSyncPrsClrEn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
 
  143                & ~_LDMA_CTRL_SYNCPRSCLREN_MASK));
 
  144   EFM_ASSERT(!((init->ldmaInitCtrlSyncPrsSetEn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT)
 
  145                & ~_LDMA_CTRL_SYNCPRSSETEN_MASK));
 
  150   LDMA->CTRL = (init->ldmaInitCtrlNumFixed << _LDMA_CTRL_NUMFIXED_SHIFT)
 
  151                | (init->ldmaInitCtrlSyncPrsClrEn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
 
  152                | (init->ldmaInitCtrlSyncPrsSetEn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT);
 
  159   LDMA->IEN = LDMA_IEN_ERROR;
 
  160   LDMA->IFC = 0xFFFFFFFF;
 
  162   NVIC_ClearPendingIRQ(LDMA_IRQn);
 
  165   NVIC_SetPriority(LDMA_IRQn, init->ldmaInitIrqPriority);
 
  167   NVIC_EnableIRQ(LDMA_IRQn);
 
  183 void LDMA_StartTransfer(
int ch,
 
  184                         const LDMA_TransferCfg_t *transfer,
 
  185                         const LDMA_Descriptor_t  *descriptor)
 
  189   uint32_t chMask = 1 << ch;
 
  191   EFM_ASSERT(ch < DMA_CHAN_COUNT);
 
  192   EFM_ASSERT(transfer != NULL);
 
  193   EFM_ASSERT(!(transfer->ldmaReqSel & ~_LDMA_CH_REQSEL_MASK));
 
  195   EFM_ASSERT(!((transfer->ldmaCtrlSyncPrsClrOff << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
 
  196                & ~_LDMA_CTRL_SYNCPRSCLREN_MASK));
 
  197   EFM_ASSERT(!((transfer->ldmaCtrlSyncPrsClrOn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
 
  198                & ~_LDMA_CTRL_SYNCPRSCLREN_MASK));
 
  199   EFM_ASSERT(!((transfer->ldmaCtrlSyncPrsSetOff << _LDMA_CTRL_SYNCPRSSETEN_SHIFT)
 
  200                & ~_LDMA_CTRL_SYNCPRSSETEN_MASK));
 
  201   EFM_ASSERT(!((transfer->ldmaCtrlSyncPrsSetOn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT)
 
  202                & ~_LDMA_CTRL_SYNCPRSSETEN_MASK));
 
  204   EFM_ASSERT(!((transfer->ldmaCfgArbSlots << _LDMA_CH_CFG_ARBSLOTS_SHIFT)
 
  205                & ~_LDMA_CH_CFG_ARBSLOTS_MASK));
 
  206   EFM_ASSERT(!((transfer->ldmaCfgSrcIncSign << _LDMA_CH_CFG_SRCINCSIGN_SHIFT)
 
  207                & ~_LDMA_CH_CFG_SRCINCSIGN_MASK ) );
 
  208   EFM_ASSERT(!((transfer->ldmaCfgDstIncSign << _LDMA_CH_CFG_DSTINCSIGN_SHIFT)
 
  209                & ~_LDMA_CH_CFG_DSTINCSIGN_MASK));
 
  210   EFM_ASSERT(!((transfer->ldmaLoopCnt << _LDMA_CH_LOOP_LOOPCNT_SHIFT)
 
  211                & ~_LDMA_CH_LOOP_LOOPCNT_MASK));
 
  213   LDMA->CH[ch].REQSEL = transfer->ldmaReqSel;
 
  214   LDMA->CH[ch].LOOP = (transfer->ldmaLoopCnt << _LDMA_CH_LOOP_LOOPCNT_SHIFT);
 
  215   LDMA->CH[ch].CFG = (transfer->ldmaCfgArbSlots << _LDMA_CH_CFG_ARBSLOTS_SHIFT)
 
  216                      | (transfer->ldmaCfgSrcIncSign << _LDMA_CH_CFG_SRCINCSIGN_SHIFT)
 
  217                      | (transfer->ldmaCfgDstIncSign << _LDMA_CH_CFG_DSTINCSIGN_SHIFT);
 
  220   LDMA->CH[ch].LINK = (uint32_t)descriptor & _LDMA_CH_LINK_LINKADDR_MASK;
 
  231   if (transfer->ldmaReqDis)
 
  233     LDMA->REQDIS |= chMask;
 
  236   if (transfer->ldmaDbgHalt)
 
  238     LDMA->DBGHALT |= chMask;
 
  243   if (transfer->ldmaCtrlSyncPrsClrOff)
 
  245     tmp &= ~_LDMA_CTRL_SYNCPRSCLREN_MASK
 
  246            | (~transfer->ldmaCtrlSyncPrsClrOff << _LDMA_CTRL_SYNCPRSCLREN_SHIFT);
 
  249   if (transfer->ldmaCtrlSyncPrsClrOn)
 
  251     tmp |= transfer->ldmaCtrlSyncPrsClrOn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT;
 
  254   if (transfer->ldmaCtrlSyncPrsSetOff)
 
  256     tmp &= ~_LDMA_CTRL_SYNCPRSSETEN_MASK
 
  257            | (~transfer->ldmaCtrlSyncPrsSetOff << _LDMA_CTRL_SYNCPRSSETEN_SHIFT);
 
  260   if (transfer->ldmaCtrlSyncPrsSetOn)
 
  262     tmp |= transfer->ldmaCtrlSyncPrsSetOn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT;
 
  268   LDMA->LINKLOAD = chMask;      
 
  284 void LDMA_StopTransfer(
int ch)
 
  286   uint32_t chMask = 1 << ch;
 
  288   EFM_ASSERT(ch < DMA_CHAN_COUNT);
 
  291     LDMA->IEN &= ~chMask;
 
  306 bool LDMA_TransferDone(
int ch)
 
  309   uint32_t chMask = 1 << ch;
 
  311   EFM_ASSERT(ch < DMA_CHAN_COUNT);
 
  314     if (((LDMA->CHEN & chMask) == 0)
 
  315         && ((LDMA->CHDONE & chMask) == chMask))
 
  338 uint32_t LDMA_TransferRemainingCount(
int ch)
 
  340   uint32_t remaining, done, iflag;
 
  341   uint32_t chMask = 1 << ch;
 
  343   EFM_ASSERT(ch < DMA_CHAN_COUNT);
 
  348     remaining = LDMA->CH[ch].CTRL;
 
  353   remaining = (remaining & _LDMA_CH_CTRL_XFERCNT_MASK)
 
  354               >> _LDMA_CH_CTRL_XFERCNT_SHIFT;
 
  356   if (done || ((remaining == 0) && iflag))
 
  361   return remaining + 1;
 
Clock management unit (CMU) API. 
 
#define CORE_DECLARE_IRQ_STATE
 
Emlib peripheral API "assert" implementation. 
 
RAM and peripheral bit-field set and clear API. 
 
#define CORE_ENTER_ATOMIC()  
 
#define CORE_ATOMIC_SECTION(yourcode)
 
Core interrupt handling API. 
 
void CMU_ClockEnable(CMU_Clock_TypeDef clock, bool enable)
Enable/disable a clock. 
 
#define CORE_EXIT_ATOMIC()    
 
Linked direct memory access (LDMA) controller API. 
 
__STATIC_INLINE void BUS_RegMaskedClear(volatile uint32_t *addr, uint32_t mask)
Perform a masked clear operation on peripheral register address. 
 
__STATIC_INLINE void BUS_RegBitWrite(volatile uint32_t *addr, unsigned int bit, unsigned int val)
Perform a single-bit write operation on a peripheral register.