        if (!atc_chan_is_cyclic(atchan))
                dma_cookie_complete(txd);
 
-       /* Remove transfer node from the active list. */
-       list_del_init(&desc->desc_node);
        spin_unlock_irqrestore(&atchan->lock, flags);
 
        dma_descriptor_unmap(txd);
  */
 static void atc_advance_work(struct at_dma_chan *atchan)
 {
+       struct at_desc *desc;
        unsigned long flags;
 
        dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n");
        spin_lock_irqsave(&atchan->lock, flags);
        if (atc_chan_is_enabled(atchan) || list_empty(&atchan->active_list))
                return spin_unlock_irqrestore(&atchan->lock, flags);
-       spin_unlock_irqrestore(&atchan->lock, flags);
 
-       atc_chain_complete(atchan, atc_first_active(atchan));
+       desc = atc_first_active(atchan);
+       /* Remove the transfer node from the active list. */
+       list_del_init(&desc->desc_node);
+       spin_unlock_irqrestore(&atchan->lock, flags);
+       atc_chain_complete(atchan, desc);
 
        /* advance work */
        spin_lock_irqsave(&atchan->lock, flags);