                         struct ixgbe_tx_buffer *first,
                         const u8 hdr_len)
 {
-       dma_addr_t dma;
        struct sk_buff *skb = first->skb;
        struct ixgbe_tx_buffer *tx_buffer;
        union ixgbe_adv_tx_desc *tx_desc;
-       struct skb_frag_struct *frag = &skb_shinfo(skb)->frags[0];
-       unsigned int data_len = skb->data_len;
-       unsigned int size = skb_headlen(skb);
-       unsigned int paylen = skb->len - hdr_len;
+       struct skb_frag_struct *frag;
+       dma_addr_t dma;
+       unsigned int data_len, size;
        u32 tx_flags = first->tx_flags;
        u32 cmd_type = ixgbe_tx_cmd_type(skb, tx_flags);
        u16 i = tx_ring->next_to_use;
 
        tx_desc = IXGBE_TX_DESC(tx_ring, i);
 
-       ixgbe_tx_olinfo_status(tx_desc, tx_flags, paylen);
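+       /* olinfo_status carries the payload length: frame length minus the offloaded header bytes */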
+       ixgbe_tx_olinfo_status(tx_desc, tx_flags, skb->len - hdr_len);
+
+       size = skb_headlen(skb);
+       data_len = skb->data_len;
 
 #ifdef IXGBE_FCOE
        if (tx_flags & IXGBE_TX_FLAGS_FCOE) {
                if (data_len < sizeof(struct fcoe_crc_eof)) {
                        size -= sizeof(struct fcoe_crc_eof) - data_len;
                        data_len = 0;
                } else {
                        data_len -= sizeof(struct fcoe_crc_eof);
                }
        }
 
 #endif
        dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
-       if (dma_mapping_error(tx_ring->dev, dma))
-               goto dma_error;
 
-       /* record length, and DMA address */
-       dma_unmap_len_set(first, len, size);
-       dma_unmap_addr_set(first, dma, dma);
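+       /* unmap info for the first buffer lives in "first" itself; each later fragment gets its own ring entry below */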
+       tx_buffer = first;
+
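+       /*
+        * a single loop now covers the linear head and every page frag;
+        * the error check at the top applies to the mapping made just
+        * above as well as the frag mapping at the bottom of each pass
+        */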
+       for (frag = &skb_shinfo(skb)->frags[0];; frag++) {
+               if (dma_mapping_error(tx_ring->dev, dma))
+                       goto dma_error;
+
+               /* record length, and DMA address */
+               dma_unmap_len_set(tx_buffer, len, size);
+               dma_unmap_addr_set(tx_buffer, dma, dma);
 
-       tx_desc->read.buffer_addr = cpu_to_le64(dma);
+               tx_desc->read.buffer_addr = cpu_to_le64(dma);
 
-       for (;;) {
                while (unlikely(size > IXGBE_MAX_DATA_PER_TXD)) {
                        tx_desc->read.cmd_type_len =
                                cpu_to_le32(cmd_type ^ IXGBE_MAX_DATA_PER_TXD);
 
                        i++;
                        tx_desc++;
                        if (i == tx_ring->count) {
                                tx_desc = IXGBE_TX_DESC(tx_ring, 0);
                                i = 0;
                        }
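+                       /* only the first descriptor carries a nonzero olinfo_status */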
+                       tx_desc->read.olinfo_status = 0;
 
                        dma += IXGBE_MAX_DATA_PER_TXD;
                        size -= IXGBE_MAX_DATA_PER_TXD;
 
                        tx_desc->read.buffer_addr = cpu_to_le64(dma);
-                       tx_desc->read.olinfo_status = 0;
                }
 
                if (likely(!data_len))
                        break;
 
                tx_desc->read.cmd_type_len = cpu_to_le32(cmd_type ^ size);
 
                i++;
                tx_desc++;
                if (i == tx_ring->count) {
                        tx_desc = IXGBE_TX_DESC(tx_ring, 0);
                        i = 0;
                }
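+               /* likewise, clear olinfo_status on each new fragment descriptor */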
+               tx_desc->read.olinfo_status = 0;
 
 #ifdef IXGBE_FCOE
                size = min_t(unsigned int, data_len, skb_frag_size(frag));
 #else
                size = skb_frag_size(frag);
 #endif
                data_len -= size;
 
                dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
                                       DMA_TO_DEVICE);
-               if (dma_mapping_error(tx_ring->dev, dma))
-                       goto dma_error;
 
                tx_buffer = &tx_ring->tx_buffer_info[i];
-               dma_unmap_len_set(tx_buffer, len, size);
-               dma_unmap_addr_set(tx_buffer, dma, dma);
-
-               tx_desc->read.buffer_addr = cpu_to_le64(dma);
-               tx_desc->read.olinfo_status = 0;
-
-               frag++;
        }
 
        /* write last descriptor with RS and EOP bits */