priv->rx_buf[rx_queue_idx][i] = skb;
mapping = (dma_addr_t *)skb->cb;
*mapping = dma_map_single(&priv->pdev->dev,
-                         skb_tail_pointer_rsl(skb),
+                         skb_tail_pointer(skb),
                          priv->rxbuffersize, DMA_FROM_DEVICE);
if (dma_mapping_error(&priv->pdev->dev, *mapping)) {
        dev_kfree_skb_any(skb);
priv->rx_buf[rx_queue_idx][priv->rx_idx[rx_queue_idx]] =
        skb;
*((dma_addr_t *)skb->cb) = dma_map_single(&priv->pdev->dev,
-                          skb_tail_pointer_rsl(skb),
+                          skb_tail_pointer(skb),
                           priv->rxbuffersize, DMA_FROM_DEVICE);
if (dma_mapping_error(&priv->pdev->dev, *((dma_addr_t *)skb->cb))) {
        dev_kfree_skb_any(skb);
#define IW_CUSTOM_MAX 256 /* In bytes */
#endif
-#define skb_tail_pointer_rsl(skb) skb_tail_pointer(skb)
-
#define queue_delayed_work_rsl(x, y, z) queue_delayed_work(x, y, z)
#define INIT_DELAYED_WORK_RSL(x, y, z) INIT_DELAYED_WORK(x, y)
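
For reference, both call sites above follow the kernel's standard streaming-DMA receive-buffer setup: map the skb data area with dma_map_single() in the DMA_FROM_DEVICE direction, stash the bus address in skb->cb, and check the handle with dma_mapping_error() before handing the buffer to hardware, freeing the skb if the mapping fails. The sketch below shows that pattern in isolation; the rx_fill_slot() helper and its pared-down parameters are hypothetical, while the skb/DMA calls are the same kernel APIs used in the patch.

/*
 * Minimal sketch of the RX-buffer mapping pattern touched by this patch.
 * rx_fill_slot() and its parameter list are hypothetical; dev_alloc_skb(),
 * skb_tail_pointer(), dma_map_single(), dma_mapping_error() and
 * dev_kfree_skb_any() are the real kernel APIs used above.
 */
#include <linux/pci.h>
#include <linux/skbuff.h>
#include <linux/dma-mapping.h>

static int rx_fill_slot(struct pci_dev *pdev, struct sk_buff **slot,
			unsigned int bufsize)
{
	struct sk_buff *skb = dev_alloc_skb(bufsize);
	dma_addr_t *mapping;

	if (!skb)
		return -ENOMEM;

	*slot = skb;
	/* Keep the bus address in skb->cb, as the driver does. */
	mapping = (dma_addr_t *)skb->cb;
	*mapping = dma_map_single(&pdev->dev, skb_tail_pointer(skb),
				  bufsize, DMA_FROM_DEVICE);
	if (dma_mapping_error(&pdev->dev, *mapping)) {
		/* Mapping failed: drop the skb and leave the slot empty. */
		dev_kfree_skb_any(skb);
		*slot = NULL;
		return -ENOMEM;
	}
	return 0;
}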