}
                /* Allocate iommu entries for that segment */
                paddr = (unsigned long) SG_ENT_PHYS_ADDRESS(s);
-               npages = iommu_nr_pages(paddr, slen);
+               npages = iommu_num_pages(paddr, slen, IO_PAGE_SIZE);
                entry = iommu_range_alloc(dev, iommu, npages, &handle);
 
                /* Handle failure */
                        iopte_t *base;
 
                        vaddr = s->dma_address & IO_PAGE_MASK;
-                       npages = iommu_nr_pages(s->dma_address, s->dma_length);
+                       npages = iommu_num_pages(s->dma_address, s->dma_length,
+                                                IO_PAGE_SIZE);
                        iommu_range_free(iommu, vaddr, npages);
 
                        entry = (vaddr - iommu->page_table_map_base)
 
                if (!len)
                        break;
-               npages = iommu_nr_pages(dma_handle, len);
+               npages = iommu_num_pages(dma_handle, len, IO_PAGE_SIZE);
                iommu_range_free(iommu, dma_handle, npages);
 
                entry = ((dma_handle - iommu->page_table_map_base)
 
 
 #define SG_ENT_PHYS_ADDRESS(SG)        (__pa(sg_virt((SG))))
 
-static inline unsigned long iommu_nr_pages(unsigned long vaddr,
-                                           unsigned long slen)
-{
-       unsigned long npages;
-
-       npages = IO_PAGE_ALIGN(vaddr + slen) - (vaddr & IO_PAGE_MASK);
-       npages >>= IO_PAGE_SHIFT;
-
-       return npages;
-}
-
 static inline int is_span_boundary(unsigned long entry,
                                   unsigned long shift,
                                   unsigned long boundary_size,
                                   struct scatterlist *outs,
                                   struct scatterlist *sg)
 {
        unsigned long paddr = SG_ENT_PHYS_ADDRESS(outs);
-       int nr = iommu_nr_pages(paddr, outs->dma_length + sg->length);
+       int nr = iommu_num_pages(paddr, outs->dma_length + sg->length,
+                                IO_PAGE_SIZE);
 
        return iommu_is_span_boundary(entry, nr, shift, boundary_size);
 }
 
                }
                /* Allocate iommu entries for that segment */
                paddr = (unsigned long) SG_ENT_PHYS_ADDRESS(s);
-               npages = iommu_nr_pages(paddr, slen);
+               npages = iommu_num_pages(paddr, slen, IO_PAGE_SIZE);
                entry = iommu_range_alloc(dev, iommu, npages, &handle);
 
                /* Handle failure */
                        unsigned long vaddr, npages;
 
                        vaddr = s->dma_address & IO_PAGE_MASK;
-                       npages = iommu_nr_pages(s->dma_address, s->dma_length);
+                       npages = iommu_num_pages(s->dma_address, s->dma_length,
+                                                IO_PAGE_SIZE);
                        iommu_range_free(iommu, vaddr, npages);
                        /* XXX demap? XXX */
                        s->dma_address = DMA_ERROR_CODE;
 
                if (!len)
                        break;
-               npages = iommu_nr_pages(dma_handle, len);
+               npages = iommu_num_pages(dma_handle, len, IO_PAGE_SIZE);
                iommu_range_free(iommu, dma_handle, npages);
 
                entry = ((dma_handle - iommu->page_table_map_base) >> IO_PAGE_SHIFT);