int __weak main(void)
{
- clock_t start, end;
+ unsigned long start, end; /* user time, in microseconds */
- double xa_t, mt_t;
- unsigned long xa_m, mt_m;
+ double xa_t = 0, mt_t;
+ unsigned long xa_m = 0, mt_m;
void *entry = &main;
unsigned long i, max = 200000;
struct rusage sru, eru;
-
/* xarray first */
radix_tree_init();
DEFINE_XARRAY(xa);

/* time the xarray inserts, mirroring the maple tree test below */
getrusage(RUSAGE_SELF, &sru);
for (i = 0; i <= max; i++) {
xa_store(&xa, i, entry, GFP_KERNEL);
}
getrusage(RUSAGE_SELF, &eru);
- start = sru.ru_utime.tv_usec;
- end = eru.ru_utime.tv_usec;
+ start = sru.ru_utime.tv_usec + sru.ru_utime.tv_sec * 1000000;
+ end = eru.ru_utime.tv_usec + eru.ru_utime.tv_sec * 1000000;
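/* verify every index returns the entry that was stored */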
for (i = 0; i <= max; i++) {
BUG_ON(entry != xa_load(&xa, i));
}
rcu_barrier();
- xa_t = ((double) (end - start)) / CLOCKS_PER_SEC;
+ xa_t = ((double) (end - start)) / 1000000;
xa_m = xa_get_alloc_size();
printk("xa %lu inserts: %fs using %luK in %d allocations\n",
max, xa_t, xa_m/1024, nr_allocated);
/* Maple Tree tests */
maple_tree_init();
DEFINE_MTREE(mt);
-
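/* repeat the same timed insert workload on the maple tree */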
getrusage(RUSAGE_SELF, &sru);
for (i = 0; i <= max; i++) {
mtree_insert(&mt, i, entry, GFP_KERNEL);
}
+
getrusage(RUSAGE_SELF, &eru);
- start = sru.ru_utime.tv_usec;
- end = eru.ru_utime.tv_usec;
+ start = sru.ru_utime.tv_usec + sru.ru_utime.tv_sec * 1000000;
+ end = eru.ru_utime.tv_usec + eru.ru_utime.tv_sec * 1000000;
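/* same sanity check for the maple tree entries */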
for (i = 0; i <= max; i++) {
BUG_ON(entry != mtree_load(&mt, i));
}
rcu_barrier();
- mt_t = ((double) (end - start)) / CLOCKS_PER_SEC;
+ mt_t = ((double) (end - start)) / 1000000;
mt_m = mt_get_alloc_size();
printk("mt %lu inserts: %fs using %luK in %d allocations\n",
max, mt_t, mt_m/1024, nr_allocated);
-// mt_dump(&mt);
mtree_destroy(&mt);
- printk(" Delta : %f (%f%% of xa time) %ldK\n",
- xa_t - mt_t, mt_t/xa_t * 100,
- (signed long)(xa_m - mt_m)/1024);
rcu_barrier();
+ printk(" Delta : %f seconds (%f%% of xa time) %ldK\n",
+ mt_t - xa_t, mt_t/xa_t * 100,
+ (signed long)(mt_m - xa_m)/1024);
+ rcu_barrier(); /* flush any RCU-deferred frees before the leak check */
+ BUG_ON(nr_allocated); /* both trees destroyed; nothing should remain allocated */
return 0;
}