static void performance_shuffle(tree *newtree, unsigned long length, ranctx *prng_state)
 {
-       unsigned long i, x;
+       unsigned long i, x, index;
        struct partition_group size_group[length];
        unsigned long num_groups = 0;
        unsigned long randnum;
 
        partition_struct(newtree, length, (struct partition_group *)&size_group, &num_groups);
+
+       /* FIXME: this group shuffle is currently a no-op. */
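+       /* Swapping size_group[] entries only reorders the group metadata; each group keeps its original position in the struct. */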
        for (i = num_groups - 1; i > 0; i--) {
                struct partition_group tmp;
                randnum = ranval(prng_state) % (i + 1);
                tmp = size_group[i];
                size_group[i] = size_group[randnum];
                size_group[randnum] = tmp;
        }
 
        for (x = 0; x < num_groups; x++) {
-               for (i = size_group[x].start + size_group[x].length - 1; i > size_group[x].start; i--) {
+               for (index = size_group[x].length - 1; index > 0; index--) {
                        tree tmp;
+
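+                       /* absolute index in newtree of the field at group-relative position "index" */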
+                       i = size_group[x].start + index;
                        if (DECL_BIT_FIELD_TYPE(newtree[i]))
                                continue;
-                       randnum = ranval(prng_state) % (i + 1);
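+                       /* pick a group-relative target in [0, index], then map it to an absolute slot */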
+                       randnum = ranval(prng_state) % (index + 1);
+                       randnum += size_group[x].start;
                        // we could handle this case differently if desired
                        if (DECL_BIT_FIELD_TYPE(newtree[randnum]))
                                continue;