* cr level needs an update.
  */
 static void ext4_mb_choose_next_group_p2_aligned(struct ext4_allocation_context *ac,
-                       enum criteria *new_cr, ext4_group_t *group, ext4_group_t ngroups)
+                       enum criteria *new_cr, ext4_group_t *group)
 {
        struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb);
        struct ext4_group_info *iter;
[...]
  * order. Updates *new_cr if cr level needs an update.
  */
 static void ext4_mb_choose_next_group_goal_fast(struct ext4_allocation_context *ac,
-               enum criteria *new_cr, ext4_group_t *group, ext4_group_t ngroups)
+               enum criteria *new_cr, ext4_group_t *group)
 {
        struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb);
        struct ext4_group_info *grp = NULL;
[...]
  * much and fall to CR_GOAL_LEN_SLOW in that case.
  */
 static void ext4_mb_choose_next_group_best_avail(struct ext4_allocation_context *ac,
-               enum criteria *new_cr, ext4_group_t *group, ext4_group_t ngroups)
+               enum criteria *new_cr, ext4_group_t *group)
 {
        struct ext4_sb_info *sbi = EXT4_SB(ac->ac_sb);
        struct ext4_group_info *grp = NULL;
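
Taken together, the three hunks above narrow every group-chooser helper to the same three-argument shape, since none of them ever read ngroups. Below is a minimal standalone sketch of that pattern, assuming simplified stand-in types; the real ext4_allocation_context and ext4_group_t are only mimicked here, and the chooser body is hypothetical:

/*
 * Illustrative sketch only: stand-in types approximating the narrowed
 * chooser signature after this patch; not kernel code.
 */
#include <stdio.h>

enum criteria { CR_POWER2_ALIGNED, CR_GOAL_LEN_FAST, CR_BEST_AVAIL_LEN, CR_GOAL_LEN_SLOW };

typedef unsigned int group_t;               /* stand-in for ext4_group_t */
struct alloc_ctx { group_t goal_group; };   /* stand-in for ext4_allocation_context */

/* stand-in chooser: advance the group cursor, escalate to the next criteria */
static void choose_next_group_goal_fast(struct alloc_ctx *ac,
		enum criteria *new_cr, group_t *group)
{
	*group = ac->goal_group + 1;
	*new_cr = CR_BEST_AVAIL_LEN;
}

int main(void)
{
	struct alloc_ctx ac = { .goal_group = 7 };
	enum criteria cr = CR_GOAL_LEN_FAST;
	group_t group = 0;

	/* no ngroups threaded through: the chooser derives what it needs from ac */
	choose_next_group_goal_fast(&ac, &cr, &group);
	printf("next group %u, new cr %d\n", group, (int)cr);
	return 0;
}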
[...]
        }
 
        if (*new_cr == CR_POWER2_ALIGNED) {
-               ext4_mb_choose_next_group_p2_aligned(ac, new_cr, group, ngroups);
+               ext4_mb_choose_next_group_p2_aligned(ac, new_cr, group);
        } else if (*new_cr == CR_GOAL_LEN_FAST) {
-               ext4_mb_choose_next_group_goal_fast(ac, new_cr, group, ngroups);
+               ext4_mb_choose_next_group_goal_fast(ac, new_cr, group);
        } else if (*new_cr == CR_BEST_AVAIL_LEN) {
-               ext4_mb_choose_next_group_best_avail(ac, new_cr, group, ngroups);
+               ext4_mb_choose_next_group_best_avail(ac, new_cr, group);
        } else {
                /*
                 * TODO: For CR=2, we can arrange groups in an rb tree sorted by