static void shrink_node(pg_data_t *pgdat, struct scan_control *sc)
 {
        struct reclaim_state *reclaim_state = current->reclaim_state;
-       unsigned long nr_reclaimed, nr_scanned;
+       unsigned long nr_reclaimed, nr_scanned, nr_node_reclaimed;
        struct lruvec *target_lruvec;
        bool reclaimable = false;
 
@@ ... @@
                reclaim_state->reclaimed_slab = 0;
        }
 
+       nr_node_reclaimed = sc->nr_reclaimed - nr_reclaimed;
+
        /* Record the subtree's reclaim efficiency */
        if (!sc->proactive)
                vmpressure(sc->gfp_mask, sc->target_mem_cgroup, true,
-                          sc->nr_scanned - nr_scanned,
-                          sc->nr_reclaimed - nr_reclaimed);
+                          sc->nr_scanned - nr_scanned, nr_node_reclaimed);
 
-       if (sc->nr_reclaimed - nr_reclaimed)
+       if (nr_node_reclaimed)
                reclaimable = true;
 
        if (current_is_kswapd()) {
@@ ... @@
            test_bit(LRUVEC_CONGESTED, &target_lruvec->flags))
                reclaim_throttle(pgdat, VMSCAN_THROTTLE_CONGESTED);
 
-       if (should_continue_reclaim(pgdat, sc->nr_reclaimed - nr_reclaimed,
-                                   sc))
+       if (should_continue_reclaim(pgdat, nr_node_reclaimed, sc))
                goto again;
 
        /*
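
For readers who don't live in mm/vmscan.c, here is a minimal userspace sketch
of the pattern the patch applies: snapshot a cumulative counter on entry,
compute the per-pass delta once, and let every consumer read the same local.
This is illustration only, assuming nothing about kernel APIs; all names below
(scan_ctrl, reclaim_pass, report_pressure, should_retry, shrink_one_node) are
invented stand-ins, and only the shape matches shrink_node().

/*
 * Userspace sketch, not kernel code. Compile with: cc -o sketch sketch.c
 * The names in this file are hypothetical; only the accounting pattern
 * mirrors the patch above.
 */
#include <stdbool.h>
#include <stdio.h>

struct scan_ctrl {
        unsigned long nr_scanned;       /* cumulative across all passes */
        unsigned long nr_reclaimed;     /* cumulative across all passes */
};

/* Stand-in for the actual reclaim work: bump the cumulative counters. */
static void reclaim_pass(struct scan_ctrl *sc)
{
        sc->nr_scanned += 32;
        sc->nr_reclaimed += 8;
}

/* Stand-in for vmpressure(): wants per-pass, not cumulative, numbers. */
static void report_pressure(unsigned long scanned, unsigned long reclaimed)
{
        printf("pressure: scanned=%lu reclaimed=%lu\n", scanned, reclaimed);
}

/* Stand-in for should_continue_reclaim(): retry on big enough progress. */
static bool should_retry(unsigned long nr_node_reclaimed)
{
        return nr_node_reclaimed >= 16;
}

static void shrink_one_node(struct scan_ctrl *sc)
{
        unsigned long nr_reclaimed, nr_scanned, nr_node_reclaimed;
        bool reclaimable = false;

again:
        /* Snapshot the cumulative counters before this pass. */
        nr_reclaimed = sc->nr_reclaimed;
        nr_scanned = sc->nr_scanned;

        reclaim_pass(sc);

        /* Compute the delta once; the three users below cannot disagree. */
        nr_node_reclaimed = sc->nr_reclaimed - nr_reclaimed;

        report_pressure(sc->nr_scanned - nr_scanned, nr_node_reclaimed);

        if (nr_node_reclaimed)
                reclaimable = true;

        if (should_retry(nr_node_reclaimed))
                goto again;     /* never taken here: each pass reclaims 8 */

        printf("node reclaimable: %s\n", reclaimable ? "yes" : "no");
}

int main(void)
{
        struct scan_ctrl sc = { 0, 0 };

        shrink_one_node(&sc);
        return 0;
}

The design point is the same as in the patch: sc->nr_reclaimed - nr_reclaimed
was computed in three places, and naming the delta once as nr_node_reclaimed
keeps the three users (the vmpressure() report, the reclaimable check, and the
should_continue_reclaim() decision) obviously consistent while shortening the
call sites.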