#define WMULT_SHIFT    32
 
+/*
+ * Shift right and round:
+ */
+#define RSR(x, y) (((x) + (1UL << ((y) - 1))) >> (y))
+
 static unsigned long
 calc_delta_mine(unsigned long delta_exec, unsigned long weight,
                struct load_weight *lw)
 {
        u64 tmp;
 
        if (unlikely(!lw->inv_weight))
-               lw->inv_weight = WMULT_CONST / lw->weight;
+               lw->inv_weight = (WMULT_CONST - lw->weight/2) / lw->weight + 1;
 
        tmp = (u64)delta_exec * weight;
        /*
         * Check whether we'd overflow the 64-bit multiplication:
         */
-       if (unlikely(tmp > WMULT_CONST)) {
-               tmp = ((tmp >> WMULT_SHIFT/2) * lw->inv_weight)
-                               >> (WMULT_SHIFT/2);
-       } else {
-               tmp = (tmp * lw->inv_weight) >> WMULT_SHIFT;
-       }
+       if (unlikely(tmp > WMULT_CONST))
+               tmp = RSR(RSR(tmp, WMULT_SHIFT/2) * lw->inv_weight,
+                       WMULT_SHIFT/2);
+       else
+               tmp = RSR(tmp * lw->inv_weight, WMULT_SHIFT);
 
        return (unsigned long)min(tmp, (u64)(unsigned long)LONG_MAX);
 }
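
The hunk replaces plain truncation with round-to-nearest in two places: the cached inverse weight and the final right shift. The standalone sketch below is not kernel code; it reruns both formulas in userspace so the difference is visible. WMULT_CONST is assumed to be 1ULL << 32 (the 64-bit build value), and the weights and delta_exec are made-up illustrative numbers.

#include <stdio.h>
#include <stdint.h>

/*
 * Assumptions for this userspace sketch: WMULT_CONST is taken to be 2^32
 * (the 64-bit build value); the weights 1024 and 820 and the 1 ms
 * delta_exec are illustrative numbers, not values read from a kernel.
 */
#define WMULT_CONST	(1ULL << 32)
#define WMULT_SHIFT	32

/* Shift right and round, same shape as the RSR macro in the hunk above. */
#define RSR(x, y)	(((x) + (1ULL << ((y) - 1))) >> (y))

int main(void)
{
	unsigned long delta_exec = 1000000;	/* 1 ms worth of nanoseconds */
	unsigned long weight     = 1024;	/* multiplier weight */
	unsigned long lw_weight  = 820;		/* divisor weight (load_weight.weight) */

	/* Old inverse: truncating division always rounds the inverse down. */
	uint64_t inv_trunc = WMULT_CONST / lw_weight;
	/* New inverse: bias the dividend so the result is rounded to nearest. */
	uint64_t inv_round = (WMULT_CONST - lw_weight / 2) / lw_weight + 1;

	uint64_t tmp = (uint64_t)delta_exec * weight;	/* small enough: no overflow path */

	/* Old scaling: the final shift also truncates. */
	uint64_t old_way = (tmp * inv_trunc) >> WMULT_SHIFT;
	/* New scaling: RSR adds half of 2^WMULT_SHIFT first, rounding to nearest. */
	uint64_t new_way = RSR(tmp * inv_round, WMULT_SHIFT);

	printf("plain shift  7 >> 2    = %llu\n", (unsigned long long)(7ULL >> 2));
	printf("rounded      RSR(7, 2) = %llu\n", (unsigned long long)RSR(7ULL, 2));
	printf("inv_weight: truncated %llu, rounded %llu\n",
	       (unsigned long long)inv_trunc, (unsigned long long)inv_round);
	printf("scaled delta: old %llu, new %llu (exact: %.2f)\n",
	       (unsigned long long)old_way, (unsigned long long)new_way,
	       (double)delta_exec * weight / lw_weight);
	return 0;
}

Compiled with any C compiler, the sketch shows the old path consistently rounding down while the new path lands on the nearest integer, which is the kind of one-sided drift the rounding change appears intended to remove.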