struct damon_intervals_goal intervals_goal;
        unsigned long min_nr_regions;
        unsigned long max_nr_regions;
+/* private: internal use only */
+       /*
+        * @aggr_interval to @sample_interval ratio.
+        * Core-external components call damon_set_attrs() with &damon_attrs
+        * having this field unset.  In that case, damon_set_attrs() sets this
+        * field of the resulting &damon_attrs.  Core-internal components such
+        * as kdamond_tune_intervals() call damon_set_attrs() with &damon_attrs
+        * having this field set.  In that case, damon_set_attrs() just keeps
+        * it.
+        */
+       unsigned long aggr_samples;
 };
 
 /**
         * update
         */
        unsigned long next_ops_update_sis;
+       /*
+        * number of sample intervals that should be passed before next
+        * intervals tuning
+        */
+       unsigned long next_intervals_tune_sis;
        /* for waiting until the execution of the kdamond_fn is started */
        struct completion kdamond_started;
        /* for scheme quotas prioritization */
 
        if (attrs->sample_interval > attrs->aggr_interval)
                return -EINVAL;
 
+       /* calls from core-external components don't set this. */
+       if (!attrs->aggr_samples)
+               attrs->aggr_samples = attrs->aggr_interval / sample_interval;
+
        ctx->next_aggregation_sis = ctx->passed_sample_intervals +
                attrs->aggr_interval / sample_interval;
        ctx->next_ops_update_sis = ctx->passed_sample_intervals +
        }
 }
 
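+/*
+ * Return the ratio of the observed access events to the target number of
+ * access events that the intervals goal aims for, in bp (1/10,000).
+ */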
+static unsigned long damon_get_intervals_score(struct damon_ctx *c)
+{
+       struct damon_target *t;
+       struct damon_region *r;
+       unsigned long sz_region, max_access_events = 0, access_events = 0;
+       unsigned long target_access_events;
+       unsigned long goal_bp = c->attrs.intervals_goal.access_bp;
+
+       damon_for_each_target(t, c) {
+               damon_for_each_region(r, t) {
+                       sz_region = damon_sz_region(r);
+                       max_access_events += sz_region * c->attrs.aggr_samples;
+                       access_events += sz_region * r->nr_accesses;
+               }
+       }
+       target_access_events = max_access_events * goal_bp / 10000;
+       /* avoid divide-by-zero when there is no region or the goal is zero */
+       if (!target_access_events)
+               target_access_events = 1;
+       return access_events * 10000 / target_access_events;
+}
+
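+/* Defined below; forward-declared for the intervals tuning functions. */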
+static unsigned long damon_feed_loop_next_input(unsigned long last_input,
+               unsigned long score);
+
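+/*
+ * Return the ratio in bp (1/10,000) that the current sampling interval
+ * should be scaled by, to move the monitoring results toward the intervals
+ * goal.
+ */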
+static unsigned long damon_get_intervals_adaptation_bp(struct damon_ctx *c)
+{
+       unsigned long score_bp, adaptation_bp;
+
+       score_bp = damon_get_intervals_score(c);
+       adaptation_bp = damon_feed_loop_next_input(100000000, score_bp) /
+               10000;
+       /*
+        * adaptation_bp ranges from 1 to 20,000.  Avoid too rapid reduction
+        * of the intervals by rescaling [1, 10,000] to [5,000, 10,000].
+        */
+       if (adaptation_bp <= 10000)
+               adaptation_bp = 5000 + adaptation_bp / 2;
+       return adaptation_bp;
+}
+
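+/*
+ * Scale the sampling interval by the adaptation ratio within the goal's
+ * [min_sample_us, max_sample_us] range, and scale the aggregation interval
+ * to keep the aggregation to sampling intervals ratio (aggr_samples).
+ */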
+static void kdamond_tune_intervals(struct damon_ctx *c)
+{
+       unsigned long adaptation_bp;
+       struct damon_attrs new_attrs;
+       struct damon_intervals_goal *goal;
+
+       adaptation_bp = damon_get_intervals_adaptation_bp(c);
+       if (adaptation_bp == 10000)
+               return;
+
+       new_attrs = c->attrs;
+       goal = &c->attrs.intervals_goal;
+       new_attrs.sample_interval = min(goal->max_sample_us,
+                       c->attrs.sample_interval * adaptation_bp / 10000);
+       new_attrs.sample_interval = max(goal->min_sample_us,
+                       new_attrs.sample_interval);
+       new_attrs.aggr_interval = new_attrs.sample_interval *
+               c->attrs.aggr_samples;
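+       /*
+        * aggr_samples is copied from c->attrs, so damon_set_attrs() keeps
+        * the aggregation to sampling intervals ratio as is.
+        */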
+       damon_set_attrs(c, &new_attrs);
+}
+
 static void damon_split_region_at(struct damon_target *t,
                                  struct damon_region *r, unsigned long sz_r);
 
        ctx->next_aggregation_sis = ctx->attrs.aggr_interval / sample_interval;
        ctx->next_ops_update_sis = ctx->attrs.ops_update_interval /
                sample_interval;
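+       /* tune the intervals once per intervals_goal.aggrs aggregations */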
+       ctx->next_intervals_tune_sis = ctx->next_aggregation_sis *
+               ctx->attrs.intervals_goal.aggrs;
 
        damon_for_each_scheme(scheme, ctx) {
                apply_interval = scheme->apply_interval_us ?
                sample_interval = ctx->attrs.sample_interval ?
                        ctx->attrs.sample_interval : 1;
                if (ctx->passed_sample_intervals >= next_aggregation_sis) {
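+                       /* tune the intervals if the tuning time has come */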
+                       if (ctx->attrs.intervals_goal.aggrs &&
+                                       ctx->passed_sample_intervals >=
+                                       ctx->next_intervals_tune_sis) {
+                               ctx->next_intervals_tune_sis +=
+                                       ctx->attrs.aggr_samples *
+                                       ctx->attrs.intervals_goal.aggrs;
+                               kdamond_tune_intervals(ctx);
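+                               /* re-read possibly tuned sample_interval */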
+                               sample_interval = ctx->attrs.sample_interval ?
+                                       ctx->attrs.sample_interval : 1;
+                       }
                        ctx->next_aggregation_sis = next_aggregation_sis +
                                ctx->attrs.aggr_interval / sample_interval;