@@ -27,12 +27,14 @@
static void print_stat(struct seq_file *m, struct blk_rq_stat *stat)
{
- if (stat->nr_samples) {
- seq_printf(m, "samples=%d, mean=%lld, min=%llu, max=%llu",
- stat->nr_samples, stat->mean, stat->min, stat->max);
- } else {
+ if (!stat->nr_samples) {
seq_puts(m, "samples=0");
+ return;
}
+
+ seq_printf(m, "samples=%d, mean=%llu, min=%llu, max=%llu, lmd=%llu",
+ stat->nr_samples, stat->mean, stat->min, stat->max,
+ stat->lmd);
}
static int queue_poll_stat_show(void *data, struct seq_file *m)
@@ -17,14 +17,21 @@ struct blk_queue_stats {
bool enable_accounting;
};
-void blk_rq_stat_init_staging(struct blk_rq_stat_staging *stat)
+void blk_rq_stat_reset(struct blk_rq_stat_staging *stat)
{
stat->min = -1ULL;
stat->max = 0;
stat->batch = 0;
+ stat->lmd_batch = 0;
stat->nr_samples = 0;
}
+void blk_rq_stat_init_staging(struct blk_rq_stat_staging *stat)
+{
+ blk_rq_stat_reset(stat);
+ stat->mean_last = 0;
+}
+
void blk_rq_stat_init(struct blk_rq_stat *stat)
{
stat->min = -1ULL;
@@ -42,8 +49,12 @@ void blk_rq_stat_collect(struct blk_rq_stat *dst,
dst->mean = div_u64(src->batch + dst->mean * dst->nr_samples,
dst->nr_samples + src->nr_samples);
+ dst->lmd = div_u64(src->lmd_batch + dst->lmd * dst->nr_samples,
+ dst->nr_samples + src->nr_samples);
dst->nr_samples += src->nr_samples;
+ /* pass mean back for lmd computation */
+ src->mean_last = dst->mean;
}
void blk_rq_stat_merge(struct blk_rq_stat *dst, struct blk_rq_stat *src)
@@ -57,6 +68,9 @@ void blk_rq_stat_merge(struct blk_rq_stat *dst, struct blk_rq_stat *src)
dst->mean = div_u64(src->mean * src->nr_samples +
dst->mean * dst->nr_samples,
dst->nr_samples + src->nr_samples);
+ dst->lmd = div_u64(src->lmd * src->nr_samples +
+ dst->lmd * dst->nr_samples,
+ dst->nr_samples + src->nr_samples);
dst->nr_samples += src->nr_samples;
}
@@ -67,6 +81,9 @@ void blk_rq_stat_add(struct blk_rq_stat_staging *stat, u64 value)
stat->max = max(stat->max, value);
stat->batch += value;
stat->nr_samples++;
+
+ if (value < stat->mean_last)
+ stat->lmd_batch += stat->mean_last - value;
}
void blk_stat_add(struct request *rq, u64 now)
@@ -113,7 +130,7 @@ static void blk_stat_timer_fn(struct timer_list *t)
for (bucket = 0; bucket < cb->buckets; bucket++) {
blk_rq_stat_collect(&cb->stat[bucket],
&cpu_stat[bucket]);
- blk_rq_stat_init_staging(&cpu_stat[bucket]);
+ blk_rq_stat_reset(&cpu_stat[bucket]);
}
}
@@ -170,5 +170,11 @@ void blk_rq_stat_collect(struct blk_rq_stat *dst,
void blk_rq_stat_merge(struct blk_rq_stat *dst, struct blk_rq_stat *src);
void blk_rq_stat_init(struct blk_rq_stat *);
void blk_rq_stat_init_staging(struct blk_rq_stat_staging *stat);
+/*
+ * Prepare stat for the next statistics round. Similar to
+ * blk_rq_stat_init_staging, but retains some information
+ * about the previous round (see mean_last).
+ */
+void blk_rq_stat_reset(struct blk_rq_stat_staging *stat);
#endif
@@ -445,13 +445,16 @@ struct blk_rq_stat {
u64 mean;
u64 min;
u64 max;
+ u64 lmd; /* left mean deviation */
u32 nr_samples;
};
struct blk_rq_stat_staging {
+ u64 mean_last;
u64 min;
u64 max;
u64 batch;
+ u64 lmd_batch;
u32 nr_samples;
};