[CPUFREQ] ondemand/conservative: sanitize sampling_rate restrictions

Limit the minimum sampling rate to transition_latency * 100, or to the
kernel's own lower limit, whichever is greater. If userspace tries to
set sampling_rate below this minimum, clamp it to the lowest allowed
value instead of rejecting the write with -EINVAL.
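
For illustration, a minimal standalone userspace sketch of the
resulting policy (the constant below is a made-up stand-in for the
kernel's MIN_STAT_SAMPLING_RATE, not its real value):

  #include <stdio.h>

  #define KERNEL_MIN_RATE 20000u  /* hypothetical lower limit, in usecs */

  static unsigned int def_sampling_rate = 80000; /* latency-derived default */

  /* Lowest rate userspace may set: default/10, but never below the
   * kernel limit -- mirrors minimum_sampling_rate() in the patch. */
  static unsigned int minimum_rate(void)
  {
          unsigned int latency_based = def_sampling_rate / 10;
          return latency_based > KERNEL_MIN_RATE ? latency_based
                                                 : KERNEL_MIN_RATE;
  }

  int main(void)
  {
          unsigned int requested = 1;  /* absurdly low userspace request */
          unsigned int effective = requested > minimum_rate() ?
                                   requested : minimum_rate();
          printf("requested %u -> effective %u usecs\n",
                 requested, effective);
          return 0;
  }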

Signed-off-by: Thomas Renninger <trenn@suse.de>
Signed-off-by: Dave Jones <davej@redhat.com>
diff --git a/drivers/cpufreq/cpufreq_conservative.c b/drivers/cpufreq/cpufreq_conservative.c
index 0912d7c..8d541c6 100644
--- a/drivers/cpufreq/cpufreq_conservative.c
+++ b/drivers/cpufreq/cpufreq_conservative.c
@@ -54,8 +54,20 @@
 			(MIN_SAMPLING_RATE_RATIO * jiffies_to_usecs(10))
 #define MIN_SAMPLING_RATE			\
 			(def_sampling_rate / MIN_SAMPLING_RATE_RATIO)
+/* The above MIN_SAMPLING_RATE will soon vanish along with its sysfs
+ * file. Define the minimal settable sampling rate as the greater of:
+ *   - "HW transition latency" * 100 (same as default sampling / 10)
+ *   - MIN_STAT_SAMPLING_RATE
+ * so that userspace cannot shoot itself in the foot.
+ */
+static unsigned int minimum_sampling_rate(void)
+{
+	return max(def_sampling_rate / 10, MIN_STAT_SAMPLING_RATE);
+}
+
+/* This will also vanish soon, when sampling_rate_max is removed */
 #define MAX_SAMPLING_RATE			(500 * def_sampling_rate)
-#define DEF_SAMPLING_RATE_LATENCY_MULTIPLIER	(1000)
+#define LATENCY_MULTIPLIER			(1000)
 #define DEF_SAMPLING_DOWN_FACTOR		(1)
 #define MAX_SAMPLING_DOWN_FACTOR		(10)
 #define TRANSITION_LATENCY_LIMIT		(10 * 1000 * 1000)
@@ -208,13 +220,11 @@
 	ret = sscanf(buf, "%u", &input);
 
 	mutex_lock(&dbs_mutex);
-	if (ret != 1 || input > MAX_SAMPLING_RATE ||
-	    input < MIN_SAMPLING_RATE) {
+	if (ret != 1) {
 		mutex_unlock(&dbs_mutex);
 		return -EINVAL;
 	}
-
-	dbs_tuners_ins.sampling_rate = input;
+	dbs_tuners_ins.sampling_rate = max(input, minimum_sampling_rate());
 	mutex_unlock(&dbs_mutex);
 
 	return count;
@@ -540,11 +550,9 @@
 			if (latency == 0)
 				latency = 1;
 
-			def_sampling_rate = 10 * latency *
-					DEF_SAMPLING_RATE_LATENCY_MULTIPLIER;
-
-			if (def_sampling_rate < MIN_STAT_SAMPLING_RATE)
-				def_sampling_rate = MIN_STAT_SAMPLING_RATE;
+			def_sampling_rate =
+				max(10 * latency * LATENCY_MULTIPLIER,
+				    MIN_STAT_SAMPLING_RATE);
 
 			dbs_tuners_ins.sampling_rate = def_sampling_rate;
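
The observable effect of the store-path change, sketched standalone
(function and constant names below are illustrative, not the kernel's);
note that the old upper-bound check against MAX_SAMPLING_RATE is
dropped as well:

  #include <stdio.h>

  #define MIN_RATE 20000u  /* stand-in for minimum_sampling_rate() */

  static unsigned int sampling_rate;

  /* Before: out-of-range input returned -EINVAL.
   * After:  only unparsable input fails; low values are clamped. */
  static int store_rate(const char *buf)
  {
          unsigned int input;

          if (sscanf(buf, "%u", &input) != 1)
                  return -1;              /* parse error: -EINVAL */
          sampling_rate = input > MIN_RATE ? input : MIN_RATE;
          return 0;
  }

  int main(void)
  {
          store_rate("1");        /* clamped up to MIN_RATE */
          printf("after \"1\":     %u\n", sampling_rate);
          store_rate("50000");    /* above the minimum: taken as-is */
          printf("after \"50000\": %u\n", sampling_rate);
          return 0;
  }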