adaptive-monitoring-framework
Changes
tigris/pom.xml 2(+1 -1)
Details
tigris/pom.xml 2(+1 -1)
diff --git a/tigris/pom.xml b/tigris/pom.xml
index a93dfa8..48640b6 100644
--- a/tigris/pom.xml
+++ b/tigris/pom.xml
@@ -6,7 +6,7 @@
<groupId>br.ufrgs.inf.prosoft</groupId>
<artifactId>tigris</artifactId>
- <version>0.14.0-SNAPSHOT</version>
+ <version>0.15.0-SNAPSHOT</version>
<properties>
<aspectj.version>1.8.9</aspectj.version>
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java
index 21dffe9..4e070d3 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java
@@ -182,9 +182,9 @@ public class TigrisCoordinator implements Runnable {
Granularity granularity = new Granularity(tigrisCriteria.granularity(), signature);
//TODO: this is completely different now, need to review all the usage of sampling here
- if (tigrisConfiguration.adaptiveSamplingRate() && sampling.isPerformanceBaselineEnabled()) {
- sampling.addPerformanceBaselineItem(granularity, endTime - startTime);
- }
+// if (tigrisConfiguration.adaptiveSamplingRate() && sampling.isPerformanceBaselineEnabled()) {
+// sampling.addPerformanceBaselineItem(granularity, endTime - startTime);
+// }
//trace only allowed by lightweight metrics
// boolean shouldSample = sampling.simpleSamplingDecision();
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/MonitoringCycle.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/MonitoringCycle.java
index f0c6769..d7afe7f 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/MonitoringCycle.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/MonitoringCycle.java
@@ -7,8 +7,9 @@ public class MonitoringCycle {
private double stdProcTimesPopulation;
private long sampleSize;
private long populationSize;
+ private long monitoringCycleTime;
- public MonitoringCycle(FrequencyDataSet sample, FrequencyDataSet population) {
+ public MonitoringCycle(FrequencyDataSet sample, FrequencyDataSet population, long monitoringCycleTime) {
this.averageProcTimesSample = sample.getMeanExecutionTime();
this.stdProcTimesSample = sample.getStdExecutionTime();
this.sampleSize = sample.getTotalItems();
@@ -16,6 +17,7 @@ public class MonitoringCycle {
this.averageProcTimesPopulation = population.getMeanExecutionTime();
this.stdProcTimesPopulation = population.getStdExecutionTime();
this.populationSize = population.getTotalItems();
+ this.monitoringCycleTime = monitoringCycleTime;
}
public MonitoringCycle() {
@@ -25,6 +27,11 @@ public class MonitoringCycle {
this.averageProcTimesPopulation = 0;
this.stdProcTimesPopulation = 0;
this.populationSize = 0;
+ this.monitoringCycleTime = 0;
+ }
+
+ public long getMonitoringCycleTime() {
+ return monitoringCycleTime;
}
@Override
@@ -36,6 +43,7 @@ public class MonitoringCycle {
", stdProcTimesPopulation=" + stdProcTimesPopulation +
", sampleSize=" + sampleSize +
", populationSize=" + populationSize +
+ ", monitoringCycleTime=" + monitoringCycleTime +
'}';
}
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java
index eb40cde..cd29e4c 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java
@@ -1,169 +1,271 @@
package br.ufrgs.inf.prosoft.tigris.sampling;
+import com.google.common.util.concurrent.AtomicDouble;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
-import org.apache.commons.math3.stat.descriptive.moment.Mean;
-import org.apache.commons.math3.stat.descriptive.moment.Variance;
-import org.apache.commons.math3.util.FastMath;
+import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
+import org.apache.commons.math3.stat.inference.TestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
public class PerformanceBaselineDataSet {
Logger logger = LoggerFactory.getLogger(PerformanceBaselineDataSet.class);
- double weights[] = {0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 
0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 
0.12, 0.12, 0.12, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.32, 0.32, 0.32, 0.32, 0.32, 0.32, 0.32, 0.32, 0.33, 0.33, 0.33, 0.33, 0.33, 0.33, 0.33, 0.34, 0.34, 0.34, 0.34, 0.34, 0.34, 0.34, 0.34, 0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.36, 0.36, 0.36, 0.36, 0.36, 0.36, 0.36, 0.37, 0.37, 0.37, 0.37, 0.37, 0.37, 0.37, 0.38, 0.38, 0.38, 0.38, 0.38, 0.38, 0.38, 0.39, 0.39, 0.39, 0.39, 0.39, 0.39, 0.39, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.41, 0.41, 0.41, 0.41, 0.41, 0.41, 0.42, 0.42, 0.42, 0.42, 0.42, 0.42, 0.42, 0.43, 0.43, 0.43, 0.43, 0.43, 0.43, 0.44, 0.44, 0.44, 0.44, 0.44, 0.45, 0.45, 0.45, 0.45, 0.45, 0.45, 0.46, 0.46, 0.46, 0.46, 0.46, 
0.46, 0.47, 0.47, 0.47, 0.47, 0.47, 0.48, 0.48, 0.48, 0.48, 0.48, 0.48, 0.49, 0.49, 0.49, 0.49, 0.49, 0.5, 0.5, 0.5, 0.5, 0.5, 0.51, 0.51, 0.51, 0.51, 0.51, 0.52, 0.52, 0.52, 0.52, 0.52, 0.53, 0.53, 0.53, 0.53, 0.53, 0.54, 0.54, 0.54, 0.54, 0.54, 0.55, 0.55, 0.55, 0.55, 0.56, 0.56, 0.56, 0.56, 0.56, 0.57, 0.57, 0.57, 0.57, 0.57, 0.58, 0.58, 0.58, 0.58, 0.59, 0.59, 0.59, 0.59, 0.6, 0.6, 0.6, 0.6, 0.6, 0.61, 0.61, 0.61, 0.61, 0.62, 0.62, 0.62, 0.62, 0.63, 0.63, 0.63, 0.63, 0.64, 0.64, 0.64, 0.64, 0.65, 0.65, 0.65, 0.65, 0.66, 0.66, 0.66, 0.66, 0.67, 0.67, 0.67, 0.67, 0.68, 0.68, 0.68, 0.68, 0.69, 0.69, 0.69, 0.69, 0.7, 0.7, 0.7, 0.71, 0.71, 0.71, 0.71, 0.72, 0.72, 0.72, 0.73, 0.73, 0.73, 0.73, 0.74, 0.74, 0.74, 0.74, 0.75, 0.75, 0.75, 0.76, 0.76, 0.76, 0.77, 0.77, 0.77, 0.77, 0.78, 0.78, 0.78, 0.79, 0.79, 0.79, 0.8, 0.8, 0.8, 0.8, 0.81, 0.81, 0.81, 0.82, 0.82, 0.82, 0.83, 0.83, 0.83, 0.84, 0.84, 0.84, 0.85, 0.85, 0.85, 0.86, 0.86, 0.86, 0.87, 0.87, 0.87, 0.88, 0.88, 0.88, 0.89, 0.89, 0.89, 0.9, 0.9, 0.9, 0.91, 0.91, 0.91, 0.92, 0.92, 0.92, 0.93, 0.93, 0.94, 0.94, 0.94, 0.95, 0.95, 0.95, 0.96, 0.96, 0.96, 0.97, 0.97, 0.98, 0.98, 0.98, 0.99, 0.99};
+ DescriptiveStatistics baselinesPerSecondToNormal = new DescriptiveStatistics(300);
+ Map<Long, Map<String, SummaryStatistics>> baselinesPerSecondToResponseTimes = new ConcurrentHashMap<>();
+ private static Map<String, SummaryStatistics> currentBaseline = new ConcurrentHashMap<>();
- private Map<Granularity, DescriptiveStatistics> granularityBaseline = new ConcurrentHashMap<>();
- private DescriptiveStatistics overallBaseline = new DescriptiveStatistics(1200);
- private DescriptiveStatistics historicOfBaselines = new DescriptiveStatistics(1200);
- private int n = 0;
- public void addItem(Granularity item, long executionTime) {
- DescriptiveStatistics statistics = granularityBaseline.getOrDefault(item, new DescriptiveStatistics(1200));
- statistics.addValue(executionTime);
- granularityBaseline.put(item, statistics);
- n++;
+ DescriptiveStatistics monitoringPerSecondToNormal = new DescriptiveStatistics(300);
+ Map<Long, Map<String, SummaryStatistics>> monitoringPerSecondToResponseTimes = new ConcurrentHashMap<>();
+ Map<String, SummaryStatistics> currentMonitoring = new ConcurrentHashMap<>();
- overallBaseline.addValue(executionTime);
+ public void addPerformanceBaselineItem(Granularity item, long executionTime) {
+ SummaryStatistics currentBaselineForItem = currentBaseline.getOrDefault(item.name, new SummaryStatistics());
+ currentBaselineForItem.addValue(executionTime);
+ currentBaseline.put(item.name, currentBaselineForItem);
}
- //não comparar baseline com sample, não faz sentido - ou é sample com sample, ou é baseline com sample
- //comparar baseline com sample, diz o overhead do sampling
- //comparar baseline com baseline histórico diz a carga da aplicação
- public boolean isAppStruggling() {
- historicOfBaselines.addValue(overallBaseline.getMean());
+ public void addMonitoringItem(Granularity item, long executionTime) {
+ SummaryStatistics currentMonitoringForItem = currentMonitoring.getOrDefault(item.name, new SummaryStatistics());
+ currentMonitoringForItem.addValue(executionTime);
+ currentMonitoring.put(item.name, currentMonitoringForItem);
+ }
+
+ public void trackBaselinePerSecond(long reqsPerSecond) {
+ if (reqsPerSecond == 0)
+ return;
+
+ baselinesPerSecondToNormal.addValue(reqsPerSecond);
+ baselinesPerSecondToResponseTimes.put(reqsPerSecond, new ConcurrentHashMap<>(currentBaseline));
+ currentBaseline.clear();
+ }
+
+ public void trackMonitoringPerSecond(long operationsPerSecond) {
+ if (operationsPerSecond == 0)
+ return;
+
+ monitoringPerSecondToNormal.addValue(operationsPerSecond);
+ monitoringPerSecondToResponseTimes.put(operationsPerSecond, new ConcurrentHashMap<>(currentMonitoring));
+ currentMonitoring.clear();
+ }
+
+ public Map<String, SummaryStatistics> getBaselineNormal() {
+ double[] sortedValues = baselinesPerSecondToNormal.getSortedValues();
- return historicOfBaselines.getPercentile(50) < overallBaseline.getMean();
+ return baselinesPerSecondToResponseTimes
+ .get((long)sortedValues[sortedValues.length / 2]);
}
+ public Map<String, SummaryStatistics> getMonitoringNormal() {
+ double[] sortedValues = monitoringPerSecondToNormal.getSortedValues();
- public Apdex getApdexResultsPerEvent(Map<Granularity, DescriptiveStatistics> sampledDataSet) {
- long satisfied = 0, tolerated = 0, n = 0;
- for (Map.Entry<Granularity, DescriptiveStatistics> baselineEntry : granularityBaseline.entrySet()) {
- DescriptiveStatistics stats = baselineEntry.getValue();
- double mean = stats.getMean();
- double std = stats.getStandardDeviation();
-// if (stats.getN() == stats.getWindowSize()) {
-// mean = new Mean().evaluate(stats.getValues(), weights);
-// std = FastMath.sqrt(new Variance().evaluate(stats.getValues(), weights));
+ return monitoringPerSecondToResponseTimes.get((long)sortedValues[sortedValues.length / 2]);
+ }
+
+ public boolean isBaselineUnderAveragePlusStd(AtomicDouble decrease) {
+ if (baselinesPerSecondToNormal.getN() == 0) {
+ return true;
+ }
+
+ double[] sortedValues = baselinesPerSecondToNormal.getSortedValues();
+ Map<String, SummaryStatistics> normal = baselinesPerSecondToResponseTimes.get((long)sortedValues[sortedValues.length / 2]);
+
+ AtomicLong failed = new AtomicLong();
+ AtomicLong success = new AtomicLong();
+
+ SummaryStatistics decreaseStats = new SummaryStatistics();
+// normal.forEach((normalName, normalStats) -> {
+// DescriptiveStatistics currentStats = currentBaseline.get(normalName);
+// //compare only those methods that have the same amount of traces
+//// if (currentStats != null && currentStats.getN() > 2 && normalStats.getN() == currentStats.getN()
+//// && TestUtils.tTest(normalStats.getMean(),
+//// currentStats, 0.05)) {
+//// success.getAndIncrement();
+//// } else { // TODO: should we decrement when not available?
+//// failed.getAndIncrement();
+//// decreaseStats.addValue((currentStats.getMean() - normalStats.getMean()) / normalStats.getMean());
+//// }
+//
+// if (currentStats != null && normalStats.getN() > 0 && normalStats.getN() == currentStats.getN()) {
+// double threshold = normalStats.getMean() + normalStats.getStandardDeviation();
+// if (currentStats.getMean() < threshold) {
+// success.getAndIncrement();
+// } else {
+// decreaseStats.addValue((currentStats.getMean() - normalStats.getMean()) / normalStats.getMean());
+// failed.getAndIncrement();
+// }
// }
+// });
+//
+// decrease.set(decreaseStats.getMax());
+// return success.get() > failed.get();
- double meanPlusStd = mean + std;
- DescriptiveStatistics descriptiveStatistics = sampledDataSet.get(baselineEntry.getKey());
- if (descriptiveStatistics == null)
- continue;
- for (double value : descriptiveStatistics.getValues()) {
- if (value <= meanPlusStd) {
- satisfied++;
- }
- if (value > meanPlusStd &&
- value < mean + (2 * std)) {
- tolerated++;
+
+
+ DescriptiveStatistics normalStatsMedians = new DescriptiveStatistics();
+ DescriptiveStatistics currentStatsMedians = new DescriptiveStatistics();
+
+ //comparing current baseline means against the normal's mean + std threshold (t-test variant is commented out in the return below)
+ normal.forEach((normalName, normalStats) -> {
+ SummaryStatistics currentStats = currentBaseline.get(normalName);
+ //compare only methods with enough observations (> 2) on both the normal and current sides
+ if (currentStats != null && currentStats.getN() > 2 && normalStats.getN() > 2) {
+ normalStatsMedians.addValue(normalStats.getMean());
+ currentStatsMedians.addValue(currentStats.getMean());
+
+ double threshold = normalStats.getMean() + normalStats.getStandardDeviation();
+ if (currentStats.getMean() > threshold) {
+ failed.getAndIncrement();
+ } else {
+ decreaseStats.addValue((normalStats.getMean() - currentStats.getMean()) / currentStats.getMean());
+ success.getAndIncrement();
}
- n++;
}
+ });
+
+ if (!Double.isNaN(decreaseStats.getMax())) {
+ decrease.set(decreaseStats.getMax());
+ } else {
+ decrease.set(0);
+ }
+
+ if (normalStatsMedians.getN() < 2) {
+ return true;
+ }
+
+ //for lusearch
+ if (success.get() + failed.get() > 1000) {
+ return success.get() > failed.get();
+ }
+
+ return //!TestUtils.pairedTTest(normalStatsMedians.getValues(), currentStatsMedians.getValues(), 0.05) ||
+ failed.get() <= 2;
+ }
+
+ public boolean isMonitoringUnderAveragePlusStd(AtomicDouble increase) {
+ if (monitoringPerSecondToNormal.getN() == 0) {
+ return true;
}
- return new Apdex(satisfied, tolerated, n);
+ double[] sortedValues = monitoringPerSecondToNormal.getSortedValues();
+ Map<String, SummaryStatistics> normal =
+ monitoringPerSecondToResponseTimes.get((long)sortedValues[sortedValues.length / 2]);
-// long satisfied = 0, tolerated = 0, n = 0;
-// for (Map.Entry<Granularity, DescriptiveStatistics> baselineEntry : granularityBaseline.entrySet()) {
-// DescriptiveStatistics stats = baselineEntry.getValue();
-// double mean = stats.getMean();
-// double std = stats.getStandardDeviation();
-//// if (stats.getN() == stats.getWindowSize()) {
-//// mean = new Mean().evaluate(stats.getValues(), weights);
-//// std = FastMath.sqrt(new Variance().evaluate(stats.getValues(), weights));
+ AtomicLong failed = new AtomicLong();
+ AtomicLong success = new AtomicLong();
+
+ SummaryStatistics increaseStats = new SummaryStatistics();
+// normal.forEach((normalName, normalStats) -> {
+// DescriptiveStatistics currentStats = currentMonitoring.get(normalName);
+// //compare only those methods that have the same amount of traces
+//// if (currentStats != null && currentStats.getN() > 2 && normalStats.getN() == currentStats.getN()
+//// && TestUtils.tTest(normalStats.getMean(),
+//// currentStats, 0.05)) {
+//// success.getAndIncrement();
+//// increaseStats.addValue((normalStats.getMean() - currentStats.getMean()) / currentStats.getMean());
+//// } else {
+//// failed.getAndIncrement();
//// }
//
-// double meanPlusStd = mean + std;
-// DescriptiveStatistics descriptiveStatistics = sampledDataSet.get(baselineEntry.getKey());
-// if (descriptiveStatistics == null)
-// continue;
-// for (double value : descriptiveStatistics.getValues()) {
-//// if (value <= mean) {
-//// satisfied++;
-//// }
-//// if (value > mean &&
-//// value < meanPlusStd) {
-//// tolerated++;
-//// }
-// if (value <= meanPlusStd) {
-// satisfied++;
-// }
-// if (value > meanPlusStd &&
-// value < mean + (2 * std)) {
-// tolerated++;
+// if (currentStats != null && normalStats.getN() > 0 && normalStats.getN() == currentStats.getN()) {
+// double threshold = normalStats.getMean() + normalStats.getStandardDeviation();
+// if (currentStats.getMean() > threshold) {
+// failed.getAndIncrement();
+// } else if (currentStats.getMean() < normalStats.getMean()) {
+// increaseStats.addValue((normalStats.getMean() - currentStats.getMean()) / currentStats.getMean());
+// success.getAndIncrement();
// }
-// n++;
// }
+// });
+
+// if (!Double.isNaN(increaseStats.getMax())) {
+// increase.set(increaseStats.getMax());
+// } else {
+// increase.set(0);
// }
-// return new Apdex(satisfied, tolerated, n);
- }
+// return failed.get() <= 2;
- /**
- * Compare the results against the overall statistics
- * However, some methods may be really fast and some really huge -
- * if any discrepancy found, maybe we should use getApdexResultsPerEvent
- *
- * @param sampledDataSet
- * @param lastSampledTimes
- * @return
- */
- public Apdex getApdexResults(Map<Granularity, DescriptiveStatistics> sampledDataSet, DescriptiveStatistics lastSampledTimes) {
- long satisfied = 0, tolerated = 0, n = 0;
- double overallMean = getOverallAvg();
- double overallStd = getOverallStd();
-
- if (overallBaseline.getN() == overallBaseline.getWindowSize()) {
- overallMean = new Mean().evaluate(overallBaseline.getValues(), weights);
- overallStd = FastMath.sqrt(new Variance().evaluate(overallBaseline.getValues(), weights));
- }
- double meanPlusStd = overallMean + overallStd;
+ DescriptiveStatistics normalStatsMedians = new DescriptiveStatistics();
+ DescriptiveStatistics currentStatsMedians = new DescriptiveStatistics();
- for (DescriptiveStatistics granularityTraces : sampledDataSet.values()) {
- for (double value : granularityTraces.getValues()) {
-// for (double value : lastSampledTimes.getValues()) {
-// if (value <= overallMean) {
-// satisfied++;
-// }
-// if (value > overallMean &&
-// value < meanPlusStd) {
-// tolerated++;
-// }
- if (value <= meanPlusStd) {
- satisfied++;
- }
- if (value > meanPlusStd &&
- value < overallMean + (2 * overallStd)) {
- tolerated++;
+ //comparing current monitoring means against the normal's mean + std threshold (t-test variant is commented out in the return below)
+ normal.forEach((normalName, normalStats) -> {
+ SummaryStatistics currentStats = currentMonitoring.get(normalName);
+ //compare only methods with enough observations (> 2) on both the normal and current sides
+ if (currentStats != null && currentStats.getN() > 2 && normalStats.getN() > 2) {
+ normalStatsMedians.addValue(normalStats.getMean());
+ currentStatsMedians.addValue(currentStats.getMean());
+
+ double threshold = normalStats.getMean() + normalStats.getStandardDeviation();
+ if (currentStats.getMean() > threshold) {
+ failed.getAndIncrement();
+ } else {
+ increaseStats.addValue((normalStats.getMean() - currentStats.getMean()) / currentStats.getMean());
+ success.getAndIncrement();
}
- n++;
}
+ });
+
+ if (!Double.isNaN(increaseStats.getMax())) {
+ increase.set(increaseStats.getMax());
+ } else {
+ increase.set(0);
}
- return new Apdex(satisfied, tolerated, n);
- }
- public double getOverallAvg() {
- return overallBaseline.getMean();
- }
+ if (normalStatsMedians.getN() < 2) {
+ return true;
+ }
- public double getOverallStd() {
- return overallBaseline.getStandardDeviation();
- }
+ //for lusearch
+ if (success.get() + failed.get() > 1000) {
+ return success.get() > failed.get();
+ }
- public long getTotalItems() {
- return n;
- }
+// return (normalStatsMedians.getMean() + normalStatsMedians.getStandardDeviation()) > currentStatsMedians.getMean()
+// || TestUtils.pairedTTest(normalStatsMedians.getValues(), currentStatsMedians.getValues(), 0.05);
+
+ // NOTE: pairedTTest (commented out) would return true iff the mean difference is != 0; the live check only requires at most 2 failing methods
+ return //!TestUtils.pairedTTest(normalStatsMedians.getValues(), currentStatsMedians.getValues(), 0.05) ||
+ failed.get() <= 2;
- public void clear() {
- //TODO should we no clean this?
- n = 0;
-// overallBaseline.clear();
-// granularityBaseline.clear();
+ // To test the (2-sided) hypothesis <code>mean 1 = mean 2 </code> at
+ // * the 95%, use
+ // * <br><code>tTest(sampleStats1, sampleStats2, 0.05) </code>
+// return TestUtils.tTest(normalStatsMedians, currentStatsMedians, 0.05);
+
+// if (currentStats.getMean() <= threshold) {
+// success.getAndIncrement();
+// } else {
+// failed.getAndIncrement();
+// }
+// }
+// });
+//
+// return success.get() > failed.get();
}
+
+// public SummaryStatistics getNormalBaselineAverage() {
+// double[] sortedValues = baselinesPerSecondToNormal.getSortedValues();
+//
+// SummaryStatistics averageStats = new SummaryStatistics();
+//
+// Map<String, DescriptiveStatistics> methodToResponseTimes = baselinesPerSecondToResponseTimes.get(sortedValues[sortedValues.length / 2]);
+//
+// methodToResponseTimes.forEach((name, stats) -> {
+// double[] values = stats.getValues();
+// for (int i = 0; i < values.length; i++) {
+// if (values.length == 5) {
+// averageStats.addValue();
+// }
+// }
+// });
+//
+// return (mean.get() / methodToResponseTimes.size());
+// }
}
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java
index 896b6f2..846e92a 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java
@@ -1,5 +1,7 @@
package br.ufrgs.inf.prosoft.tigris.sampling;
+import com.google.common.util.concurrent.AtomicDouble;
+import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.math3.distribution.BinomialDistribution;
import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
@@ -9,6 +11,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
+import java.time.Duration;
+import java.time.Instant;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -22,6 +26,7 @@ public class Sampling {
private final long cycleLengthInMilliseconds;
private BinomialDistribution binomialDistSampling;
private double samplingRate; // in percentage, 0 to 1
+ private double initialSamplingRate; // in percentage, 0 to 1
//control vars
private boolean performanceBaselineEnabled = false;
@@ -33,9 +38,6 @@ public class Sampling {
private PerformanceBaselineDataSet performanceBaselineDataSet = new PerformanceBaselineDataSet();
private Map<Granularity, DescriptiveStatistics> sampledDataSet = new ConcurrentHashMap<>();
- //PBA history
-// private Queue<PerformanceBaselineDataSet> lastFourPerformanceBaselineDataSets = new CircularFifoQueue<>(4);
-
Logger logger = LoggerFactory.getLogger(Sampling.class);
/**
@@ -46,6 +48,7 @@ public class Sampling {
private double z = 1.96, p = 0.5, e = 0.05;
public Sampling(double initialSamplingRate, long cycleLengthInMilliseconds, boolean adaptiveSamplingRate) {
+ this.initialSamplingRate = initialSamplingRate;
this.samplingRate = initialSamplingRate;
this.adaptiveSamplingRate = adaptiveSamplingRate;
this.cycleLengthInMilliseconds = cycleLengthInMilliseconds;
@@ -54,13 +57,14 @@ public class Sampling {
}
private Object binomialDistSamplingLock = new Object();
- private void resetSamplingDistribution(){
+
+ private void resetSamplingDistribution() {
synchronized (this.binomialDistSamplingLock) {
this.binomialDistSampling = new BinomialDistribution(1, samplingRate);
}
}
- public boolean simpleSamplingDecision(){
+ public boolean simpleSamplingDecision() {
synchronized (this.binomialDistSamplingLock) {
return binomialDistSampling.sample() == 1; // sampling rate evaluation
}
@@ -77,12 +81,12 @@ public class Sampling {
boolean simpleSamplingDecision = simpleSamplingDecision();
if (adaptiveSamplingRate
- && simpleSamplingDecision
- && population.getProportion(granularity) >= sample.getProportion(granularity)
+ && simpleSamplingDecision
+ && population.getProportion(granularity) >= sample.getProportion(granularity)
) // sample has not enough items of that granularity compared to the population)
- {
- return true;
- }
+ {
+ return true;
+ }
return simpleSamplingDecision;
}
@@ -93,108 +97,86 @@ public class Sampling {
boolean hasComparedMean = tTestEvaluation(decayingConfidenceFactor);
return adaptiveSamplingRate
- // margin of error is lower than threshold
+ // margin of error is lower than threshold
// && getSampleSizeErrorMargin(z * decayingConfidenceFactor) < e
- // the sample has the min sample size based on the population
- && hasMinimumSize
- // proportion test
- && hasSameProportion
- // t-test
- && hasComparedMean;
+ // the sample has the min sample size based on the population
+ && hasMinimumSize
+ // proportion test
+ && hasSameProportion
+ // t-test
+ && hasComparedMean;
}
private Object samplingRateLock = new Object();
- public void adaptSamplingRate() {
- synchronized (samplingRateLock) {
- if (this.sampledDataSet.isEmpty()) {
- logger.info("No sampled data, doing nothing...");
-// if no monitoringImpact, increase by 1%
- samplingRate += 0.01;
- if (samplingRate > 1)
- samplingRate = 1;
+ private boolean reducedInPreviousBaseline = false;
- logger.info("New sampling rate: {}", samplingRate);
- this.resetSamplingDistribution();
- return;
- }
+ public void adaptSamplingRate(PerformanceBaselineDataSet monitoring,
+ int currentOperationsPerSecond,
+ double currentSamplingRate) {
+ synchronized (samplingRateLock) {
-// Apdex apdex = this.performanceBaselineDataSet.getApdexResults(this.sampledDataSet, this.lastSampledTimes);
-// double baselineImpact = performanceBaselineDataSet.getBaselineImpactedByWorkload();
- if (!this.performanceBaselineDataSet.isAppStruggling()) {
- //TODO decreases based on how the average compares to the history?
- logger.info("App is not struggling, increasing the current sampling rate {} by {}%", samplingRate, 0.25);
- samplingRate = samplingRate + (samplingRate * 0.25);
-
-// //if we have just 1 tolerated, the monitoringImpact will not be zero anymore
-// if (monitoringImpact <= 0.1) {
-// logger.info("No monitoring monitoringImpact detected: {}, increasing the sampling rate...", monitoringImpact);
-// //if no monitoringImpact, increase by 10%
-// samplingRate += 0.1;
-// } else
-// //otherwise stays the same - not necessary here
-// if (monitoringImpact > 0.1 && monitoringImpact <= 0.2) {
-// logger.info("Minimal monitoring monitoringImpact detected: {}, keeping it the same...", monitoringImpact);
-// } else if (monitoringImpact > 0.2) {
-// double reduction = monitoringImpact - 0.2;
-// logger.info("Monitoring monitoringImpact detected: {}, decreasing the current sampling rate {} by {}%", monitoringImpact, samplingRate, reduction);
-//// logger.info("{}, {}, {}", apdex.getSatisfied(), apdex.getTolerated(), apdex.getN());
-//// logger.info("{}", this.performanceBaselineDataSet.getOverallAvg());
-//// logger.info("{}", this.performanceBaselineDataSet.getOverallStd());
-//// logger.info("{}", this.performanceBaselineDataSet.getTotalItems());
-//
-// //reduce by the amount of overhead
-// samplingRate = samplingRate - (samplingRate * (reduction / 1d));
-// }
- } else { //app is struggling
- Apdex apdex = this.performanceBaselineDataSet.getApdexResultsPerEvent(this.sampledDataSet);
- double monitoringImpact = (1 - ((apdex.getSatisfied() + 0.5 * apdex.getTolerated()) / apdex.getN()));
- logger.info("App is struggling, decreasing the current sampling rate {} by {}%", samplingRate, monitoringImpact);
- samplingRate = samplingRate - (samplingRate * monitoringImpact);
+ AtomicDouble factor = new AtomicDouble();
+ if (performanceBaselineEnabled) {
+ //TODO: every second, saves the req/s and "closes" the baseline
+ // save and open a new one
+// if (Duration.between(baselineWindowStart, Instant.now()).getSeconds() > baselineWindow) {
+ performanceBaselineEnabled = false;
+// }
+
+ //is baseline behavior under the "normal" baseline behavior?
+ if (monitoring.isBaselineUnderAveragePlusStd(factor)) {
+ // app not struggling
+ // keeping the sampling rate
+ logger.info("Baseline: app not struggling -> keeping sampling rate");
+ reducedInPreviousBaseline = false;
+ } else {
+ // app struggling
+ // reduce the sampling rate by X%
+ logger.info("Baseline: app struggling -> reduce sampling rate by {}", factor.get());
+ samplingRate -= (samplingRate * factor.get()) / 100;
+ reducedInPreviousBaseline = true;
+ }
+
+ monitoring.trackBaselinePerSecond(currentOperationsPerSecond);
+ } else {
+ //is monitoring behavior under the "normal" behavior?
+ if (monitoring.isMonitoringUnderAveragePlusStd(factor)) {
+ //increase the sampling rate by 1%
+ logger.info("Monitoring: no impact -> increase sampling rate by {}", factor.get());
+ samplingRate += (samplingRate * factor.get()) / 100;
+ reducedInPreviousBaseline = false;
+ } else {
+ if (reducedInPreviousBaseline) {
+ logger.info("App still struggling, reducing sampling rate by 1%.");
+ samplingRate -= 0.01;
+ }
+
+ if (Duration.between(baselineWindowStart, Instant.now()).getSeconds() > 3) {
+ baselineWindowStart = Instant.now();
+ logger.info("Enabling performance baseline, because monitoring seems degraded.");
+ performanceBaselineEnabled = true;
+ }
+ }
+ monitoring.trackMonitoringPerSecond(currentOperationsPerSecond);
}
+ if (samplingRate > initialSamplingRate)
+ samplingRate = initialSamplingRate;
+
if (samplingRate < 0.01)
samplingRate = 0.01;
- if (samplingRate > 1)
- samplingRate = 1;
-
- //update the binomial with the new sampling rate distribution
- resetSamplingDistribution();
- logger.info("New sampling rate: {}", samplingRate);
+ this.resetSamplingDistribution();
+ logger.info("New sampling rate: {} -> {}", currentSamplingRate, samplingRate);
}
- }
-
- private long maxOps = 0;
- private long minOps = Integer.MAX_VALUE;
- public double adaptSamplingRateInverselyProportional(long currentOps) {
- synchronized (samplingRateLock) {
-
- if (minOps > currentOps)
- minOps = currentOps;
-
- if (currentOps > maxOps)
- maxOps = currentOps;
- samplingRate = 1 - ((double) (currentOps - minOps) / (maxOps - minOps));
-
- if (samplingRate == 0)
- samplingRate = 0.01;
-
- //update the binomial with the new sampling rate distribution
- resetSamplingDistribution();
- logger.info("New sampling rate: {}", samplingRate);
- return samplingRate;
- }
}
- public void addPerformanceBaselineItem(Granularity granularity, long executionTime) {
- if(this.performanceBaselineDataSet.getTotalItems() < minimumSampleSize) {
- this.performanceBaselineDataSet.addItem(granularity, executionTime);
- }
- }
+// public void addPerformanceBaselineItem(Granularity granularity, long executionTime) {
+// this.performanceBaselineDataSet.addItem(granularity, executionTime);
+// }
-// DescriptiveStatistics lastSampledTimes = new DescriptiveStatistics(1200);
public void addSampledItem(Granularity granularity, long executionTime) {
sample.addItem(granularity, executionTime);
@@ -203,7 +185,7 @@ public class Sampling {
sampledDataSet.put(granularity, statistics);
}
- public long getMonitoringCycleTime(){
+ public long getMonitoringCycleTime() {
return (System.currentTimeMillis() - startTime);
}
@@ -211,7 +193,7 @@ public class Sampling {
return performanceBaselineEnabled;
}
- public double decayingConfidenceFactor(long timeInMilliseconds){
+ public double decayingConfidenceFactor(long timeInMilliseconds) {
synchronized (decayingPrecisionLock) {
return new BigDecimal(decayingPrecision.value(timeInMilliseconds))
.setScale(4, BigDecimal.ROUND_FLOOR).doubleValue();
@@ -277,6 +259,7 @@ public class Sampling {
}
private Object decayingPrecisionLock = new Object();
+
public void startMonitoringCycle() {
synchronized (decayingPrecisionLock) {
this.decayingPrecision = new ExponentialDecayFunction(1, 0.1, cycleLengthInMilliseconds);
@@ -289,36 +272,45 @@ public class Sampling {
}
public MonitoringCycle endMonitoringCycle() {
- MonitoringCycle monitoringCycle = new MonitoringCycle(getSample(), getPopulation());
+ MonitoringCycle monitoringCycle = new MonitoringCycle(getSample(), getPopulation(), getMonitoringCycleTime());
logger.info("Adaptive Sampling Monitoring Cycle Finished: {}", monitoringCycle);
startMonitoringCycle();
return monitoringCycle;
}
- private Long minimumSampleSize;
- public void managePerformanceBaseline() {
- if (performanceBaselineEnabled) { //is it already enabled?
- if (this.performanceBaselineDataSet.getTotalItems() >= minimumSampleSize) { //got enough traces for PB
- logger.info("Collected performance baseline of {} traces", this.performanceBaselineDataSet.getTotalItems());
- performanceBaselineEnabled = false;
- minimumSampleSize = null;
-// lastFourPerformanceBaselineDataSets.add(this.performanceBaselineDataSet);
-
- adaptSamplingRate(); //adapt the sampling rate based on heuristic
- this.performanceBaselineDataSet.clear();
- }
- return;
- }
-
+ private int baselineWindow;
+ private Instant baselineWindowStart = Instant.now();
+
+// public void managePerformanceBaseline(PerformanceBaselineDataSet performanceMonitoring) {
+// if (performanceBaselineEnabled) { //is it already enabled?
+// if (Duration.between(baselineWindowStart, Instant.now()).getSeconds() > baselineWindow) {
+//// logger.info("Collected performance baseline of {} traces", this.performanceBaselineDataSet.getTotalItems());
+// performanceBaselineEnabled = false;
+// this.performanceBaselineDataSet.clear();
+// }
+// return;
+// }
+//
// double chance = new BinomialDistribution(1, 0.1d).sample();
// if (chance == 1) {
- minimumSampleSize = getMinimumSampleSize(this.population.getTotalItems());
- if (minimumSampleSize > 0) {
- logger.info("Enabling performance baseline that needs {} traces.", minimumSampleSize);
- performanceBaselineEnabled = true;
- }
+// baselineWindow = 2;//RandomUtils.nextInt(2, 4);
+// baselineWindowStart = Instant.now();
+//
+// logger.info("Enabling performance baseline that needs {} seconds of traces.", baselineWindow);
+// performanceBaselineEnabled = true;
// }
- }
+//
+//
+//
+//// double chance = new BinomialDistribution(1, 0.1d).sample();
+//// if (chance == 1) {
+//// baselineWindow = 2;//RandomUtils.nextInt(2, 4);
+//// baselineWindowStart = Instant.now();
+////
+//// logger.info("Enabling performance baseline that needs {} seconds of traces.", baselineWindow);
+//// performanceBaselineEnabled = true;
+//// }
+// }
public FrequencyDataSet getSample() {
return sample;
@@ -339,4 +331,25 @@ public class Sampling {
public boolean isAdaptiveSamplingRate() {
return adaptiveSamplingRate;
}
+
+ SummaryStatistics inverselyStats = new SummaryStatistics();
+ public double adaptSamplingRateInverselyProportional(int operationsPerSecond) {
+ if (operationsPerSecond == 0)
+ return samplingRate;
+
+ inverselyStats.addValue(operationsPerSecond);
+ double newSamplingRate =
+ initialSamplingRate
+ * ((operationsPerSecond - inverselyStats.getMin()) / (inverselyStats.getMax() - inverselyStats.getMin()));
+ logger.info("New sampling rate: {} -> {}", samplingRate, newSamplingRate);
+ samplingRate = newSamplingRate;
+
+ if (samplingRate > initialSamplingRate)
+ samplingRate = initialSamplingRate;
+
+ if (samplingRate < 0.01)
+ samplingRate = 0.01;
+
+ return getSamplingRate();
+ }
}
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java
index de23118..7ba67f5 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java
@@ -2,7 +2,6 @@ package br.ufrgs.inf.prosoft.tigris.sampling;
import br.ufrgs.inf.prosoft.tigris.configuration.annotation.TigrisConfiguration;
import br.ufrgs.inf.prosoft.tigris.exceptions.ConfigurationException;
-import br.ufrgs.inf.prosoft.tigris.monitoring.util.threads.NamedThreads;
import br.ufrgs.inf.prosoft.tigris.utils.ConfigurationUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
@@ -11,13 +10,6 @@ import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
import static java.lang.System.nanoTime;
@Aspect
@@ -83,9 +75,9 @@ public class SamplingAspect implements Runnable {
sampling = new Sampling(tigrisConfiguration.samplingPercentage(), tigrisConfiguration.cycleTimeInMilliseconds(), tigrisConfiguration.adaptiveSamplingRate());
- adaptiveSamplingExecutor.scheduleWithFixedDelay(
- this::run,
- 5, 1, TimeUnit.SECONDS);
+// adaptiveSamplingExecutor.scheduleWithFixedDelay(
+// this::run,
+// 5, 1, TimeUnit.SECONDS);
}
static Logger logger = LoggerFactory.getLogger(SamplingAspect.class);
@@ -94,10 +86,10 @@ public class SamplingAspect implements Runnable {
public static boolean samplingEnabled = true;
- private final ScheduledExecutorService adaptiveSamplingExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
- "adaptive-sampling",
- "readiness evaluation, pba trigger and sampling adaptation"
- ));
+// private final ScheduledExecutorService adaptiveSamplingExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
+// "adaptive-sampling",
+// "readiness evaluation, pba trigger and sampling adaptation"
+// ));
@Around("anyCall()")
public Object aroundMethods(ProceedingJoinPoint joinPoint) throws Throwable {
@@ -109,47 +101,49 @@ public class SamplingAspect implements Runnable {
long endTime = nanoTime();
long totalTime = endTime - startTime;
+ String signature = joinPoint.getSignature().toString() + joinPoint.getArgs()[0].toString(); //TODO this is to distinguish traces in H2 or lusearch / also run with Xalan / cassandra / tradebeans?
+ Granularity granularity = new Granularity(GranularityType.METHOD, signature);
+
if (samplingEnabled) {
- String signature = joinPoint.getSignature().toString() + joinPoint.getArgs()[0].toString(); //TODO this is to distinguish traces in H2 or lusearch / also run with Xalan / cassandra / tradebeans?
- Granularity granularity = new Granularity(GranularityType.METHOD, signature);
+// String signature = joinPoint.getSignature().toString() + joinPoint.getArgs()[0].toString(); //TODO this is to distinguish traces in H2 or lusearch / also run with Xalan / cassandra / tradebeans?
+// Granularity granularity = new Granularity(GranularityType.METHOD, signature);
if (sampling.isPerformanceBaselineEnabled()) {
- sampling.addPerformanceBaselineItem(granularity, totalTime);
+ monitoring.addPerformanceBaselineItem(granularity, totalTime);
return result;
}
boolean decision = sampling.samplingDecision(granularity, totalTime);
if (decision)
-// sampling.addSampledItem(granularity, nanoTime() - startTime);
- sampling.addSampledItem(granularity, totalTime);
+ sampling.addSampledItem(granularity, nanoTime() - startTime);
}
+ monitoring.addMonitoringItem(granularity, totalTime);
return result;
}
- static double currentSamplingRate;
- static MonitoringCycle cycle = new MonitoringCycle();
- //used for the baseline with inversely proportional sampling
- public static long currentOps = 0;
- public static long currentUsers = 0;
public static boolean adaptSamplingRateInverselyProportionalOps = false;
- public static boolean adaptSamplingRateInverselyProportionalUsers = false;
- @Override
- public void run() {
- if (adaptSamplingRateInverselyProportionalOps) {
- currentSamplingRate = sampling.adaptSamplingRateInverselyProportional(currentOps);
- return;
- }
+ public static void addOperationsPerSecondAndAdapt (int operationsPerSecond) {
- if (adaptSamplingRateInverselyProportionalUsers) {
- currentSamplingRate = sampling.adaptSamplingRateInverselyProportional(currentUsers);
+ if (adaptSamplingRateInverselyProportionalOps) {
+ currentSamplingRate = sampling.adaptSamplingRateInverselyProportional(operationsPerSecond);
return;
}
if (SamplingAspect.enabled && sampling.isAdaptiveSamplingRate()) {
- sampling.managePerformanceBaseline();
currentSamplingRate = sampling.getSamplingRate();
+ currentNormalBehavior = monitoring.baselinesPerSecondToNormal.getPercentile(50);
+ currentNormalMonitoring = monitoring.monitoringPerSecondToNormal.getPercentile(50);
+
+ // response times are used only to increase/decrease the sampling rate
+ // if keep the response times under the error margin (compared to the baseline times) -> increase
+ // otherwise decrease it
+
+ //adapt the sampling rate based on heuristic every new second
+ sampling.adaptSamplingRate(monitoring,
+ operationsPerSecond,
+ currentSamplingRate);
if (!sampling.isPerformanceBaselineEnabled() && sampling.isReady()) {
logger.info("Sample is ready, releasing for analysis and resetting...");
@@ -158,10 +152,49 @@ public class SamplingAspect implements Runnable {
}
}
+ public static PerformanceBaselineDataSet monitoring = new PerformanceBaselineDataSet();
+
+
+ static double currentSamplingRate;
+ static double currentNormalMonitoring;
+ static double currentNormalBehavior;
+ static MonitoringCycle cycle = new MonitoringCycle();
+
+ @Override
+ public void run() {
+// if (adaptSamplingRateInverselyProportionalOps) {
+// currentSamplingRate = sampling.adaptSamplingRateInverselyProportional(currentOps);
+// return;
+// }
+//
+// if (adaptSamplingRateInverselyProportionalUsers) {
+// currentSamplingRate = sampling.adaptSamplingRateInverselyProportional(currentUsers);
+// return;
+// }
+//
+// if (SamplingAspect.enabled && sampling.isAdaptiveSamplingRate()) {
+// sampling.managePerformanceBaseline();
+// currentSamplingRate = sampling.getSamplingRate();
+//
+// if (!sampling.isPerformanceBaselineEnabled() && sampling.isReady()) {
+// logger.info("Sample is ready, releasing for analysis and resetting...");
+// cycle = sampling.endMonitoringCycle();
+// }
+// }
+ }
+
public static double getCurrentSamplingRate() {
return currentSamplingRate;
}
+ public static double getCurrentNormalMonitoring() {
+ return currentNormalMonitoring;
+ }
+
+ public static double getCurrentNormalBehavior() {
+ return currentNormalBehavior;
+ }
+
public static MonitoringCycle getCycle() {
return cycle;
}
diff --git a/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/Statistics.java b/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/Statistics.java
index da71044..c763899 100644
--- a/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/Statistics.java
+++ b/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/Statistics.java
@@ -4,6 +4,10 @@ import br.ufrgs.inf.prosoft.tigris.utils.StatisticalTest;
import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
import org.apache.commons.math3.random.EmpiricalDistribution;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
+import org.apache.commons.math3.stat.descriptive.rank.Median;
+import org.apache.commons.math3.stat.descriptive.rank.Percentile;
+import org.apache.commons.math3.util.CentralPivotingStrategy;
+import org.apache.commons.math3.util.KthSelector;
import org.junit.Assert;
import org.junit.Test;
import umontreal.ssj.gof.GofStat;
@@ -100,4 +104,16 @@ public class Statistics {
}
+ @Test
+ public void testPercentileMedian(){
+ DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics();
+ descriptiveStatistics.addValue(2);
+ descriptiveStatistics.addValue(3);
+ Median median = new Median()
+ .withEstimationType(Percentile.EstimationType.R_2)
+ .withKthSelector(new KthSelector(new CentralPivotingStrategy()));
+ descriptiveStatistics.setPercentileImpl(median);
+ System.out.println(descriptiveStatistics.getPercentile(51));
+ }
+
}