package br.ufrgs.inf.prosoft.tigris.sampling;
import org.apache.commons.math3.distribution.BinomialDistribution;
import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.inference.TestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* The sampling control and decision.
*/
public class Sampling {
//constructor parameters - defined once
private final boolean adaptiveSamplingRate;
private final long cycleLengthInMilliseconds;
private BinomialDistribution binomialDistSampling;
private double samplingRate; // in percentage, 0 to 1
//control vars
private boolean performanceBaselineEnabled = false;
// recreated every new monitoring cycle
private long startTime;
private ExponentialDecayFunction decayingPrecision;
private FrequencyDataSet population = new FrequencyDataSet(), sample = new FrequencyDataSet();
private PerformanceBaselineDataSet performanceBaselineDataSet = new PerformanceBaselineDataSet();
private Map<Granularity, DescriptiveStatistics> sampledDataSet = new ConcurrentHashMap<>();
//PBA history
// private Queue<PerformanceBaselineDataSet> lastFourPerformanceBaselineDataSets = new CircularFifoQueue<>(4);
Logger logger = LoggerFactory.getLogger(Sampling.class);
/**
* z confidence value, ex: 1.96 for 95%
* p proportion of the population, 0.5 is default
* e margin of error, ex: 0.05 for 5%
*/
private double z = 1.96, p = 0.5, e = 0.05;
public Sampling(double initialSamplingRate, long cycleLengthInMilliseconds, boolean adaptiveSamplingRate) {
this.samplingRate = initialSamplingRate;
this.adaptiveSamplingRate = adaptiveSamplingRate;
this.cycleLengthInMilliseconds = cycleLengthInMilliseconds;
this.binomialDistSampling = new BinomialDistribution(1, samplingRate);
startMonitoringCycle();
}
private Object binomialDistSamplingLock = new Object();
private void resetSamplingDistribution(){
synchronized (this.binomialDistSamplingLock) {
this.binomialDistSampling = new BinomialDistribution(1, samplingRate);
}
}
public boolean simpleSamplingDecision(){
synchronized (this.binomialDistSamplingLock) {
return binomialDistSampling.sample() == 1; // sampling rate evaluation
}
}
public boolean samplingDecision(Granularity granularity, long executionTime) {
if (population.getTotalItems() == 0)
startTime = System.currentTimeMillis();
population.addItem(granularity, executionTime);
if (performanceBaselineEnabled) {
return false;
}
boolean simpleSamplingDecision = simpleSamplingDecision();
if (adaptiveSamplingRate
&& simpleSamplingDecision
&& population.getProportion(granularity) >= sample.getProportion(granularity)
) // sample has not enough items of that granularity compared to the population)
{
return true;
}
return simpleSamplingDecision;
}
public boolean isReady() {
double decayingConfidenceFactor = decayingConfidenceFactor(getMonitoringCycleTime());
boolean hasMinimumSize = sample.getTotalItems() > getMinimumSampleSize(z - (z * decayingConfidenceFactor));
boolean hasSameProportion = isSameProportion(decayingConfidenceFactor);
boolean hasComparedMean = tTestEvaluation(decayingConfidenceFactor);
return adaptiveSamplingRate
// margin of error is lower than threshold
// && getSampleSizeErrorMargin(z * decayingConfidenceFactor) < e
// the sample has the min sample size based on the population
&& hasMinimumSize
// proportion test
&& hasSameProportion
// t-test
&& hasComparedMean;
}
private Object samplingRateLock = new Object();
public void adaptSamplingRate() {
synchronized (samplingRateLock) {
if (this.sampledDataSet.isEmpty()) {
logger.info("No sampled data, doing nothing...");
// if no monitoringImpact, increase by 1%
samplingRate += 0.01;
if (samplingRate > 1)
samplingRate = 1;
logger.info("New sampling rate: {}", samplingRate);
this.resetSamplingDistribution();
return;
}
// Apdex apdex = this.performanceBaselineDataSet.getApdexResults(this.sampledDataSet, this.lastSampledTimes);
// double baselineImpact = performanceBaselineDataSet.getBaselineImpactedByWorkload();
if (!this.performanceBaselineDataSet.isAppStruggling()) {
//TODO decreases based on how the average compares to the history?
logger.info("App is not struggling, increasing the current sampling rate {} by {}%", samplingRate, 0.25);
samplingRate = samplingRate + (samplingRate * 0.25);
// //if we have just 1 tolerated, the monitoringImpact will not be zero anymore
// if (monitoringImpact <= 0.1) {
// logger.info("No monitoring monitoringImpact detected: {}, increasing the sampling rate...", monitoringImpact);
// //if no monitoringImpact, increase by 10%
// samplingRate += 0.1;
// } else
// //otherwise stays the same - not necessary here
// if (monitoringImpact > 0.1 && monitoringImpact <= 0.2) {
// logger.info("Minimal monitoring monitoringImpact detected: {}, keeping it the same...", monitoringImpact);
// } else if (monitoringImpact > 0.2) {
// double reduction = monitoringImpact - 0.2;
// logger.info("Monitoring monitoringImpact detected: {}, decreasing the current sampling rate {} by {}%", monitoringImpact, samplingRate, reduction);
//// logger.info("{}, {}, {}", apdex.getSatisfied(), apdex.getTolerated(), apdex.getN());
//// logger.info("{}", this.performanceBaselineDataSet.getOverallAvg());
//// logger.info("{}", this.performanceBaselineDataSet.getOverallStd());
//// logger.info("{}", this.performanceBaselineDataSet.getTotalItems());
//
// //reduce by the amount of overhead
// samplingRate = samplingRate - (samplingRate * (reduction / 1d));
// }
} else { //app is struggling
Apdex apdex = this.performanceBaselineDataSet.getApdexResultsPerEvent(this.sampledDataSet);
double monitoringImpact = (1 - ((apdex.getSatisfied() + 0.5 * apdex.getTolerated()) / apdex.getN()));
logger.info("App is struggling, decreasing the current sampling rate {} by {}%", samplingRate, monitoringImpact);
samplingRate = samplingRate - (samplingRate * monitoringImpact);
}
if (samplingRate < 0.01)
samplingRate = 0.01;
if (samplingRate > 1)
samplingRate = 1;
//update the binomial with the new sampling rate distribution
resetSamplingDistribution();
logger.info("New sampling rate: {}", samplingRate);
}
}
public void addPerformanceBaselineItem(Granularity granularity, long executionTime) {
if(this.performanceBaselineDataSet.getTotalItems() < minimumSampleSize) {
this.performanceBaselineDataSet.addItem(granularity, executionTime);
}
}
// DescriptiveStatistics lastSampledTimes = new DescriptiveStatistics(1200);
public void addSampledItem(Granularity granularity, long executionTime) {
sample.addItem(granularity, executionTime);
DescriptiveStatistics statistics = sampledDataSet.getOrDefault(granularity, new DescriptiveStatistics());
statistics.addValue(executionTime);
sampledDataSet.put(granularity, statistics);
}
public long getMonitoringCycleTime(){
return (System.currentTimeMillis() - startTime);
}
public boolean isPerformanceBaselineEnabled() {
return performanceBaselineEnabled;
}
public double decayingConfidenceFactor(long timeInMilliseconds){
synchronized (decayingPrecisionLock) {
return new BigDecimal(decayingPrecision.value(timeInMilliseconds))
.setScale(4, BigDecimal.ROUND_FLOOR).doubleValue();
}
}
private boolean tTestEvaluation(double decayingConfidenceFactor) {
SummaryStatistics sampleAsDescriptiveStatistics = sample.getAsDescriptiveStatistics();
if (sampleAsDescriptiveStatistics.getN() < 2) return true;
if (sampleAsDescriptiveStatistics.getVariance() == 0) return true;
SummaryStatistics populationAsDescriptiveStatistics = population.getAsDescriptiveStatistics();
double popMean = populationAsDescriptiveStatistics.getMean();
//for some reason, t-test returns false when the sets are exactly the same...
if (sample.getTotalItems() == population.getTotalItems())
return true;
double significanceLevel = 0.5 - (0.5 * decayingConfidenceFactor);
if (significanceLevel == 0.5) //maximum cycle time reached
return true;
else {
//To test the (one-sample t-test - compare with the population mean)
// hypothesis sample mean = mu at the 95% level
return TestUtils.tTest(popMean,
sampleAsDescriptiveStatistics,
0.5 - (0.5 * decayingConfidenceFactor));
}
}
//sample proportion is the same as population
private boolean isSameProportion(double decayingConfidenceFactor) {
return population.getGranularities().stream().allMatch(
granularity -> {
double popProportion = population.getProportion(granularity);
double samProportion = sample.getProportion(granularity);
double error = popProportion - (popProportion * decayingConfidenceFactor);
return samProportion <= popProportion + error &&
samProportion >= popProportion - error;
});
}
private long getMinimumSampleSize(long n) {
return getMinimumSampleSize(n, z);
}
private long getMinimumSampleSize(double precision) {
return getMinimumSampleSize(population.getTotalItems(), precision);
}
private long getMinimumSampleSize(long n, double precision) {
if (n <= 1) return 0;
long n_inf = (long) ((Math.pow(precision, 2) * p * (1 - p)) / Math.pow(e, 2));
return n_inf / (1 + ((n_inf - 1) / n));
}
private double getSampleSizeErrorMargin(double precision) {
if (population.getTotalItems() <= 1) return 0;
double e_n_inf = Math.sqrt((Math.pow(precision, 2) * p * (1 - p)) / sample.getTotalItems());
return e_n_inf * Math.sqrt((population.getTotalItems() - sample.getTotalItems()) / (population.getTotalItems() - 1));
}
private Object decayingPrecisionLock = new Object();
public void startMonitoringCycle() {
synchronized (decayingPrecisionLock) {
this.decayingPrecision = new ExponentialDecayFunction(1, 0.001, cycleLengthInMilliseconds);
}
this.sample.clear();
this.population.clear();
this.sampledDataSet.clear();
this.startTime = System.currentTimeMillis();
logger.info("Monitoring is reset...");
}
public MonitoringCycle endMonitoringCycle() {
MonitoringCycle monitoringCycle = new MonitoringCycle(
getSample().getMeanExecutionTime(),
getPopulation().getMeanExecutionTime(),
getSample().getTotalItems(),
getPopulation().getTotalItems());
logger.info("Adaptive Sampling Monitoring Cycle Finished: {}", monitoringCycle);
startMonitoringCycle();
return monitoringCycle;
}
private Long minimumSampleSize;
public void managePerformanceBaseline() {
if (performanceBaselineEnabled) { //is it already enabled?
if (this.performanceBaselineDataSet.getTotalItems() >= minimumSampleSize) { //got enough traces for PB
logger.info("Collected performance baseline of {} traces", this.performanceBaselineDataSet.getTotalItems());
performanceBaselineEnabled = false;
minimumSampleSize = null;
// lastFourPerformanceBaselineDataSets.add(this.performanceBaselineDataSet);
adaptSamplingRate(); //adapt the sampling rate
this.performanceBaselineDataSet.clear();
}
return;
}
// double chance = new BinomialDistribution(1, 0.1d).sample();
// if (chance == 1) {
minimumSampleSize = getMinimumSampleSize(this.population.getTotalItems());
if (minimumSampleSize > 0) {
logger.info("Enabling performance baseline that needs {} traces.", minimumSampleSize);
performanceBaselineEnabled = true;
}
// }
}
public FrequencyDataSet getSample() {
return sample;
}
public FrequencyDataSet getPopulation() {
return population;
}
public Map<Granularity, DescriptiveStatistics> getSampledTraces() {
return sampledDataSet;
}
public double getSamplingRate() {
return samplingRate;
}
public boolean isAdaptiveSamplingRate() {
return adaptiveSamplingRate;
}
}