adaptive-monitoring-framework
Changes
dacapo-experiments.md 18(+18 -0)
tigris/pom.xml 25(+24 -1)
tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java 13(+3 -10)
Details
dacapo-experiments.md 18(+18 -0)
diff --git a/dacapo-experiments.md b/dacapo-experiments.md
new file mode 100644
index 0000000..0aeceb7
--- /dev/null
+++ b/dacapo-experiments.md
@@ -0,0 +1,18 @@
+Go to the dacapobench/benchmarks folder - this is the dev-chopin repo
+
+- Run the Tigris build: mvn clean install
+- Run the h2 build: mvn clean install -DskipTests
+ - this generates the jar file in target/
+- Copy the jar to dacapo: cp ../../dacaposources/h2database/h2/target/h2-2.0.202-SNAPSHOT-Tigris.jar libs/h2/dist/jar/
+- Build DaCapo: ant h2 [builds a specific benchmark, h2]
+- Run the DaCapo build: java -jar dacapo-evaluation-git+ae20e7d.jar h2
+
+-Xms2000m -Xmx2000m
+
+-javaagent:C:\Users\mertz\.m2\repository\org\aspectj\aspectjweaver\1.8.9\aspectjweaver-1.8.9.jar -verbose:class
+
+
+Steps to open with IntelliJ
+- Build the benchmark with DaCapo
+- Open all the files and folders (dependencies) via the IntelliJ project structure
+
tigris/pom.xml 25(+24 -1)
diff --git a/tigris/pom.xml b/tigris/pom.xml
index 2c274d4..a93dfa8 100644
--- a/tigris/pom.xml
+++ b/tigris/pom.xml
@@ -6,7 +6,7 @@
<groupId>br.ufrgs.inf.prosoft</groupId>
<artifactId>tigris</artifactId>
- <version>0.12.0-SNAPSHOT</version>
+ <version>0.14.0-SNAPSHOT</version>
<properties>
<aspectj.version>1.8.9</aspectj.version>
@@ -211,6 +211,29 @@
</dependency>
</dependencies>
</plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>3.1.1</version>
+
+ <configuration>
+ <descriptorRefs>
+ <descriptorRef>jar-with-dependencies</descriptorRef>
+ </descriptorRefs>
+ </configuration>
+
+ <executions>
+ <execution>
+ <id>make-assembly</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+
+ </plugin>
</plugins>
</build>
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/MonitorAspect.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/MonitorAspect.java
index c5565bc..8f830f7 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/MonitorAspect.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/MonitorAspect.java
@@ -1,40 +1,41 @@
-package br.ufrgs.inf.prosoft.tigris.monitoring.aspects;
-
-import ca.uqac.lif.bullwinkle.BnfParser;
-import org.aspectj.lang.ProceedingJoinPoint;
-import org.aspectj.lang.annotation.Around;
-import org.aspectj.lang.annotation.Aspect;
-import org.aspectj.lang.annotation.Pointcut;
-
-import java.io.IOException;
-
-@Aspect
-public class MonitorAspect {
-
- @Pointcut(
- //any execution except the own framework
- "(execution(!void *(..)) && !within(br.ufrgs.inf.prosoft..*) " +
- //avoid calls from repository while serializing objects, it is necessary if a hash could not be used
- //"&& !cflow(call(* br.ufrgs.inf.prosoft.tigris.monitoring.storage..*(..))) " +
- //conditional to enable and disable at runtime
- "&& if())"
- )
- public static boolean anyCall() {
- return TigrisCoordinator.isEnabled();
- }
-
- private TigrisCoordinator tigrisCoordinator;
-
- public MonitorAspect() throws IOException, BnfParser.InvalidGrammarException {
- tigrisCoordinator = new TigrisCoordinator();
- }
-
- @Around("anyCall()")
- public Object aroundMethods(ProceedingJoinPoint joinPoint) throws Throwable {
- if (tigrisCoordinator.isAllowed(joinPoint)) {
- return tigrisCoordinator.process(joinPoint);
- } else {
- return joinPoint.proceed();
- }
- }
-}
\ No newline at end of file
+//package br.ufrgs.inf.prosoft.tigris.monitoring.aspects;
+//
+//import ca.uqac.lif.bullwinkle.BnfParser;
+//import org.aspectj.lang.ProceedingJoinPoint;
+//import org.aspectj.lang.annotation.Around;
+//import org.aspectj.lang.annotation.Aspect;
+//import org.aspectj.lang.annotation.Pointcut;
+//
+//import java.io.IOException;
+//
+//@Aspect
+//public class MonitorAspect {
+//
+// @Pointcut(
+// //any execution except the own framework
+// "(execution(* *(..)) && !within(br.ufrgs.inf.prosoft..*) " +
+// //avoid calls from repository while serializing objects, it is necessary if a hash could not be used
+// //"&& !cflow(call(* br.ufrgs.inf.prosoft.tigris.monitoring.storage..*(..))) " +
+// //conditional to enable and disable at runtime
+// "&& if())"
+// )
+// public static boolean anyCall() {
+// return TigrisCoordinator.isEnabled();
+// }
+//
+// private TigrisCoordinator tigrisCoordinator;
+//
+// public MonitorAspect() throws IOException, BnfParser.InvalidGrammarException {
+// tigrisCoordinator = new TigrisCoordinator();
+// }
+//
+// @Around("anyCall()")
+// public Object aroundMethods(ProceedingJoinPoint joinPoint) throws Throwable {
+//// System.out.println("New interception: " + joinPoint.getSignature());
+// if (tigrisCoordinator.isAllowed(joinPoint)) {
+// return tigrisCoordinator.process(joinPoint);
+// } else {
+// return joinPoint.proceed();
+// }
+// }
+//}
\ No newline at end of file
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java
index c2bfacd..21dffe9 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/monitoring/aspects/TigrisCoordinator.java
@@ -58,11 +58,6 @@ public class TigrisCoordinator implements Runnable {
private final BnfParser parser = new BnfParser(Resources.getResource("tigrisdsl.bnf").openStream());
- private final ScheduledExecutorService samplingAdaptationExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
- "sampling-adaptation",
- "adapting sampling rate and sample readiness"
- ));
-
private final ScheduledExecutorService lightweightAnalyzerExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
"lightweight-analyzer",
"computing lightweight metrics and setting allowed methods"
@@ -90,12 +85,9 @@ public class TigrisCoordinator implements Runnable {
}
allowedPattern = Pattern.compile(componentScanConfig.allowed());
deniedPattern = Pattern.compile(componentScanConfig.denied());
- logger.info("@TigrisConfiguration will trace and cache methods into {} and deny {} package.", allowedPattern.pattern(), deniedPattern.pattern());
+ logger.info("@TigrisConfiguration will trace methods into {} and deny {} package.", allowedPattern.pattern(), deniedPattern.pattern());
sampling = new Sampling(tigrisConfiguration.samplingPercentage(), tigrisConfiguration.cycleTimeInMilliseconds(), tigrisConfiguration.adaptiveSamplingRate());
- //TODO when to run it?
- samplingAdaptationExecutor.scheduleWithFixedDelay(
- sampling, 120000, 450000, TimeUnit.MILLISECONDS);
repository = RepositoryFactory.getRepository(null, tigrisConfiguration.logRepository());
@@ -189,13 +181,14 @@ public class TigrisCoordinator implements Runnable {
Granularity granularity = new Granularity(tigrisCriteria.granularity(), signature);
+ //TODO: this is completely different now; need to review every usage of sampling here
if (tigrisConfiguration.adaptiveSamplingRate() && sampling.isPerformanceBaselineEnabled()) {
sampling.addPerformanceBaselineItem(granularity, endTime - startTime);
}
//trace only allowed by lightweight metrics
// boolean shouldSample = sampling.simpleSamplingDecision();
- boolean shouldSample = sampling.samplingDecision(granularity);
+ boolean shouldSample = sampling.samplingDecision(granularity, endTime - startTime);
if (coarseMonitoringEnabled
&& detailedTrace
&& shouldSample) {
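
The hunk above now passes the measured execution time into sampling.samplingDecision(...). Under the hood, that decision still starts from a Bernoulli draw (a one-trial BinomialDistribution), as shown further down in Sampling.java. A minimal, self-contained sketch of that draw; the class name and the 10% rate are illustrative only:

```java
import org.apache.commons.math3.distribution.BinomialDistribution;

public class SamplingDecisionSketch {
    public static void main(String[] args) {
        double samplingRate = 0.1; // illustrative rate; Tigris takes it from @TigrisConfiguration
        BinomialDistribution coin = new BinomialDistribution(1, samplingRate);

        int sampled = 0, total = 100_000;
        for (int i = 0; i < total; i++) {
            // one Bernoulli trial per intercepted call, as in Sampling.simpleSamplingDecision()
            if (coin.sample() == 1) {
                sampled++;
            }
        }
        System.out.printf("sampled %d of %d calls (~%.1f%%)%n", sampled, total, 100.0 * sampled / total);
    }
}
```
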
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/FrequencyDataSet.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/FrequencyDataSet.java
index ad3677d..94bcce7 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/FrequencyDataSet.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/FrequencyDataSet.java
@@ -2,22 +2,19 @@ package br.ufrgs.inf.prosoft.tigris.sampling;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
-import java.util.HashMap;
+import java.math.BigDecimal;
+import java.math.MathContext;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
public class FrequencyDataSet {
- private Map<Granularity, Integer> granularityPopulation = new HashMap<>();
+ private Map<Granularity, Integer> granularityPopulation = new ConcurrentHashMap<>();
private long n;
public void addItem(Granularity item) {
- if (granularityPopulation.containsKey(item)){
- granularityPopulation.put(item, granularityPopulation.get(item) + 1);
- }
- else {
- granularityPopulation.put(item, 1);
- }
+ granularityPopulation.put(item, granularityPopulation.getOrDefault(item, 0) + 1);
n++;
}
@@ -29,7 +26,7 @@ public class FrequencyDataSet {
Integer occurrences = granularityPopulation.get(granularity);
if (occurrences == null)
return 0;
- return occurrences / getTotalItems();
+ return new BigDecimal(occurrences).divide(new BigDecimal(getTotalItems()), MathContext.DECIMAL128).doubleValue();
}
public Set<Granularity> getGranularities(){
@@ -47,4 +44,17 @@ public class FrequencyDataSet {
public Map<Granularity, Integer> getTraceFrequency() {
return granularityPopulation;
}
+
+ @Override
+ public String toString() {
+ return "FrequencyDataSet{" +
+ "granularityPopulation=" + granularityPopulation +
+ ", n=" + n +
+ '}';
+ }
+
+ public void clear() {
+ granularityPopulation.clear();
+ n = 0;
+ }
}
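
The getProportion change above is a real fix: the old code divided an Integer by a long, so the result truncated to zero for anything but a 100% proportion, which in turn defeated the proportion check in the sampling decision. A self-contained illustration with made-up values:

```java
import java.math.BigDecimal;
import java.math.MathContext;

public class ProportionSketch {
    public static void main(String[] args) {
        Integer occurrences = 250;
        long totalItems = 1000;

        // Old behaviour: integer division truncates every proportion below 1 down to 0.
        double truncated = occurrences / totalItems;

        // New behaviour: BigDecimal division keeps the fractional part.
        double exact = new BigDecimal(occurrences)
                .divide(new BigDecimal(totalItems), MathContext.DECIMAL128)
                .doubleValue();

        System.out.println(truncated); // 0.0
        System.out.println(exact);     // 0.25
    }
}
```
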
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Granularity.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Granularity.java
index 49e6881..a575b76 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Granularity.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Granularity.java
@@ -25,4 +25,12 @@ public class Granularity {
public int hashCode() {
return Objects.hash(granularityType, name);
}
+
+ @Override
+ public String toString() {
+ return "Granularity{" +
+ "granularityType=" + granularityType +
+ ", name='" + name + '\'' +
+ '}';
+ }
}
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java
index bb9aa55..2ab10b1 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/PerformanceBaselineDataSet.java
@@ -1,23 +1,27 @@
package br.ufrgs.inf.prosoft.tigris.sampling;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
-import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
+import org.apache.commons.math3.stat.descriptive.moment.Mean;
+import org.apache.commons.math3.stat.descriptive.moment.Variance;
+import org.apache.commons.math3.util.FastMath;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.Map;
-import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
public class PerformanceBaselineDataSet {
- private Map<Granularity, SummaryStatistics> granularityBaseline = new ConcurrentHashMap<>();
- private SummaryStatistics overallBaseline = new SummaryStatistics();
- private long n;
+ Logger logger = LoggerFactory.getLogger(PerformanceBaselineDataSet.class);
+
+ double weights[] = {0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.06, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.07, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.08, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 
0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.09, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.11, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.12, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.13, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.14, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.15, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.16, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.17, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.18, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.19, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.21, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.22, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.23, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.24, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.26, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.27, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.28, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.29, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.32, 0.32, 0.32, 0.32, 0.32, 0.32, 0.32, 0.32, 0.33, 0.33, 0.33, 0.33, 0.33, 0.33, 0.33, 0.34, 0.34, 0.34, 0.34, 0.34, 0.34, 0.34, 0.34, 0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.35, 0.36, 0.36, 0.36, 0.36, 0.36, 0.36, 0.36, 0.37, 0.37, 0.37, 0.37, 0.37, 0.37, 0.37, 0.38, 0.38, 0.38, 0.38, 0.38, 0.38, 0.38, 0.39, 0.39, 0.39, 0.39, 0.39, 0.39, 0.39, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.41, 0.41, 0.41, 0.41, 0.41, 0.41, 0.42, 0.42, 0.42, 0.42, 0.42, 0.42, 0.42, 0.43, 0.43, 0.43, 0.43, 0.43, 0.43, 0.44, 0.44, 0.44, 0.44, 0.44, 0.45, 0.45, 0.45, 0.45, 0.45, 0.45, 0.46, 0.46, 0.46, 0.46, 0.46, 0.46, 0.47, 0.47, 0.47, 0.47, 0.47, 0.48, 0.48, 0.48, 0.48, 0.48, 0.48, 0.49, 0.49, 0.49, 0.49, 0.49, 0.5, 0.5, 0.5, 0.5, 0.5, 0.51, 0.51, 0.51, 0.51, 0.51, 0.52, 0.52, 0.52, 0.52, 0.52, 0.53, 0.53, 0.53, 0.53, 0.53, 0.54, 0.54, 0.54, 0.54, 0.54, 0.55, 0.55, 0.55, 0.55, 0.56, 0.56, 0.56, 0.56, 0.56, 0.57, 0.57, 0.57, 0.57, 0.57, 0.58, 0.58, 0.58, 0.58, 0.59, 0.59, 0.59, 0.59, 0.6, 0.6, 0.6, 0.6, 0.6, 0.61, 0.61, 0.61, 0.61, 0.62, 0.62, 0.62, 0.62, 0.63, 0.63, 0.63, 0.63, 0.64, 0.64, 0.64, 0.64, 0.65, 0.65, 0.65, 0.65, 0.66, 0.66, 0.66, 0.66, 0.67, 0.67, 0.67, 0.67, 0.68, 0.68, 0.68, 0.68, 0.69, 0.69, 0.69, 0.69, 0.7, 0.7, 0.7, 0.71, 0.71, 0.71, 0.71, 0.72, 0.72, 0.72, 0.73, 0.73, 0.73, 0.73, 0.74, 0.74, 0.74, 0.74, 0.75, 0.75, 0.75, 0.76, 0.76, 0.76, 0.77, 0.77, 0.77, 0.77, 0.78, 0.78, 0.78, 0.79, 0.79, 0.79, 0.8, 0.8, 0.8, 0.8, 0.81, 0.81, 0.81, 0.82, 0.82, 0.82, 0.83, 0.83, 0.83, 0.84, 0.84, 0.84, 0.85, 0.85, 0.85, 0.86, 0.86, 0.86, 0.87, 0.87, 0.87, 0.88, 0.88, 0.88, 0.89, 0.89, 0.89, 0.9, 0.9, 0.9, 0.91, 0.91, 0.91, 0.92, 0.92, 0.92, 0.93, 0.93, 0.94, 0.94, 0.94, 0.95, 0.95, 0.95, 0.96, 0.96, 
0.96, 0.97, 0.97, 0.98, 0.98, 0.98, 0.99, 0.99};
+
+ private Map<Granularity, DescriptiveStatistics> granularityBaseline = new ConcurrentHashMap<>();
+ private DescriptiveStatistics overallBaseline = new DescriptiveStatistics(1200);
+ private int n = 0;
public void addItem(Granularity item, long executionTime) {
- SummaryStatistics statistics = new SummaryStatistics();
- if (granularityBaseline.containsKey(item)){
- statistics = granularityBaseline.get(item);
- }
+ DescriptiveStatistics statistics = granularityBaseline.getOrDefault(item, new DescriptiveStatistics(1200));
statistics.addValue(executionTime);
granularityBaseline.put(item, statistics);
n++;
@@ -25,20 +29,46 @@ public class PerformanceBaselineDataSet {
overallBaseline.addValue(executionTime);
}
- public Apdex getApdexResultsPerEvent(Map<Granularity, DescriptiveStatistics> sampledDataSet){
+ //TODO compare baseline against baseline, not against sampled items
+ //Option 1: keep the baseline with the lowest mean of all - in theory that is the closest to the "real" application, with no influence from monitoring or lack of resources - a global minimum
+ //compare the sample against the global minimum
+
+ //need to detect global max and min to avoid "getting used" to bad things
+ //shorten the baseline rounds (larger gaps between them), keep max and min, maybe look at quartiles?
+ //if q1 is getting larger, load and overhead are going up; if it is getting smaller, they are going down
+ //questions: is the application ok without monitoring? is this its best performance? its worst? how far from the extremes is it?
+ //do not compare baseline against sample, it does not make sense - it is either sample against sample, or baseline against sample
+ //baseline against sample tells the sampling overhead
+ //baseline against baseline tells the application load
+ public Apdex getApdexResultsPerEvent(Map<Granularity, DescriptiveStatistics> sampledDataSet) {
long satisfied = 0, tolerated = 0, n = 0;
- for (Granularity granularity : sampledDataSet.keySet()){
- SummaryStatistics stats = granularityBaseline.get(granularity);
- double meanPlusStd = stats.getMean() + stats.getStandardDeviation();
- for (double value: sampledDataSet.get(granularity).getValues()) {
- if (value < meanPlusStd){
+ for (Map.Entry<Granularity, DescriptiveStatistics> baselineEntry : granularityBaseline.entrySet()) {
+ DescriptiveStatistics stats = baselineEntry.getValue();
+ double mean = stats.getMean();
+ double std = stats.getStandardDeviation();
+ if (stats.getN() == stats.getWindowSize()) {
+ mean = new Mean().evaluate(stats.getValues(), weights);
+ std = FastMath.sqrt(new Variance().evaluate(stats.getValues(), weights));
+ }
+
+ double meanPlusStd = mean + std;
+ DescriptiveStatistics descriptiveStatistics = sampledDataSet.get(baselineEntry.getKey());
+ if (descriptiveStatistics == null)
+ continue;
+ for (double value : descriptiveStatistics.getValues()) {
+// if (value <= mean) {
+// satisfied++;
+// }
+// if (value > mean &&
+// value < meanPlusStd) {
+// tolerated++;
+// }
+ if (value <= meanPlusStd) {
satisfied++;
- continue;
}
if (value > meanPlusStd &&
- value < stats.getMean() + (2 * stats.getStandardDeviation())) {
+ value < mean + (2 * std)) {
tolerated++;
- continue;
}
n++;
}
@@ -46,22 +76,43 @@ public class PerformanceBaselineDataSet {
return new Apdex(satisfied, tolerated, n);
}
- public Apdex getApdexResults(Map<Granularity, DescriptiveStatistics> sampledDataSet){
+ /**
+ * Compare the results against the overall statistics
+ * However, some methods may be really fast and others really slow -
+ * if any discrepancy is found, maybe we should use getApdexResultsPerEvent
+ *
+ * @param sampledDataSet
+ * @param lastSampledTimes
+ * @return
+ */
+ public Apdex getApdexResults(Map<Granularity, DescriptiveStatistics> sampledDataSet, DescriptiveStatistics lastSampledTimes) {
long satisfied = 0, tolerated = 0, n = 0;
- for (Granularity granularity : sampledDataSet.keySet()){
- //TODO is it ok to compare with the overall?
- //some methods may be really fast and some really huge
- //should we use getApdexResultsPerEvent?
- double meanPlusStd = getOverallAvg() + getOverallStd();
- for (double value: sampledDataSet.get(granularity).getValues()) {
- if (value < meanPlusStd){
+ double overallMean = getOverallAvg();
+ double overallStd = getOverallStd();
+
+ if (overallBaseline.getN() == overallBaseline.getWindowSize()) {
+ overallMean = new Mean().evaluate(overallBaseline.getValues(), weights);
+ overallStd = FastMath.sqrt(new Variance().evaluate(overallBaseline.getValues(), weights));
+ }
+
+ double meanPlusStd = overallMean + overallStd;
+
+ for (DescriptiveStatistics granularityTraces : sampledDataSet.values()) {
+ for (double value : granularityTraces.getValues()) {
+// for (double value : lastSampledTimes.getValues()) {
+// if (value <= overallMean) {
+// satisfied++;
+// }
+// if (value > overallMean &&
+// value < meanPlusStd) {
+// tolerated++;
+// }
+ if (value <= meanPlusStd) {
satisfied++;
- continue;
}
if (value > meanPlusStd &&
- value < getOverallAvg() + (2 * getOverallStd())) {
+ value < overallMean + (2 * overallStd)) {
tolerated++;
- continue;
}
n++;
}
@@ -77,11 +128,14 @@ public class PerformanceBaselineDataSet {
return overallBaseline.getStandardDeviation();
}
- public long getTotalItems(){
+ public long getTotalItems() {
return n;
}
- public Set<Granularity> getGranularities(){
- return granularityBaseline.keySet();
+ public void clear() {
+ //TODO should we not clear this?
+ n = 0;
+// overallBaseline.clear();
+// granularityBaseline.clear();
}
}
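
To make the Apdex logic above concrete: sampled execution times are classified as satisfied (up to the baseline mean plus one standard deviation) or tolerated (up to the mean plus two standard deviations), and adaptSamplingRate in Sampling.java below turns that into an impact score, raising, keeping, or lowering the sampling rate depending on whether the impact is below 0.1, between 0.1 and 0.2, or above 0.2. A self-contained sketch using the same classification bounds and impact formula as this diff; all data values are made up:

```java
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class ApdexImpactSketch {
    public static void main(String[] args) {
        // Baseline execution times collected while detailed tracing is off (made-up values, in ns).
        DescriptiveStatistics baseline = new DescriptiveStatistics();
        for (double t : new double[]{100, 110, 95, 105, 120}) baseline.addValue(t);
        double mean = baseline.getMean();
        double std = baseline.getStandardDeviation();

        // Execution times observed while sampling was active (made-up values, in ns).
        double[] sampledTimes = {102, 118, 180, 240, 130};

        long satisfied = 0, tolerated = 0, n = 0;
        for (double value : sampledTimes) {
            if (value <= mean + std) {
                satisfied++;                    // within one standard deviation of the baseline
            } else if (value < mean + 2 * std) {
                tolerated++;                    // within two standard deviations
            }
            n++;
        }

        // Same impact formula as Sampling.adaptSamplingRate: 1 minus the Apdex score.
        double impact = 1 - ((satisfied + 0.5 * tolerated) / n);
        System.out.printf("satisfied=%d tolerated=%d n=%d impact=%.2f%n", satisfied, tolerated, n, impact);
    }
}
```
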
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java
index b4b79b3..3a5b3c0 100644
--- a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/Sampling.java
@@ -1,32 +1,41 @@
package br.ufrgs.inf.prosoft.tigris.sampling;
-import org.apache.commons.collections4.queue.CircularFifoQueue;
import org.apache.commons.math3.distribution.BinomialDistribution;
import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
+import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.inference.TestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.math.BigDecimal;
import java.util.Map;
-import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
/**
- * The type Sampling decision.
+ * The sampling control and decision.
*/
-public class Sampling implements Runnable {
+public class Sampling {
+ //constructor parameters - defined once
private final boolean adaptiveSamplingRate;
- private boolean samplingEnabled = true;
- private boolean performanceBaselineEnabled = false;
+ private final long cycleLengthInMilliseconds;
+ private BinomialDistribution binomialDistSampling;
private double samplingRate; // in percentage, 0 to 1
+
+ //control vars
+ private boolean performanceBaselineEnabled = false;
+
+ // recreated every new monitoring cycle
+ private long startTime;
+ private ExponentialDecayFunction decayingPrecision;
private FrequencyDataSet population = new FrequencyDataSet(), sample = new FrequencyDataSet();
private PerformanceBaselineDataSet performanceBaselineDataSet = new PerformanceBaselineDataSet();
+ private Map<Granularity, DescriptiveStatistics> lowestPerformanceBaselineDataSet = new ConcurrentHashMap<>();
private Map<Granularity, DescriptiveStatistics> sampledDataSet = new ConcurrentHashMap<>();
- private Queue<PerformanceBaselineDataSet> lastFourPerformanceBaselineDataSets = new CircularFifoQueue<>(4);
- private ExponentialDecayFunction decayingPrecision;
- private long startTime;
+
+ //PBA history
+// private Queue<PerformanceBaselineDataSet> lastFourPerformanceBaselineDataSets = new CircularFifoQueue<>(4);
Logger logger = LoggerFactory.getLogger(Sampling.class);
@@ -38,17 +47,29 @@ public class Sampling implements Runnable {
private double z = 1.96, p = 0.5, e = 0.05;
public Sampling(double initialSamplingRate, long cycleLengthInMilliseconds, boolean adaptiveSamplingRate) {
- samplingRate = initialSamplingRate;
- decayingPrecision = new ExponentialDecayFunction(1, 0.00001, cycleLengthInMilliseconds);
+ this.samplingRate = initialSamplingRate;
this.adaptiveSamplingRate = adaptiveSamplingRate;
+ this.cycleLengthInMilliseconds = cycleLengthInMilliseconds;
+ this.binomialDistSampling = new BinomialDistribution(1, samplingRate);
startMonitoringCycle();
}
+ private Object binomialDistSamplingLock = new Object();
+ private void resetSamplingDistribution(){
+ synchronized (this.binomialDistSamplingLock) {
+ this.binomialDistSampling = new BinomialDistribution(1, samplingRate);
+ }
+ }
+
public boolean simpleSamplingDecision(){
- return new BinomialDistribution(1, samplingRate).sample() == 1; // sampling rate evaluation
+ synchronized (this.binomialDistSamplingLock) {
+ return binomialDistSampling.sample() == 1; // sampling rate evaluation
+ }
}
- public boolean samplingDecision(Granularity granularity) {
+ public boolean samplingDecision(Granularity granularity, long executionTime) {
+ if (population.getTotalItems() == 0)
+ startTime = System.currentTimeMillis();
population.addItem(granularity);
if (performanceBaselineEnabled) {
@@ -56,52 +77,148 @@ public class Sampling implements Runnable {
}
boolean simpleSamplingDecision = simpleSamplingDecision();
- boolean decision;
- if (adaptiveSamplingRate) {
- decision = samplingEnabled
- && simpleSamplingDecision // sampling rate evaluation
- && population.getProportion(granularity) >= sample.getProportion(granularity); // sample has not enough items of that granularity compared to the population
- } else {
- decision = simpleSamplingDecision;
+ if (adaptiveSamplingRate
+ && simpleSamplingDecision
+ && population.getProportion(granularity) >= sample.getProportion(granularity)
+ ) // the sample does not have enough items of that granularity compared to the population
+ {
+ return true;
+ }
+ return simpleSamplingDecision;
+ }
+
+ public boolean isReady() {
+ double decayingConfidenceFactor = decayingConfidenceFactor(getMonitoringCycleTime());
+ boolean hasMinimumSize = sample.getTotalItems() > getMinimumSampleSize(z - (z * decayingConfidenceFactor));
+ boolean hasSameProportion = isSameProportion(decayingConfidenceFactor);
+ boolean hasComparedMean = tTestEvaluation(decayingConfidenceFactor);
+
+ return adaptiveSamplingRate
+ // margin of error is lower than threshold
+// && getSampleSizeErrorMargin(z * decayingConfidenceFactor) < e
+ // the sample has the min sample size based on the population
+ && hasMinimumSize
+ // proportion test
+ && hasSameProportion
+ // t-test
+ && hasComparedMean;
+ }
+
+ private Object samplingRateLock = new Object();
+ public void adaptSamplingRate() {
+ synchronized (samplingRateLock) {
+ if (this.sampledDataSet.isEmpty()) {
+ logger.info("No sampled data, doing nothing...");
+ //if no impact, increase by 1%
+// samplingRate += 0.01;
+//
+// if (samplingRate > 1)
+// samplingRate = 1;
+//
+// logger.info("New sampling rate: {}", samplingRate);
+// this.resetSamplingDistribution();
+ return;
+ }
+
+// this.performanceBaselineDataSet.
+//
+// lowestPerformanceBaselineDataSet
+// Apdex apdex = this.performanceBaselineDataSet.getApdexResults(this.sampledDataSet, this.lastSampledTimes);
+ Apdex apdex = this.performanceBaselineDataSet.getApdexResultsPerEvent(this.sampledDataSet);
+ double impact = 1 - ((apdex.getSatisfied() + 0.5 * apdex.getTolerated()) / apdex.getN());
+
+ //if we have just 1 tolerated, the impact will not be zero anymore
+ if (impact <= 0.1) {
+ logger.info("No monitoring impact detected: {}, increasing the sampling rate...", impact);
+ //if no impact, increase by 10%
+ samplingRate += 0.1;
+ } else
+ //otherwise stays the same - not necessary here
+ if (impact > 0.1 && impact <= 0.2) {
+ logger.info("Minimal monitoring impact detected: {}, keeping it the same...", impact);
+ } else if (impact > 0.2) {
+ double reduction = impact - 0.2;
+ logger.info("Monitoring impact detected: {}, decreasing the current sampling rate {} by {}%", impact, samplingRate, reduction);
+// logger.info("{}, {}, {}", apdex.getSatisfied(), apdex.getTolerated(), apdex.getN());
+// logger.info("{}", this.performanceBaselineDataSet.getOverallAvg());
+// logger.info("{}", this.performanceBaselineDataSet.getOverallStd());
+// logger.info("{}", this.performanceBaselineDataSet.getTotalItems());
+
+ //reduce by the amount of overhead
+ samplingRate = samplingRate - (samplingRate * (reduction / 1d));
+ }
+
+ if (samplingRate < 0)
+ samplingRate = 0;
+
+ if (samplingRate > 1)
+ samplingRate = 1;
+
+ //update the binomial with the new sampling rate distribution
+ resetSamplingDistribution();
+ logger.info("New sampling rate: {}", samplingRate);
}
+ }
+
+ public void addPerformanceBaselineItem(Granularity granularity, long executionTime) {
+ if(this.performanceBaselineDataSet.getTotalItems() < minimumSampleSize) {
+ this.performanceBaselineDataSet.addItem(granularity, executionTime);
+ }
+ }
- if (decision)
- sample.addItem(granularity);
+ DescriptiveStatistics lastSampledTimes = new DescriptiveStatistics(1200);
+ public void addSampledItem(Granularity granularity, long startTime) {
+ sample.addItem(granularity);
- return decision;
+ DescriptiveStatistics statistics = sampledDataSet.getOrDefault(granularity, new DescriptiveStatistics());
+ long time = System.nanoTime() - startTime;
+ statistics.addValue(time);
+ lastSampledTimes.addValue(time);
+ sampledDataSet.put(granularity, statistics);
}
- public void setSamplingRate(double samplingRate){
- this.samplingRate = samplingRate;
+ public long getMonitoringCycleTime(){
+ return (System.currentTimeMillis() - startTime);
}
- public boolean isReady() {
- double decayingConfidenceFactor = decayingConfidenceFactor(getMonitoringCycleTime());
- return
- // margin of error is lower than threshold
- getSampleSizeErrorMargin(z * decayingConfidenceFactor) < e
- // the sample has the min sample size based on the population
- && sample.getTotalItems() > getMinimumSampleSize(z * decayingConfidenceFactor)
- // proportion test
- && isSameProportion(decayingConfidenceFactor)
- // t-test
- && tTestEvaluation(decayingConfidenceFactor);
+ public boolean isPerformanceBaselineEnabled() {
+ return performanceBaselineEnabled;
}
- private double decayingConfidenceFactor(long timeInMilliseconds){
- return decayingPrecision.value(timeInMilliseconds);
+ public double decayingConfidenceFactor(long timeInMilliseconds){
+ synchronized (decayingPrecisionLock) {
+ return new BigDecimal(decayingPrecision.value(timeInMilliseconds))
+ .setScale(4, BigDecimal.ROUND_FLOOR).doubleValue();
+ }
}
private boolean tTestEvaluation(double decayingConfidenceFactor) {
- //To test the (one-sample t-test - compare with the population mean)
- // hypothesis sample mean = mu at the 95% level
- return TestUtils.tTest(population.getAsDescriptiveStatistics().getMean(),
- sample.getAsDescriptiveStatistics(),
- 0.05 * decayingConfidenceFactor);
+ SummaryStatistics sampleAsDescriptiveStatistics = sample.getAsDescriptiveStatistics();
+ if (sampleAsDescriptiveStatistics.getN() < 2) return true;
+
+ if (sampleAsDescriptiveStatistics.getVariance() == 0) return true;
+
+ SummaryStatistics populationAsDescriptiveStatistics = population.getAsDescriptiveStatistics();
+ double popMean = populationAsDescriptiveStatistics.getMean();
+
+ //for some reason, t-test returns false when the sets are exactly the same...
+ if (sample.getTotalItems() == population.getTotalItems())
+ return true;
+
+ double significanceLevel = 0.5 - (0.5 * decayingConfidenceFactor);
+ if (significanceLevel == 0.5) //maximum cycle time reached
+ return true;
+ else {
+ //To test the (one-sample t-test - compare with the population mean)
+ // hypothesis sample mean = mu at the 95% level
+ return TestUtils.tTest(popMean,
+ sampleAsDescriptiveStatistics,
+ 0.5 - (0.5 * decayingConfidenceFactor));
+ }
}
//sample proportion is the same as population
- public boolean isSameProportion(double decayingConfidenceFactor) {
+ private boolean isSameProportion(double decayingConfidenceFactor) {
return population.getGranularities().stream().allMatch(
granularity -> {
double popProportion = population.getProportion(granularity);
@@ -113,131 +230,66 @@ public class Sampling implements Runnable {
});
}
- /**
- * @return the minimum sample size for the population
- */
- public long getMinimumSampleSize() {
- return getMinimumSampleSize(population.getTotalItems());
- }
-
- public long getMinimumSampleSize(long n) {
+ private long getMinimumSampleSize(long n) {
return getMinimumSampleSize(n, z);
}
- public long getMinimumSampleSize(double precision) {
+ private long getMinimumSampleSize(double precision) {
return getMinimumSampleSize(population.getTotalItems(), precision);
}
- public long getMinimumSampleSize(long n, double precision) {
+ private long getMinimumSampleSize(long n, double precision) {
+ if (n <= 1) return 0;
long n_inf = (long) ((Math.pow(precision, 2) * p * (1 - p)) / Math.pow(e, 2));
return n_inf / (1 + ((n_inf - 1) / n));
}
- public double getSampleSizeErrorMargin(double precision) {
+ private double getSampleSizeErrorMargin(double precision) {
+ if (population.getTotalItems() <= 1) return 0;
double e_n_inf = Math.sqrt((Math.pow(precision, 2) * p * (1 - p)) / sample.getTotalItems());
return e_n_inf * Math.sqrt((population.getTotalItems() - sample.getTotalItems()) / (population.getTotalItems() - 1));
}
- public long getMonitoringCycleTime(){
- return (System.currentTimeMillis() - startTime);
+ private Object decayingPrecisionLock = new Object();
+ public void startMonitoringCycle() {
+ synchronized (decayingPrecisionLock) {
+ this.decayingPrecision = new ExponentialDecayFunction(1, 0.001, cycleLengthInMilliseconds);
+ }
+ this.sample.clear();
+ this.population.clear();
+ this.sampledDataSet.clear();
+ this.startTime = System.currentTimeMillis();
+ logger.info("Monitoring is reset...");
}
public void endMonitoringCycle() {
- this.sampledDataSet = new ConcurrentHashMap<>();
- logger.info("Adaptive Sampling Monitoring Cycle Finished - Sample traces ({}): {}", getSample().getTotalItems(),
- getSample().getTraceFrequency());
- logger.info("Adaptive Sampling Monitoring Cycle Finished - Population traces ({}): {}", getPopulation().getTotalItems(),
- getPopulation().getTraceFrequency());
+ logger.info("Adaptive Sampling Monitoring Cycle Finished - Sample traces: {}", getSample().getTotalItems());
+ logger.info("Adaptive Sampling Monitoring Cycle Finished - Population traces: {}", getPopulation().getTotalItems());
startMonitoringCycle();
}
- private void startMonitoringCycle() {
- startTime = System.currentTimeMillis();
- }
-
- public boolean shouldCollectPerformanceBaseline() {
- return new BinomialDistribution(1, 0.1).sample() == 1;
- }
-
- public void adaptSamplingRate() {
- Apdex apdex = this.performanceBaselineDataSet.getApdexResults(this.sampledDataSet);
- double impact = 1 - ((apdex.getSatisfied() + 0.5 * apdex.getTolerated()) / apdex.getN());
- if (impact == 0) {
- logger.info("No monitoring impact detected, increasing the sampling rate...");
- //if no impact, increase by 1%
- samplingRate += 0.01;
- }
- if (impact > 0.1) {
- logger.info("Monitoring impact detected, decreasing the sampling rate by {}...", impact);
- //reduce by the amount of overhead
- samplingRate = samplingRate - impact;
- }
-
- //otherwise stays the same - not necessary here
- //if (impact > 0 && impact <= 0.1) { }
- }
-
- @Override
- public void run() {
- //TODO this is supposed to run from time to time based on triggers? Or every new trace collected?
-
- //this method deals with sampling rate adaptation procedures
- if (!adaptiveSamplingRate)
- return;
-
- if (isReady()) {
- logger.info("Sample is ready, releasing for analysis and resetting...");
- endMonitoringCycle();
- return;
- }
- if (shouldCollectPerformanceBaseline()) {
- logger.info("Enabling performance baseline...");
- enablePerformanceBaseline();
+ private Long minimumSampleSize;
+ public void managePerformanceBaseline() {
+ if (performanceBaselineEnabled) { //is it already enabled?
+ if (this.performanceBaselineDataSet.getTotalItems() >= minimumSampleSize) { //got enough traces for PB
+ logger.info("Collected performance baseline of {} traces", this.performanceBaselineDataSet.getTotalItems());
+ performanceBaselineEnabled = false;
+ minimumSampleSize = null;
+// lastFourPerformanceBaselineDataSets.add(this.performanceBaselineDataSet);
+ adaptSamplingRate(); //adapt the sampling rate
+ this.performanceBaselineDataSet.clear();
+ }
return;
}
- adaptSamplingRate();
- }
-
- private void enablePerformanceBaseline() {
- performanceBaselineEnabled = true;
- }
- public boolean isPerformanceBaselineEnabled() {
- return performanceBaselineEnabled;
- }
-
- private Long minimumSampleSize;
- public void addPerformanceBaselineItem(Granularity granularity, long executionTime) {
- if (minimumSampleSize == null) {
+ double chance = new BinomialDistribution(1, 0.3d).sample();
+ if (chance == 1) {
minimumSampleSize = getMinimumSampleSize(this.population.getTotalItems());
+ if (minimumSampleSize > 0) {
+ logger.info("Enabling performance baseline that needs {} traces.", minimumSampleSize);
+ performanceBaselineEnabled = true;
+ }
}
-
- logger.info("Collecting performance baseline for the next {} traces...",
- minimumSampleSize - this.performanceBaselineDataSet.getTotalItems());
- this.performanceBaselineDataSet.addItem(granularity, executionTime);
-
- if(this.performanceBaselineDataSet.getTotalItems() > minimumSampleSize) {
- //got enough traces for PB
- logger.info("Finished to collect the performance baseline, enabling sampling again...");
- samplingEnabled = true;
- minimumSampleSize = null;
- this.performanceBaselineEnabled = false;
- lastFourPerformanceBaselineDataSets.add(this.performanceBaselineDataSet);
- this.performanceBaselineDataSet = new PerformanceBaselineDataSet();
- }
- }
-
- public void addSampledItem(Granularity granularity, long executionTime) {
- DescriptiveStatistics statistics = new DescriptiveStatistics();
- if (sampledDataSet.containsKey(granularity)){
- statistics = sampledDataSet.get(granularity);
- }
- statistics.addValue(executionTime);
- sampledDataSet.put(granularity, statistics);
-
-
- //TODO run it every new trace collected?
- run();
}
public FrequencyDataSet getSample() {
@@ -247,4 +299,16 @@ public class Sampling implements Runnable {
public FrequencyDataSet getPopulation() {
return population;
}
+
+ public Map<Granularity, DescriptiveStatistics> getSampledTraces() {
+ return sampledDataSet;
+ }
+
+ public double getSamplingRate() {
+ return samplingRate;
+ }
+
+ public boolean isAdaptiveSamplingRate() {
+ return adaptiveSamplingRate;
+ }
}
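
The readiness logic above hinges on getMinimumSampleSize, which is Cochran's sample-size formula with a finite-population correction: n_inf = z^2 * p * (1 - p) / e^2, then n = n_inf / (1 + (n_inf - 1) / N), with z, p, and e as declared in the class (1.96, 0.5, 0.05) and z further scaled by the decaying confidence factor during a cycle. A self-contained sketch of the arithmetic, done in floating point here so the correction is visible:

```java
public class MinimumSampleSizeSketch {
    // Same constants as Sampling: 95% confidence (z), maximum variability (p), 5% margin of error (e).
    static final double Z = 1.96, P = 0.5, E = 0.05;

    // Cochran's formula with finite-population correction, computed in floating point.
    // Note: Sampling.getMinimumSampleSize performs (n_inf - 1) / n on longs, so the
    // correction term truncates to zero whenever the population is larger than n_inf.
    static double minimumSampleSize(long populationSize) {
        double nInf = (Z * Z * P * (1 - P)) / (E * E);
        return nInf / (1 + ((nInf - 1) / populationSize));
    }

    public static void main(String[] args) {
        System.out.println(minimumSampleSize(500));     // ~217
        System.out.println(minimumSampleSize(2_000));   // ~322
        System.out.println(minimumSampleSize(100_000)); // ~383
    }
}
```
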
diff --git a/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java
new file mode 100644
index 0000000..32bb6b0
--- /dev/null
+++ b/tigris/src/main/java/br/ufrgs/inf/prosoft/tigris/sampling/SamplingAspect.java
@@ -0,0 +1,119 @@
+package br.ufrgs.inf.prosoft.tigris.sampling;
+
+import br.ufrgs.inf.prosoft.tigris.configuration.annotation.TigrisConfiguration;
+import br.ufrgs.inf.prosoft.tigris.exceptions.ConfigurationException;
+import br.ufrgs.inf.prosoft.tigris.monitoring.util.threads.NamedThreads;
+import br.ufrgs.inf.prosoft.tigris.utils.ConfigurationUtils;
+import org.aspectj.lang.ProceedingJoinPoint;
+import org.aspectj.lang.annotation.Around;
+import org.aspectj.lang.annotation.Aspect;
+import org.aspectj.lang.annotation.Pointcut;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import static java.lang.System.nanoTime;
+
+@Aspect
+public class SamplingAspect implements Runnable {
+
+ @Pointcut(
+ //any execution except the framework itself
+ "(" +
+ "(execution(* org.dacapo.h2.TPCCSubmitter.runTransaction(..)) || " + //h2
+ "execution(* org.dacapo.lusearch.Search.doPagingSearch(..))) " + //lusearch
+// "(execution(* *(..)) && !within(br.ufrgs.inf.prosoft..*) " +
+ //avoid calls from repository while serializing objects, it is necessary if a hash could not be used
+ //"&& !cflow(call(* br.ufrgs.inf.prosoft.tigris.monitoring.storage..*(..))) " +
+ //conditional to enable and disable at runtime
+ "&& if())"
+ )
+ public static boolean anyCall() {
+ return enabled;
+ }
+
+ //tigris config
+ private final TigrisConfiguration tigrisConfiguration;
+ private final Sampling sampling;
+
+ public SamplingAspect() {
+ Class<?> configClass = ConfigurationUtils.getAvailableConfigurationClass(TigrisConfiguration.class);
+ if (configClass == null) {
+ logger.info("Tigris tracing disabled, there is no annotations.");
+ throw new ConfigurationException("Tigris tracing disabled, there is no annotations.");
+ }
+
+ tigrisConfiguration = configClass.getAnnotation(TigrisConfiguration.class);
+
+ logger.info("@TigrisConfiguration found.");
+
+ sampling = new Sampling(tigrisConfiguration.samplingPercentage(), tigrisConfiguration.cycleTimeInMilliseconds(), tigrisConfiguration.adaptiveSamplingRate());
+
+ adaptiveSamplingExecutor.scheduleWithFixedDelay(
+ this::run,
+ 5, 1, TimeUnit.SECONDS);
+ }
+
+ static Logger logger = LoggerFactory.getLogger(SamplingAspect.class);
+
+ public static boolean enabled = false;
+
+ public static boolean samplingEnabled = true;
+
+ private final ScheduledExecutorService adaptiveSamplingExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
+ "adaptive-sampling",
+ "readiness evaluation, pba trigger and sampling adaptation"
+ ));
+
+ @Around("anyCall()")
+ public Object aroundMethods(ProceedingJoinPoint joinPoint) throws Throwable {
+ long startTime = nanoTime();
+ Object result = joinPoint.proceed();
+ long endTime = nanoTime();
+
+ if (samplingEnabled) {
+ String signature = joinPoint.getSignature().toString() +
+ joinPoint.getArgs()[0].toString(); //TODO this distinguishes traces; needs to change for benchmarks other than h2
+ Granularity granularity = new Granularity(GranularityType.METHOD, signature);
+ if (sampling.isPerformanceBaselineEnabled()) {
+ sampling.addPerformanceBaselineItem(granularity, endTime - startTime);
+ return result;
+ }
+
+ boolean decision = sampling.samplingDecision(granularity, 0);
+
+ if (decision)
+ sampling.addSampledItem(granularity, startTime);
+ }
+
+ return result;
+ }
+
+ static List<Double> samplingRates = new ArrayList<>();
+ static int numberOfCycles = 0;
+
+ @Override
+ public void run() {
+ if (SamplingAspect.enabled && sampling.isAdaptiveSamplingRate()) {
+ sampling.managePerformanceBaseline();
+ samplingRates.add(sampling.getSamplingRate());
+
+ if (!sampling.isPerformanceBaselineEnabled() && sampling.isReady()) {
+ logger.info("Sample is ready, releasing for analysis and resetting...");
+ sampling.endMonitoringCycle();
+ numberOfCycles++;
+ }
+ }
+ }
+
+ public static void printResults(){
+ logger.info("Sampling Rates: {}", samplingRates);
+ logger.info("Number of finished cycles: {}", numberOfCycles);
+ }
+}
\ No newline at end of file
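
SamplingAspect resolves its settings from a class annotated with @TigrisConfiguration, found via ConfigurationUtils.getAvailableConfigurationClass. The annotation definition is not part of this diff; the sketch below is a purely illustrative reconstruction of how such a configuration class might look, with element names and types inferred from the calls in this commit (samplingPercentage, cycleTimeInMilliseconds, adaptiveSamplingRate); the real annotation may declare further elements, such as logRepository, and different defaults.

```java
import br.ufrgs.inf.prosoft.tigris.configuration.annotation.TigrisConfiguration;

// Illustrative only: element names are taken from the calls in this commit
// (samplingPercentage(), cycleTimeInMilliseconds(), adaptiveSamplingRate());
// the actual annotation may expose additional required elements.
@TigrisConfiguration(
        samplingPercentage = 0.1,           // start by tracing ~10% of the intercepted calls
        cycleTimeInMilliseconds = 60_000,   // length of one adaptive monitoring cycle
        adaptiveSamplingRate = true         // let Tigris adjust the rate from the Apdex impact
)
public class BenchmarkTigrisConfig {
}
```
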
diff --git a/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/SamplingTest.java b/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/SamplingTest.java
index b290bfc..0f9fb3a 100644
--- a/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/SamplingTest.java
+++ b/tigris/src/test/java/br/ufrgs/inf/prosoft/tigris/SamplingTest.java
@@ -3,16 +3,24 @@ package br.ufrgs.inf.prosoft.tigris;
import br.ufrgs.inf.prosoft.tigris.sampling.Granularity;
import br.ufrgs.inf.prosoft.tigris.sampling.GranularityType;
import br.ufrgs.inf.prosoft.tigris.sampling.Sampling;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.math3.ml.neuralnet.sofm.util.ExponentialDecayFunction;
+import org.apache.commons.math3.stat.inference.TestUtils;
import org.junit.Assert;
import org.junit.Test;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
public class SamplingTest {
@Test
public void samplingDecisionWithNoChance(){
Sampling sampling = new Sampling(0, 100, false);
for (int i = 0; i < 2000; i++) {
- Assert.assertFalse(sampling.samplingDecision(new Granularity(GranularityType.METHOD, "function")));
+ Assert.assertFalse(sampling.samplingDecision(new Granularity(GranularityType.METHOD, "function"), 0));
}
Assert.assertEquals(0, sampling.getSample().getTotalItems());
Assert.assertEquals(2000, sampling.getPopulation().getTotalItems());
@@ -21,10 +29,78 @@ public class SamplingTest {
@Test
public void samplingDecisionWith100Chance(){
Sampling sampling = new Sampling(1, 100, false);
+ Granularity granularity = new Granularity(GranularityType.METHOD, "function");
for (int i = 0; i < 2000; i++) {
- Assert.assertTrue(sampling.samplingDecision(new Granularity(GranularityType.METHOD, "function")));
+ Assert.assertTrue(sampling.samplingDecision(granularity, 0));
+ sampling.addSampledItem(granularity, 0);
}
Assert.assertEquals(2000, sampling.getSample().getTotalItems());
Assert.assertEquals(2000, sampling.getPopulation().getTotalItems());
}
+
+ @Test
+ public void isReadyWithOneGranularity(){
+ Sampling sampling = new Sampling(1, 100, true);
+ Granularity granularity = new Granularity(GranularityType.METHOD, "function");
+ for (int i = 0; i < 2000; i++) {
+ Assert.assertTrue(sampling.samplingDecision(granularity, 0));
+ sampling.addSampledItem(granularity, 0);
+ }
+ Assert.assertTrue(sampling.isReady());
+ }
+
+ @Test
+ public void isReadyWithMultipleGranularity(){
+ Sampling sampling = new Sampling(1, 100000, true);
+ for (int i = 0; i < 1000; i++) {
+ Granularity granularity = new Granularity(GranularityType.METHOD, "function");
+ Assert.assertTrue(sampling.samplingDecision(granularity, i));
+ sampling.addSampledItem(granularity, 0);
+ }
+ for (int i = 0; i < 1000; i++) {
+ Granularity granularity = new Granularity(GranularityType.METHOD, "function2");
+ Assert.assertTrue(sampling.samplingDecision(granularity, i));
+ sampling.addSampledItem(granularity, 0);
+ }
+ for (int i = 0; i < 1000; i++) {
+ Granularity granularity = new Granularity(GranularityType.METHOD, "function3");
+ Assert.assertTrue(sampling.samplingDecision(granularity, i));
+ sampling.addSampledItem(granularity, 0);
+ }
+ Assert.assertTrue(sampling.isReady());
+ }
+
+ @Test
+ public void decayingConfidenceTest(){
+ Sampling sampling = new Sampling(0.5, 6000, true);
+
+ long startTime = System.currentTimeMillis();
+
+ while(sampling.decayingConfidenceFactor(System.currentTimeMillis() - startTime) > 0);
+
+ Assert.assertEquals(sampling.decayingConfidenceFactor(System.currentTimeMillis() - startTime), 0, 0);
+ }
+
+// @Test
+ public void decayingWeightGeneration(){
+// Sampling sampling = new Sampling(0.5, 3000, true);
+
+ ExponentialDecayFunction decay = new ExponentialDecayFunction(1, 0.01, 1200);
+ long startValue = 1;
+
+ List<Double> values = new ArrayList<>();
+
+ double value = new BigDecimal(decay.value(startValue))
+ .setScale(2, BigDecimal.ROUND_FLOOR).doubleValue();
+ while(value > 0) {
+ startValue++;
+ values.add(value);
+ value = new BigDecimal(decay.value(startValue))
+ .setScale(2, BigDecimal.ROUND_FLOOR).doubleValue();
+ }
+
+ Collections.reverse(values);
+ System.out.println(values);
+ System.out.println(values.size());
+ }
}