adaptive-caching-framework

Clean up the analysis code and improve the performance of the trace comparison loop; move the ad-hoc main() test harness into AnalyzerTest.

12/11/2018 10:25:45 PM

Details

diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java
index 1adccc0..bb58a2b 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java
@@ -2,6 +2,7 @@ package br.ufrgs.inf.prosoft.adaptivecaching.analysis;
 
 import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.FlowchartWorkFlow;
 import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.model.MethodEntry;
+import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.stats.CacheabilityMetrics;
 import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.extensions.guava.GuavaCache;
 import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.extensions.guava.GuavaCacheManager;
 import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.model.Cache;
@@ -23,6 +24,9 @@ import com.mongodb.client.MongoDatabase;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
 import java.lang.annotation.Annotation;
 import java.util.List;
 import java.util.Set;
@@ -52,6 +56,8 @@ public class Analyzer implements Runnable {
     @Override
     public void run() {
 
+        logger.info("LightweightMetricAspect.metrics: {}", LightweightMetricAspect.metrics);
+
         if (!TracerAspect.analyzerEnabled) {
             logger.info("Analyzer disabled, not running...");
             return;
@@ -59,6 +65,7 @@ public class Analyzer implements Runnable {
 
         List all = dataSource.findAll();
 
+        //TODO trigger by number of logs
         //here we can check whether there are enough logs to process, such as
         //if(all.size() < 300) return;
 
@@ -89,163 +96,20 @@ public class Analyzer implements Runnable {
 
 
     public Set<MethodEntry> analyzeAndReturn(List<LogTrace> logList) {
-
-        logger.info("LightweightMetricAspect.metrics: {}", LightweightMetricAspect.metrics);
-
-
         logger.info("Starting the analysis of cacheable methods from logs: " + logList.size() + " loaded.");
-        //TODO trigger by number of logs
-//        if(logList.size() < 500000)
-//            return;
 
         FlowchartWorkFlow workflow = new FlowchartWorkFlow(cacheInfo, logList);
         Set<MethodEntry> process = workflow.filterCacheableMethods(adaptiveConfig.expiryInterval());
         logger.info(process.size() + " cacheable methods identified.");
 
+        //TODO find by annotations @Ignore and remove the methods marked
 
-
-//        //TODO find by annotations @Ignore and remove the methods marked
-//
-//        //        for (MethodInfo method : cacheableStats.getMethodsInfoMap().keySet()) {
-////            if(cacheableStats.getMethodsInfoMap().get(method).hitRatio() > 10)
-////                System.out.println(method.getSignature() + ": " + cacheableStats.getMethodsInfoMap().get(method).hitRatio());
-////        }
-
-
-//        //TODO if disable monitoring, should not schedule future analysis
+        //if disable monitoring, should not schedule future analysis
         if (adaptiveConfig.disableMonitoringAfterAnalysis()) {
             TracerAspect.tracerEnabled = false;
             logger.info("Adaptive caching monitoring disabled since the model was built.");
         }
-        return process;
-    }
-
 
-
-    //used to test
-    public static void main(String[] args) {
-        //TODO db according to properties file...
-        MongoClient mongo = new MongoClient("localhost", 27017);
-        MongoDatabase database = mongo.getDatabase("cachemonitoring");
-        Repository repositoryTarget = new MongoRepository<MethodEntry>(database.getCollection("petclinicCacheable"), MethodEntry.class);
-        Repository repositorySource = new MongoRepository<LogTrace>(database.getCollection("cloudstore"), LogTrace.class);
-//        Repository repositorySource = new RedisRepository<LogTrace>();
-
-        //TODO db according to properties file...
-        GuavaCacheManager cacheManager = new GuavaCacheManager();
-        Cache cache = cacheManager.getCache("test");
-        CacheMonitor cacheMonitor = new GuavaMonitor((GuavaCache) cache);
-        logger.debug("Cache provider is configured to Guava.");
-
-        AdaptiveCaching adaptiveCaching = new AdaptiveCaching() {
-            @Override
-            public boolean equals(Object obj) {
-                return false;
-            }
-
-            @Override
-            public int hashCode() {
-                return 0;
-            }
-
-            @Override
-            public String toString() {
-                return null;
-            }
-
-            @Override
-            public Class<? extends Annotation> annotationType() {
-                return null;
-            }
-
-            @Override
-            public boolean enabled() {
-                return true;
-            }
-
-            @Override
-            public RepositoryType logRepository() {
-                return RepositoryType.MONGODB;
-            }
-
-            @Override
-            public CacheProviderType cacheProvider() {
-                return CacheProviderType.GUAVA;
-            }
-
-            @Override
-            public Modelling modelling() {
-                return Modelling.ACCUMULATION;
-            }
-
-            @Override
-            public long expiryInterval() {
-                return 5;
-            }
-
-            @Override
-            public long firstExpiry() {
-                return 5;
-            }
-
-            @Override
-            public long analysisInterval() {
-                return 5;
-            }
-
-            @Override
-            public long firstAnalysis() {
-                return 2;
-            }
-
-            @Override
-            public TriggerType triggerType() {
-                return TriggerType.TIME;
-            }
-
-            @Override
-            public long triggerTime() {
-                return 5;
-            }
-
-            @Override
-            public boolean analyzerEnabled() {
-                return true;
-            }
-
-            @Override
-            public boolean disableMonitoringAfterAnalysis() {
-                return false;
-            }
-
-            @Override
-            public boolean traceAsync() {
-                return true;
-            }
-
-            @Override
-            public boolean clearMonitoringDataOnStart() {
-                return false;
-            }
-
-            @Override
-            public boolean enableBlacklist() {
-                return false;
-            }
-
-            @Override
-            public boolean tracerEnabled() {
-                return true;
-            }
-
-            @Override
-            public boolean analyzeOnce() {
-                return false;
-            }
-        };
-        Analyzer analyzer = new Analyzer(repositorySource, null, cacheMonitor.getCacheInfo(), adaptiveCaching);
-
-        //TODO trigger by time?
-        analyzer.run();
+        return process;
     }
 }
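
Note on the new "trigger by number of logs" TODO in Analyzer.run(): it points at replacing the timer-based trigger with a count-based guard, as in the commented-out example already in the file. Below is a minimal standalone sketch of that idea; the threshold value and class/method names are made up, and the real run() would check dataSource.findAll().size() over LogTrace objects rather than plain strings.

    import java.util.List;

    // Minimal sketch of a count-based analysis trigger (hypothetical threshold and names).
    public class LogCountTriggerSketch {
        // Assumed threshold, mirroring the "if(all.size() < 300) return;" example in run().
        private static final int MIN_LOG_COUNT = 300;

        static boolean shouldAnalyze(List<?> traces) {
            return traces.size() >= MIN_LOG_COUNT;
        }

        public static void main(String[] args) {
            // false: not enough traces collected yet, so the analysis would be skipped.
            System.out.println(shouldAnalyze(List.of("t1", "t2")));
        }
    }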
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java
index 341db7d..6a4811a 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java
@@ -10,15 +10,13 @@ import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.application.metadata.LogT
 import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.application.metadata.MethodInfo;
 import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.cache.CacheInfo;
 import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.PrintWriter;
-import java.math.BigDecimal;
-import java.math.RoundingMode;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Set;
@@ -26,7 +24,6 @@ import java.util.stream.Collectors;
 
 public class FlowchartWorkFlow {
 
-    private final long population;
     protected HashMap<MethodInfo, MethodStats> methodsInfoMap;
     Logger logger = LoggerFactory.getLogger(FlowchartWorkFlow.class);
 
@@ -43,7 +40,6 @@ public class FlowchartWorkFlow {
         this.decider = new CacheabilityPatternDecider(logList.size(), this);
         this.cacheInfo = cacheInfo;
         this.methodsInfoMap = countOccurrences(logList);
-        this.population = logList.size();
 
         logger.debug(methodsInfoMap.size() + " unique method calls identified from "
                 + logList.size() + " original traces");
@@ -77,38 +73,7 @@ public class FlowchartWorkFlow {
                 .collect(Collectors.toSet());
 
         logger.info(cacheableMethods.size() + " cacheable methods detected. Printing files...");
-
-        //TODO remove: print all unique methods and metrics to csv file
-        try {
-            final PrintWriter pw = new PrintWriter(new File("allmethods.csv"));
-            pw.write("isStaticData,changeMoreThanUsed,usedByManyRequests,isUserSpecific,isCacheSizeLarge,isDataSizeLarge,isExpensive,signature,numberOfSameOccurrences,numberOfDifferentReturnOccurrences,totalOccurrences,sameOccurrencesAverageExecutionTime,sameOccurrencesTotalExecutionTime,hitRatio,missRatio\n");
-            getMethodsInfoMap().keySet().stream().forEach(mi -> pw.write(CacheabilityMetrics.allMetricsToString(cacheInfo, mi, getMethodsInfoMap().get(mi), this, getMethodsInfoMap().size()) + "," + new MethodEntry(mi, getMethodsInfoMap().get(mi), System.currentTimeMillis() + expiryTime).getMethodInfo().getSignature() + "," + new MethodEntry(mi, getMethodsInfoMap().get(mi), System.currentTimeMillis() + expiryTime).getMethodStats().toCSV() + '\n'));
-            pw.close();
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        //TODO remove: print cacheable methods to csv file
-        try {
-            final PrintWriter pw = new PrintWriter(new File("cacheablemethods.csv"));
-            pw.write("isStaticData,changeMoreThanUsed,usedByManyRequests,isUserSpecific,isCacheSizeLarge,isDataSizeLarge,isExpensive,signature,numberOfSameOccurrences,numberOfDifferentReturnOccurrences,totalOccurrences,sameOccurrencesAverageExecutionTime,sameOccurrencesTotalExecutionTime,hitRatio,missRatio\n");
-            cacheableMethods.stream().forEach(ma -> pw.write(CacheabilityMetrics.allMetricsToString(cacheInfo, ma.getMethodInfo(), ma.getMethodStats(), this, getMethodsInfoMap().size()) + "," + ma.getMethodInfo().getSignature() + "," + ma.getMethodStats().toCSV() + '\n'));
-            pw.close();
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
-        }
-
-        //TODO remove: print black list to csv file
-        try {
-            if (TracerAspect.methodBlackList != null && !TracerAspect.methodBlackList.isEmpty()) {
-                final PrintWriter pw = new PrintWriter(new File("backlisted.csv"));
-                pw.write("signature\n");
-                TracerAspect.methodBlackList.stream().forEach(ma -> pw.write(ma + '\n'));
-                pw.close();
-            }
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
-        }
+        printAll(expiryTime, cacheableMethods);
 
         return cacheableMethods;
     }
@@ -124,24 +89,18 @@ public class FlowchartWorkFlow {
 
             MethodStats methodStats = new MethodStats(logTrace);
 
-            for (int j = 0; j < logs.size(); j++) {
+            for (int j = i+1; j < logs.size(); j++) {
                 LogTrace traceCompare = logs.get(j);
 
-                if(i == j)
-                    continue;
-
                 //if similar methods: same signature and params, different return
-//                if(logTrace.getMethodInfo().getHashedArguments())
-                if (traceCompare.getMethodInfo().equalsWithoutReturnedValue(logTrace.getMethodInfo())
-//                      || traceCompare.getMethodInfo().equalsHashedWithoutReturnedValue(logTrace.getMethodInfo())
-                        ) {
-
+                if (traceCompare.getMethodInfo()
+                        .equalsWithoutReturnedValue(logTrace.getMethodInfo())) {
                     //if identical methods
-                    if (EqualsBuilder.reflectionEquals(traceCompare.getMethodInfo().getReturnedValue(), logTrace.getMethodInfo().getReturnedValue())
-//                            || Objects.equals(traceCompare.getMethodInfo().getHashedReturnedValue(), logTrace.getMethodInfo().getHashedReturnedValue())
-)
+                    if (EqualsBuilder.reflectionEquals(
+                            traceCompare.getMethodInfo().getReturnedValue(),
+                            logTrace.getMethodInfo().getReturnedValue())) {
                         methodStats.addSameOccurrence(traceCompare);
-
+                    }
                     else methodStats.addDifferentReturnOccurrence();
                 }
             }
@@ -151,8 +110,8 @@ public class FlowchartWorkFlow {
             executionTimeStats.addValue(methodStats.getSameOccurrencesTotalExecutionTime());
             shareabilityStats.addValue(methodStats.shareability());
             frequencyStats.addValue(methodStats.getNumberOfSameOccurrences());
-            missStats.addValue(methodStats.hitRatio());
-            hitStats.addValue(methodStats.missRatio());
+            missStats.addValue(methodStats.missRatio());
+            hitStats.addValue(methodStats.hitRatio());
         }
 
         return methodInfoMap;
@@ -221,4 +180,38 @@ public class FlowchartWorkFlow {
         }
         return occurrences / methods;
     }
+
+    private void printAll(long expiryTime, Set<MethodEntry> cacheableMethods) {
+        //TODO remove: print all unique methods and metrics to csv file
+        try {
+            final PrintWriter pw = new PrintWriter(new File("allmethods.csv"));
+            pw.write("isStaticData,changeMoreThanUsed,usedByManyRequests,isUserSpecific,isCacheSizeLarge,isDataSizeLarge,isExpensive,signature,numberOfSameOccurrences,numberOfDifferentReturnOccurrences,totalOccurrences,sameOccurrencesAverageExecutionTime,sameOccurrencesTotalExecutionTime,hitRatio,missRatio\n");
+            getMethodsInfoMap().keySet().stream().forEach(mi -> pw.write(CacheabilityMetrics.allMetricsToString(cacheInfo, mi, getMethodsInfoMap().get(mi), this, getMethodsInfoMap().size()) + "," + new MethodEntry(mi, getMethodsInfoMap().get(mi), System.currentTimeMillis() + expiryTime).getMethodInfo().getSignature() + "," + new MethodEntry(mi, getMethodsInfoMap().get(mi), System.currentTimeMillis() + expiryTime).getMethodStats().toCSV() + '\n'));
+            pw.close();
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+        }
+
+        //TODO remove: print cacheable methods to csv file
+        try {
+            final PrintWriter pw = new PrintWriter(new File("cacheablemethods.csv"));
+            pw.write("isStaticData,changeMoreThanUsed,usedByManyRequests,isUserSpecific,isCacheSizeLarge,isDataSizeLarge,isExpensive,signature,numberOfSameOccurrences,numberOfDifferentReturnOccurrences,totalOccurrences,sameOccurrencesAverageExecutionTime,sameOccurrencesTotalExecutionTime,hitRatio,missRatio\n");
+            cacheableMethods.stream().forEach(ma -> pw.write(CacheabilityMetrics.allMetricsToString(cacheInfo, ma.getMethodInfo(), ma.getMethodStats(), this, getMethodsInfoMap().size()) + "," + ma.getMethodInfo().getSignature() + "," + ma.getMethodStats().toCSV() + '\n'));
+            pw.close();
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+        }
+
+        //TODO remove: print black list to csv file
+        try {
+            if (TracerAspect.methodBlackList != null && !TracerAspect.methodBlackList.isEmpty()) {
+                final PrintWriter pw = new PrintWriter(new File("backlisted.csv"));
+                pw.write("signature\n");
+                TracerAspect.methodBlackList.stream().forEach(ma -> pw.write(ma + '\n'));
+                pw.close();
+            }
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+        }
+    }
 }
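
The main performance change in countOccurrences is that the inner comparison loop now starts at j = i + 1 instead of scanning the whole list and skipping i == j, so each unordered pair of traces is visited once. A standalone sketch of that triangular scan follows; the names are hypothetical and it does not reproduce the framework's MethodStats accumulation.

    import java.util.List;

    // Triangular pairwise scan: each unordered pair (i, j) is compared exactly once,
    // giving roughly n*(n-1)/2 comparisons instead of n*(n-1) with an i == j skip.
    public class PairScanSketch {

        static int countEqualPairs(List<String> items) {
            int equalPairs = 0;
            for (int i = 0; i < items.size(); i++) {
                for (int j = i + 1; j < items.size(); j++) { // was: j = 0 with if (i == j) continue;
                    if (items.get(i).equals(items.get(j))) {
                        equalPairs++;
                    }
                }
            }
            return equalPairs;
        }

        public static void main(String[] args) {
            // "a" appears three times, so there are 3 equal pairs.
            System.out.println(countEqualPairs(List.of("a", "b", "a", "a")));
        }
    }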
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java
index 929b4d1..fdb22db 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java
@@ -202,7 +202,6 @@ public class TracerAspect {
 //        }
 
             methodBlackList = new ArrayList<>();
-            tempTraces = Collections.synchronizedList(new ArrayList<>());
 
 
         } catch (ConfigurationException e) {
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetricAspect.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetricAspect.java
index 300ef8c..ad6e807 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetricAspect.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetricAspect.java
@@ -1,6 +1,8 @@
 package br.ufrgs.inf.prosoft.adaptivecaching.sampling.metrics;
 
+import org.aspectj.lang.JoinPoint;
 import org.aspectj.lang.ProceedingJoinPoint;
+import org.aspectj.lang.annotation.AfterThrowing;
 import org.aspectj.lang.annotation.Around;
 import org.aspectj.lang.annotation.Aspect;
 import org.aspectj.lang.annotation.Pointcut;
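
The new JoinPoint and @AfterThrowing imports suggest exception-handling advice is being added to this aspect. A hedged sketch of what such advice could look like is below; the pointcut expression, class name, and method name are assumptions, not the framework's actual code.

    import org.aspectj.lang.JoinPoint;
    import org.aspectj.lang.annotation.AfterThrowing;
    import org.aspectj.lang.annotation.Aspect;

    // Hypothetical @AfterThrowing advice; the pointcut is an assumed package pattern.
    @Aspect
    public class ExceptionAdviceSketch {

        @AfterThrowing(pointcut = "execution(* br.ufrgs.inf.prosoft..*(..))", throwing = "ex")
        public void reportFailure(JoinPoint joinPoint, Throwable ex) {
            // Record which monitored method failed so its measurements can be discarded.
            System.err.println(joinPoint.getSignature() + " threw " + ex);
        }
    }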
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetrics.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetrics.java
index b18f9de..79e5714 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetrics.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/metrics/LightweightMetrics.java
@@ -1,5 +1,8 @@
 package br.ufrgs.inf.prosoft.adaptivecaching.sampling.metrics;
 
+import org.apache.commons.math3.stat.StatUtils;
+import org.apache.commons.math3.util.FastMath;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -62,13 +65,4 @@ public class LightweightMetrics {
                 "occurrences=" + occurrences +
                 '}';
     }
-
-    //TODO use commons math to in other parts of the code
-    private double mean(List<Number> values){
-        return StatUtils.mean(values);
-    }
-
-    private double standardDeviation(List<Number> values){
-        return FastMath.sqrt(StatUtils.variance(values));
-    }
 }
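
The removed mean/standardDeviation helpers (and the TODO about using Commons Math elsewhere) map onto the double[]-based Commons Math API: StatUtils has no List overloads, so the old List<Number> signatures would not compile as written. A hedged sketch of the equivalent helpers, plus the streaming SummaryStatistics type now imported in FlowchartWorkFlow:

    import org.apache.commons.math3.stat.StatUtils;
    import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
    import org.apache.commons.math3.util.FastMath;

    // Sketch of the removed helpers against the actual Commons Math API (double[] inputs).
    public class CommonsMathSketch {

        static double mean(double[] values) {
            return StatUtils.mean(values);
        }

        static double standardDeviation(double[] values) {
            return FastMath.sqrt(StatUtils.variance(values)); // bias-corrected sample variance
        }

        public static void main(String[] args) {
            double[] sample = {1.0, 2.0, 3.0, 4.0};
            System.out.println(mean(sample));              // 2.5
            System.out.println(standardDeviation(sample)); // ~1.29

            // Streaming alternative: accumulate values one at a time.
            SummaryStatistics stats = new SummaryStatistics();
            for (double v : sample) stats.addValue(v);
            System.out.println(stats.getStandardDeviation()); // same result, computed incrementally
        }
    }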
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/statistics/StatisticalTest.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/statistics/StatisticalTest.java
index 4234c07..d3596bd 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/statistics/StatisticalTest.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/statistics/StatisticalTest.java
@@ -1,18 +1,21 @@
 
 package br.ufrgs.inf.prosoft.adaptivecaching.sampling.statistics;
 
+import org.apache.commons.math3.distribution.NormalDistribution;
+import org.apache.commons.math3.stat.inference.TestUtils;
+
 public class StatisticalTest {
 
-    public static isNormalDistribution(){
+    public static void isNormalDistribution(){
         //commons math
-        final NormalDistribution unitNormal = new NormalDistribution(0d, 1d);
-        TestUtils.kolmogorovSmirnovTest(unitNormal, sample, false);
+//        final NormalDistribution unitNormal = new NormalDistribution(0d, 1d);
+//        TestUtils.kolmogorovSmirnovTest(unitNormal, sample, false);
 
 
         //https://github.com/umontreal-simul/ssj
 
         //http://jdistlib.sourceforge.net/
-        NormalityTest.xxx_statistic(double[] x);
-        NormalityTest.xxx_p_value(double stat, int df);
+//        NormalityTest.xxx_statistic(double[] x);
+//        NormalityTest.xxx_p_value(double stat, int df);
     }
 }
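
For reference, the Kolmogorov-Smirnov call commented out above is the Commons Math one-sample normality test. A hedged, self-contained version follows; the sample data is made up for illustration.

    import org.apache.commons.math3.distribution.NormalDistribution;
    import org.apache.commons.math3.stat.inference.TestUtils;

    // One-sample KS test of a data sample against the standard normal distribution.
    public class NormalityTestSketch {

        public static void main(String[] args) {
            double[] sample = {0.1, -0.4, 1.2, 0.3, -0.9, 0.7, -0.2, 0.5, -1.1, 0.0};
            NormalDistribution unitNormal = new NormalDistribution(0d, 1d);
            // Returns the p-value; a small p-value is evidence the sample is not normal.
            double pValue = TestUtils.kolmogorovSmirnovTest(unitNormal, sample, false);
            System.out.println("KS p-value: " + pValue);
        }
    }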
diff --git a/framework/autonomicmanager/src/test/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/AnalyzerTest.java b/framework/autonomicmanager/src/test/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/AnalyzerTest.java
new file mode 100644
index 0000000..1016c18
--- /dev/null
+++ b/framework/autonomicmanager/src/test/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/AnalyzerTest.java
@@ -0,0 +1,149 @@
+package br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart;
+
+import br.ufrgs.inf.prosoft.adaptivecaching.analysis.Analyzer;
+import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.model.MethodEntry;
+import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.extensions.guava.GuavaCache;
+import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.extensions.guava.GuavaCacheManager;
+import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.model.Cache;
+import br.ufrgs.inf.prosoft.adaptivecaching.configuration.annotation.AdaptiveCaching;
+import br.ufrgs.inf.prosoft.adaptivecaching.configuration.annotation.types.CacheProviderType;
+import br.ufrgs.inf.prosoft.adaptivecaching.configuration.annotation.types.Modelling;
+import br.ufrgs.inf.prosoft.adaptivecaching.configuration.annotation.types.RepositoryType;
+import br.ufrgs.inf.prosoft.adaptivecaching.configuration.annotation.types.TriggerType;
+import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.application.metadata.LogTrace;
+import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.cache.CacheMonitor;
+import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.cache.vendors.guava.GuavaMonitor;
+import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.storage.Repository;
+import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.storage.providers.MongoRepository;
+import com.mongodb.MongoClient;
+import com.mongodb.client.MongoDatabase;
+
+import java.lang.annotation.Annotation;
+
+public class AnalyzerTest {
+
+//    @Test
+    public void test(){
+        //TODO db according to properties file...
+        MongoClient mongo = new MongoClient("localhost", 27017);
+        MongoDatabase database = mongo.getDatabase("cachemonitoring");
+        Repository repositoryTarget = new MongoRepository<MethodEntry>(database.getCollection("petclinicCacheable"), MethodEntry.class);
+        Repository repositorySource = new MongoRepository<LogTrace>(database.getCollection("cloudstore"), LogTrace.class);
+
+        //TODO db according to properties file...
+        GuavaCacheManager cacheManager = new GuavaCacheManager();
+        Cache cache = cacheManager.getCache("test");
+        CacheMonitor cacheMonitor = new GuavaMonitor((GuavaCache) cache);
+
+        AdaptiveCaching adaptiveCaching = new AdaptiveCaching() {
+            @Override
+            public boolean equals(Object obj) {
+                return false;
+            }
+
+            @Override
+            public int hashCode() {
+                return 0;
+            }
+
+            @Override
+            public String toString() {
+                return null;
+            }
+
+            @Override
+            public Class<? extends Annotation> annotationType() {
+                return null;
+            }
+
+            @Override
+            public boolean enabled() {
+                return true;
+            }
+
+            @Override
+            public RepositoryType logRepository() {
+                return RepositoryType.MONGODB;
+            }
+
+            @Override
+            public CacheProviderType cacheProvider() {
+                return CacheProviderType.GUAVA;
+            }
+
+            @Override
+            public Modelling modelling() {
+                return Modelling.ACCUMULATION;
+            }
+
+            @Override
+            public long expiryInterval() {
+                return 5;
+            }
+
+            @Override
+            public long firstExpiry() {
+                return 5;
+            }
+
+            @Override
+            public long analysisInterval() {
+                return 5;
+            }
+
+            @Override
+            public long firstAnalysis() {
+                return 2;
+            }
+
+            @Override
+            public TriggerType triggerType() {
+                return TriggerType.TIME;
+            }
+
+            @Override
+            public long triggerTime() {
+                return 5;
+            }
+
+            @Override
+            public boolean analyzerEnabled() {
+                return true;
+            }
+
+            @Override
+            public boolean disableMonitoringAfterAnalysis() {
+                return false;
+            }
+
+            @Override
+            public boolean traceAsync() {
+                return true;
+            }
+
+            @Override
+            public boolean clearMonitoringDataOnStart() {
+                return false;
+            }
+
+            @Override
+            public boolean enableBlacklist() {
+                return false;
+            }
+
+            @Override
+            public boolean tracerEnabled() {
+                return true;
+            }
+
+            @Override
+            public boolean analyzeOnce() {
+                return false;
+            }
+        };
+        Analyzer analyzer = new Analyzer(repositorySource, null, cacheMonitor.getCacheInfo(), adaptiveCaching);
+
+        //TODO trigger by time?
+        analyzer.run();
+    }
+}