adaptive-caching-framework

diff --git a/framework/autonomicmanager/pom.xml b/framework/autonomicmanager/pom.xml
index e0df42e..1af3d54 100644
--- a/framework/autonomicmanager/pom.xml
+++ b/framework/autonomicmanager/pom.xml
@@ -209,5 +209,4 @@
         </plugins>
     </build>
 
-
 </project>
\ No newline at end of file
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java
index 01eef01..1adccc0 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/Analyzer.java
@@ -72,7 +72,7 @@ public class Analyzer implements Runnable {
 
         //todo maybe clean the cache here?
         TracerAspect.cacheableMethods = process;
-        TracerAspect.cacheableMethodKeys = process.stream().map(MethodEntry::getMethodInfoKey).collect(Collectors.toSet());
+        TracerAspect.cacheableMethodKeys = process.stream().parallel().map(MethodEntry::getMethodInfoKey).collect(Collectors.toSet());
 
         String methods = "";
         for (MethodEntry me : process) methods = methods.concat(me.getMethodInfo().getSignature() + ",");
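
The change above only adds .parallel() to the key-extraction stream. A minimal sketch of why that is safe, assuming MethodEntry::getMethodInfoKey is a stateless mapping (String::hashCode stands in for it below, so the names are illustrative only):

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ParallelKeyExtraction {
    public static void main(String[] args) {
        List<String> process = List.of("methodA", "methodB", "methodA");
        // Collectors.toSet() merges the per-thread partial sets itself, so the
        // parallel pipeline stays thread-safe; the speedup depends on the list size.
        Set<Integer> keys = process.stream().parallel()
                .map(String::hashCode)
                .collect(Collectors.toSet());
        System.out.println(keys.size() + " unique keys");
    }
}
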
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java
index 7021d39..341db7d 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/FlowchartWorkFlow.java
@@ -33,16 +33,11 @@ public class FlowchartWorkFlow {
     private CacheDecider decider;
     private CacheInfo cacheInfo;
 
-    private double sumMissRatio;
-    private double sumHitRatio;
-    private double sumExecutionTime;
-    private double sumShareability;
-    private double sumFrequency;
-    private List<Double> hitRatios = new ArrayList<>();
-    private List<Double> missRatios = new ArrayList<>();
-    private List<Long> executionTimes = new ArrayList<>();
-    private List<Double> shareabilities = new ArrayList<>();
-    private List<Long> frequencies = new ArrayList<>();
+    SummaryStatistics executionTimeStats = new SummaryStatistics();
+    SummaryStatistics shareabilityStats = new SummaryStatistics();
+    SummaryStatistics frequencyStats = new SummaryStatistics();
+    SummaryStatistics missStats = new SummaryStatistics();
+    SummaryStatistics hitStats = new SummaryStatistics();
 
     public FlowchartWorkFlow(CacheInfo cacheInfo, List<LogTrace> logList) {
         this.decider = new CacheabilityPatternDecider(logList.size(), this);
@@ -50,26 +45,19 @@ public class FlowchartWorkFlow {
         this.methodsInfoMap = countOccurrences(logList);
         this.population = logList.size();
 
-        logger.debug(methodsInfoMap.size() + " unique method calls identified from " + logList.size() + " original traces");
-
-        //sorting to get threshold, also excluding duplicates
-//        Collections.sort(executionTimes);
-//        hitRatios = hitRatios.stream().distinct().collect(Collectors.toList());
-//        Collections.sort(hitRatios);
-//        missRatios = missRatios.stream().distinct().collect(Collectors.toList());
-//        Collections.sort(missRatios);
-//        shareabilities = shareabilities.stream().distinct().collect(Collectors.toList());
-//        Collections.sort(shareabilities);
-
-        logger.debug("Average ExecutionTime: " + getAverageExecutionTime());
-        logger.debug("Average HitRatio: " + getAverageHitRatio());
-        logger.debug("Average MissRatio: " + getAverageMissRatio());
-        logger.debug("Average shareability: " + getAverageShareability());
-        logger.debug("StdDv ExecutionTime: " + getStdDevExecutionTimeRatio());
-        logger.debug("StdDv HitRatio: " + getStdDevHitRatio());
-        logger.debug("StdDv MissRatio: " + getStdDevMissRatio());
-        logger.debug("StdDv shareability: " + getStdDevShareability());
-        logger.debug("StdDv frequency: " + getStdDevFrequency());
+        logger.debug(methodsInfoMap.size() + " unique method calls identified from "
+                + logList.size() + " original traces");
+
+        logger.debug("Average ExecutionTime: " + executionTimeStats.getMean());
+        logger.debug("Average HitRatio: " + hitStats.getMean());
+        logger.debug("Average MissRatio: " + missStats.getMean());
+        logger.debug("Average shareability: " + shareabilityStats.getMean());
+        logger.debug("Average frequency: " + frequencyStats.getMean());
+        logger.debug("StdDv ExecutionTime: " + executionTimeStats.getStandardDeviation());
+        logger.debug("StdDv HitRatio: " + hitStats.getStandardDeviation());
+        logger.debug("StdDv MissRatio: " + missStats.getStandardDeviation());
+        logger.debug("StdDv shareability: " + shareabilityStats.getStandardDeviation());
+        logger.debug("StdDv frequency: " + frequencyStats.getStandardDeviation());
 
         int k = 0;
         logger.debug("Using " + k + " stdDev to calculate thresholds...");
@@ -80,13 +68,12 @@ public class FlowchartWorkFlow {
         logger.debug("Threshold frequency: " + frequencyThreshold(k));
     }
 
-
     public Set<MethodEntry> filterCacheableMethods(long expiryTime) {
         logger.debug("Deciding if methods are cacheable...");
 
         Set<MethodEntry> cacheableMethods = getMethodsInfoMap().keySet().stream()
                 .filter(mi -> decider.isCacheable(cacheInfo, mi, getMethodsInfoMap().get(mi)))
-                .map(mi -> new MethodEntry(mi, getMethodsInfoMap().get(mi), System.currentTimeMillis() + expiryTime))
+                .parallel().map(mi -> new MethodEntry(mi, getMethodsInfoMap().get(mi), System.currentTimeMillis() + expiryTime))
                 .collect(Collectors.toSet());
 
         logger.info(cacheableMethods.size() + " cacheable methods detected. Printing files...");
@@ -127,13 +114,6 @@ public class FlowchartWorkFlow {
     }
 
     public HashMap<MethodInfo, MethodStats> countOccurrences(List<LogTrace> logs) {
-
-        sumExecutionTime = 0;
-        sumHitRatio = 0;
-        sumMissRatio = 0;
-        sumShareability = 0;
-        sumFrequency = 0;
-
         HashMap<MethodInfo, MethodStats> methodInfoMap = new HashMap<>();
 
         for (int i = 0; i < logs.size(); i++) {
@@ -168,142 +148,42 @@ public class FlowchartWorkFlow {
 
             methodInfoMap.put(logTrace.getMethodInfo(), methodStats);
 
-            sumExecutionTime += methodStats.getSameOccurrencesTotalExecutionTime();
-            executionTimes.add(methodStats.getSameOccurrencesTotalExecutionTime());
-
-            sumHitRatio += methodStats.hitRatio();
-            hitRatios.add(methodStats.hitRatio());
-
-            sumMissRatio += methodStats.missRatio();
-            missRatios.add(methodStats.missRatio());
-
-            sumShareability += methodStats.shareability();
-            shareabilities.add(methodStats.shareability());
-
-            sumFrequency += methodStats.getNumberOfSameOccurrences();
-            frequencies.add(methodStats.getNumberOfSameOccurrences());
+            executionTimeStats.addValue(methodStats.getSameOccurrencesTotalExecutionTime());
+            shareabilityStats.addValue(methodStats.shareability());
+            frequencyStats.addValue(methodStats.getNumberOfSameOccurrences());
+            missStats.addValue(methodStats.missRatio());
+            hitStats.addValue(methodStats.hitRatio());
         }
 
         return methodInfoMap;
     }
 
-    /**
-     * General mean hit ratio of all calls
-     *
-     * @return
-     */
-    public double getAverageHitRatio() {
-        return new BigDecimal(sumHitRatio).divide(new BigDecimal(population), 5, RoundingMode.HALF_UP).doubleValue();
-    }
-
-    public double getAverageMissRatio() {
-        return new BigDecimal(sumMissRatio).divide(new BigDecimal(population), 5, RoundingMode.HALF_UP).doubleValue();
-    }
-
-    public double getAverageExecutionTime() {
-        return new BigDecimal(sumExecutionTime).divide(new BigDecimal(population), 5, RoundingMode.HALF_UP).doubleValue();
-    }
-
-    public double getAverageShareability() {
-        return new BigDecimal(sumShareability).divide(new BigDecimal(population), 5, RoundingMode.HALF_UP).doubleValue();
-    }
-
-    private Double StdDevHitRatio;
-    public double getStdDevHitRatio() {
-
-        if(StdDevHitRatio != null)
-            return StdDevHitRatio;
-
-        double mean = getAverageHitRatio();
-        double temp = 0;
-        for (double a : hitRatios)
-            temp += (a - mean) * (a - mean);
-        StdDevHitRatio = Math.sqrt(temp / population);
-        return StdDevHitRatio;
-    }
-
-    private Double StdDevMissRatio;
-    public double getStdDevMissRatio() {
-
-        if(StdDevMissRatio != null)
-            return StdDevMissRatio;
-
-        double mean = getAverageMissRatio();
-        double temp = 0;
-        for (double a : missRatios)
-            temp += (a - mean) * (a - mean);
-        StdDevMissRatio = Math.sqrt(temp / population);
-        return StdDevMissRatio;
-    }
-
-    private Double StdDevExecutionTimeRatio;
-    public double getStdDevExecutionTimeRatio() {
-
-        if(StdDevExecutionTimeRatio != null)
-            return StdDevExecutionTimeRatio;
-
-        double mean = getAverageExecutionTime();
-        double temp = 0;
-        for (Long a : executionTimes)
-            temp += (a - mean) * (a - mean);
-        StdDevExecutionTimeRatio = Math.sqrt(temp / population);
-        return StdDevExecutionTimeRatio;
-    }
-
-    private Double StdDevFrequency;
-    public double getStdDevFrequency() {
-
-        if(StdDevFrequency != null)
-            return StdDevFrequency;
-
-        double mean = getAverageFrequency();
-        double temp = 0;
-        for (Long a : frequencies)
-            temp += (a - mean) * (a - mean);
-        StdDevFrequency = Math.sqrt(temp / population);
-        return StdDevFrequency;
-    }
-
-    private Double StdDevShareability;
-    public double getStdDevShareability() {
-
-        if(StdDevShareability != null)
-            return StdDevShareability;
-
-        double mean = getAverageShareability();
-        double temp = 0;
-        for (Double a : shareabilities)
-            temp += (a - mean) * (a - mean);
-        StdDevShareability = Math.sqrt(temp / population);
-        return StdDevShareability;
-    }
-
     public HashMap<MethodInfo, MethodStats> getMethodsInfoMap() {
         return methodsInfoMap;
     }
 
     //getting X% with most hits
     public double hitThreshold(int kStdDev) {
-        return getAverageHitRatio() + (kStdDev * getStdDevHitRatio());
+        return hitStats.getMean() + (kStdDev * hitStats.getStandardDeviation());
     }
 
     //getting X% with most misses
     public double missThreshold(int kStdDev) {
-        return getAverageMissRatio() + (kStdDev * getStdDevMissRatio());
+        return missStats.getMean() + (kStdDev * missStats.getStandardDeviation());
     }
 
     //getting X% most expensive methods
     public double expensivenessThreshold(int kStdDev) {
-        return getAverageExecutionTime() + (kStdDev * getStdDevExecutionTimeRatio());
+        return executionTimeStats.getMean() + (kStdDev * executionTimeStats.getStandardDeviation());
     }
 
     public double shareabilityThreshold(int kStdDev) {
-        return getAverageShareability() + (kStdDev * getStdDevShareability());
+        return shareabilityStats.getMean() + (kStdDev * shareabilityStats.getStandardDeviation());
     }
 
     //getting X% most frenquent
     public double frequencyThreshold(int kStdDev) {
-        return getAverageFrequency() + (kStdDev * getStdDevFrequency());
+        return frequencyStats.getMean() + (kStdDev * frequencyStats.getStandardDeviation());
     }
 
     /**
@@ -341,13 +221,4 @@ public class FlowchartWorkFlow {
         }
         return occurrences / methods;
     }
-
-    private Double AverageFrequency;
-    public double getAverageFrequency() {
-        if(AverageFrequency != null)
-            return AverageFrequency;
-
-        AverageFrequency = new BigDecimal(sumFrequency).divide(new BigDecimal(frequencies.size()), 5, RoundingMode.HALF_UP).doubleValue();
-        return AverageFrequency;
-    }
 }
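
In FlowchartWorkFlow the per-metric sums, lists, and lazily cached mean/standard-deviation fields are replaced by one SummaryStatistics accumulator per metric. A minimal sketch of the pattern, assuming the class is org.apache.commons.math3.stat.descriptive.SummaryStatistics (the import is not visible in this diff). One behavioral note: getStandardDeviation() returns the sample (n-1) standard deviation, while the removed code divided by the full population size.

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class ThresholdSketch {
    public static void main(String[] args) {
        // Values are accumulated incrementally, so no per-metric list is kept.
        SummaryStatistics executionTimeStats = new SummaryStatistics();
        for (long t : new long[]{120, 340, 90, 510}) {
            executionTimeStats.addValue(t);
        }
        int k = 0; // number of standard deviations above the mean, as in the constructor above
        double threshold = executionTimeStats.getMean()
                + (k * executionTimeStats.getStandardDeviation());
        System.out.println("Threshold ExecutionTime: " + threshold);
    }
}
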
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/stats/CacheabilityMetrics.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/stats/CacheabilityMetrics.java
index d873dd8..cd56c19 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/stats/CacheabilityMetrics.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/analysis/decision/flowchart/stats/CacheabilityMetrics.java
@@ -4,16 +4,11 @@ import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.Flowchar
 import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.model.MethodStats;
 import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.application.metadata.MethodInfo;
 import br.ufrgs.inf.prosoft.adaptivecaching.monitoring.cache.CacheInfo;
-import org.ehcache.sizeof.SizeOf;
 
-import java.math.BigDecimal;
-import java.math.RoundingMode;
 import java.util.Optional;
 
 public class CacheabilityMetrics {
 
-    private static SizeOf sizeOf = SizeOf.newInstance();
-
     //total of the population
     public static long sampleSize(long N, double Z, double e) {
         //Confidence level 90% -> Z=1.645
@@ -83,47 +78,13 @@ public class CacheabilityMetrics {
     }
 
     public static Optional<Boolean> isCacheSizeLarge(CacheInfo cacheInfo) {
-
-        //TODO concept considered while caching
-        if (true)
-            return Optional.of(true);
-
-
-        //unbounded cache size
-        if (cacheInfo.getTotalSpace() == null)
-            return Optional.of(true);
-
-        BigDecimal bd = new BigDecimal(cacheInfo.getFreeSpace())
-                .multiply(new BigDecimal(100))
-                .divide(new BigDecimal(cacheInfo.getTotalSpace()), 5, RoundingMode.HALF_UP);
-
-        double freePercent = bd.doubleValue();
-
-        if (freePercent >= 20.0)
-            return Optional.of(true);
-        else
-            return Optional.of(false);
+        //concept considered while caching
+        return Optional.of(true);
     }
 
     public static Optional<Boolean> isDataSizeLarge(CacheInfo cacheInfo, MethodInfo methodInfo) {
-
-        //TODO concept considered while caching
-        if (true)
-            return Optional.of(false);
-
-        if (cacheInfo.getTotalSpace() == null)
-            return Optional.empty();
-
-        long shallowSize = sizeOf.sizeOf(methodInfo.getReturnedValue());
-
-        BigDecimal bd = new BigDecimal(shallowSize)
-                .multiply(new BigDecimal(100))
-                .divide(new BigDecimal(cacheInfo.getFreeSpace()), 5, RoundingMode.HALF_UP);
-        double dataSizePercent = bd.doubleValue();
-
-        if (dataSizePercent <= 2.0)
-            return Optional.of(false);
-        else return Optional.of(true);
+        //concept considered while caching
+        return Optional.of(false);
     }
 
     public static Optional<Boolean> isExpensive(MethodStats methodStats, FlowchartWorkFlow workflow) {
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/support/Tracer.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/support/Tracer.java
index 21b4f78..f9a82b6 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/support/Tracer.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/support/Tracer.java
@@ -8,6 +8,10 @@ import org.slf4j.LoggerFactory;
 
 import java.util.Objects;
 
+/**
+ * @deprecated used to trace methods asynchronously; not working due to Hibernate sessions (the session may already be closed when the trace is saved)
+ */
+@Deprecated
 public class Tracer implements Runnable {
 
     Logger logger = LoggerFactory.getLogger(Tracer.class);
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java
index 0e60b5b..929b4d1 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/monitoring/application/aspects/TracerAspect.java
@@ -56,7 +56,7 @@ public class TracerAspect {
     public static Set<MethodEntry> cacheableMethods = new HashSet<>();
     //used in case the method info is not required
     public static Set<Object> cacheableMethodKeys = new HashSet<>();
-
+    //used to list methods that raise exceptions due to tracing; they deal with internal and low-level classes of Spring and others, and should be avoided
     public static List<String> methodBlackList = new ArrayList<>();
     /**
      * Enable and disable tracer
@@ -69,18 +69,17 @@ public class TracerAspect {
             "adaptivecaching-analyzer",
             "identifying cacheable methods"
     ));
+    //not used: only needed if the framework itself expires content; Ehcache already provides a native expiry feature
     private final ScheduledExecutorService expirationExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
             "adaptivecaching-expiration",
             "expiring old cacheable methods"
     ));
-    private final ScheduledExecutorService statsExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
-            "adaptivecaching-stats",
-            "showing stats of the framework"
-    ));
+    //not used: only needed for offline analysis, which requires cacheable methods to be loaded from other sources
     private final ScheduledExecutorService loaderExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
             "adaptivecaching-loading",
             "loading cacheable methods from storage (offline analyzed)"
     ));
+    //not used: in case of async tracing
     private final ExecutorService tracerExecutor = Executors.newSingleThreadScheduledExecutor(
             new NamedThreads(
                     "adaptivecaching-tracer",
@@ -100,12 +99,6 @@ public class TracerAspect {
     private Properties properties;
     private UserGetter userGetter;
     private Repository repository;
-    private List<LogTrace> tempTraces;
-
-    //stats
-    private int count;
-    private long hashAndStructureTime;
-    private long traceTime;
 
     @Pointcut(
             //any execution except the own framework
@@ -195,7 +188,7 @@ public class TracerAspect {
             }
 
             //TODO trigger by time
-            //TODO in some cases (Ehcache) it is better to set a timetolive directcly on cache provider
+            //TODO in some cases (Ehcache) it is better to set a time-to-live directly on the cache provider
 //            this.expirationExecutor.scheduleWithFixedDelay(
 //                    new VerboseRunnable(() -> TracerAspect.this.clean()),
 //                    cachingConfig.firstExpiry(), cachingConfig.expiryInterval(), TimeUnit.MILLISECONDS
@@ -208,11 +201,6 @@ public class TracerAspect {
 //            );
 //        }
 
-//        this.statsExecutor.scheduleWithFixedDelay(
-//                new VerboseRunnable(() -> TracerAspect.this.stats()),
-//                10, 5, TimeUnit.SECONDS
-//        );
-
             methodBlackList = new ArrayList<>();
             tempTraces = Collections.synchronizedList(new ArrayList<>());
 
@@ -232,8 +220,8 @@ public class TracerAspect {
     @Around("anyCall()")
     public Object aroundMethods(ProceedingJoinPoint joinPoint) throws Throwable {
 
-        //see if a method is being caught
-        //traceSpecificMethod(joinPoint);
+        //DEBUG: see if a method is being caught
+        //DEBUG: traceSpecificMethod(joinPoint);
 
         if (!isAllowed(joinPoint))
             return joinPoint.proceed();
@@ -241,28 +229,9 @@ public class TracerAspect {
         //generate a hash of the method that will be used as: key to cache and compare if the method is allowed or not
         Key key = new Key(joinPoint);
         //when method is already cached and obtained from it, no trace will be generated
-        //System.out.println(key + "generated to " + joinPoint.getSignature().toLongString());
 
         if (cacheableMethodKeys.contains(key)) {
             this.cacher.cache(key, joinPoint);
-
-            //caching methods
-//            for (MethodEntry methodAnalysis : cacheableMethods) {
-//                if (joinPoint.getSignature().toLongString().equals(methodAnalysis.getMethodInfo().getSignature())) {
-//
-//                    MethodInfo methodInfo = new MethodInfo(joinPoint.getSignature().toLongString(), joinPoint.getArgs());
-//
-//                    //TODO hash or not???
-//                    //        methodInfo.setArguments(HashCodeBuilder.reflectionHashCode(joinPoint.getArgs()));
-//                    //        methodInfo.setReturnedValue(HashCodeBuilder.reflectionHashCode(result));
-//
-//                    if (methodAnalysis.getMethodInfo().equalsWithoutReturnedValue(methodInfo)) {
-//                        //todo we should trace cached methods in order to provide the runtime, otherwise the cached method
-//                        //will not be cacheable on the second time
-//                        return cache(joinPoint);
-//                    }
-//                }
-//            }
         }
 
         if (tracerEnabled) {
@@ -305,10 +274,7 @@ public class TracerAspect {
         long endTime = currentTimeMillis();
 
         //we do not cache null returns, but we trace them
-        //maybe the method can sometimes return null
-//        if (result == null)
-//            return null;
-
+        //the method may sometimes legitimately return null, so no null check is done here
         LogTrace logTrace = new LogTrace();
         logTrace.setStartTime(startTime);
         logTrace.setEndTime(endTime);
@@ -319,24 +285,8 @@ public class TracerAspect {
         MethodInfo methodInfo = new MethodInfo(joinPoint.getSignature().toLongString(), joinPointArgs, result, key);
         logTrace.setMethodInfo(methodInfo);
 
-        //in case of batch processing...
-//        traces.add(logTrace);
-
-//        count++;
-//        System.out.println(count);
-
-//        temp = currentTimeMillis();
-        //could not trace async due to database sessions, when saving the session could be closed already.
-        //TODO solutions: configure jackson to avoid empty attributes / configure hibernate version as a datatype module
-//        Tracer tracer = new Tracer(repository, logTrace);
-//        if (cachingConfig.traceAsync()) {
-//            tracerExecutor.execute(tracer);
-//        } else {
-//            tracer.run();
-//        }
-
-//        long la = System.currentTimeMillis();
         try {
+            //cannot trace asynchronously or in batches because of database sessions: the session may already be closed by the time the trace is saved
             repository.save(logTrace);
             logger.debug("New trace entry: " + logTrace);// " serialization and save time: " + (System.currentTimeMillis() - la));
         } catch (Exception e) {
@@ -350,60 +300,6 @@ public class TracerAspect {
         return result;
     }
 
-    private void stats() {
-        logger.debug("Tempo hash: " + hashAndStructureTime + " Tempo save: " + traceTime + " Count: " + count);
-        logger.debug("Mean Tempo hash: " + (hashAndStructureTime / count) + " Tempo save: " + (traceTime / count));
-    }
-
-    /**
-     * Load cacheable methods
-     */
-    private void cacheableMethodsloader() {
-        try {
-            //TODO get db info from properties
-            MongoClient mongo = new MongoClient("localhost", 27017);
-            MongoDatabase database = mongo.getDatabase("cachemonitoring");
-            Repository cacheableRepository = new MongoRepository<MethodEntry>(database.getCollection("petclinicCacheable"), MethodEntry.class);
-
-            TracerAspect.cacheableMethods = (Set<MethodEntry>) cacheableRepository.findAll();
-            if (!TracerAspect.cacheableMethods.isEmpty()) {
-                logger.info(TracerAspect.cacheableMethods.size() + " cacheable methods loaded. Starting to work with them...");
-                if (cachingConfig.disableMonitoringAfterAnalysis()) {
-                    TracerAspect.enabled = false;
-                    logger.info("Tracer disabled after analysis...");
-                }
-            }
-        } catch (MongoTimeoutException e) {
-            logger.error("Cannot connect with MongoDB to get the cacheable methods.", e);
-        }
-    }
-
-    public void traceBatch() {
-        if (tempTraces.size() > 50000) {
-            logger.debug("Maximum number of traces in memory achieved. Analyzing them...");
-            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
-
-            TracerAspect.tracerEnabled = false;
-            logger.debug("Disabling traces...");
-
-            Analyzer analyzer = new Analyzer(repository, repository, cacheMonitor.getCacheInfo(), cachingConfig);
-
-//            List<LogTrace> toSave = new ArrayList<LogTrace>(traces.size());
-            synchronized (tempTraces) {
-                Set<MethodEntry> cacheable = analyzer.analyzeAndReturn(tempTraces);
-                for (MethodEntry ma : cacheable)
-                    TracerAspect.cacheableMethods = cacheable;
-
-//                logger.info("Trying to save the traces: " + traces.size());
-//                repository.saveAll(traces);
-                tempTraces.clear();
-                logger.debug("Traces list clear: " + tempTraces.size());
-            }
-            //TODO get the future and reenables the trace?
-        }
-        TracerAspect.tracerEnabled = true;
-    }
-
     private Class<?> getAvailableConfigurationClass() {
         //TODO Decouple??
         Reflections reflections = new Reflections(
@@ -435,12 +331,66 @@ public class TracerAspect {
         return properties;
     }
 
-    private void traceSpecificMethod(ProceedingJoinPoint joinPoint) {
-        if (joinPoint.getSignature().toLongString().contains("findOwnerById")) {
-            System.out.println("pointcut: " + joinPoint);
-            for (StackTraceElement st : new Throwable().getStackTrace()) {
-                System.out.println(st.getClassName() + ":" + st.getMethodName());
-            }
-        }
-    }
+    /**
+     * Load cacheable methods
+     */
+//    private void cacheableMethodsloader() {
+//        try {
+//            //TODO get db info from properties
+//            MongoClient mongo = new MongoClient("localhost", 27017);
+//            MongoDatabase database = mongo.getDatabase("cachemonitoring");
+//            Repository cacheableRepository = new MongoRepository<MethodEntry>(database.getCollection("petclinicCacheable"), MethodEntry.class);
+//
+//            TracerAspect.cacheableMethods = (Set<MethodEntry>) cacheableRepository.findAll();
+//            if (!TracerAspect.cacheableMethods.isEmpty()) {
+//                logger.info(TracerAspect.cacheableMethods.size() + " cacheable methods loaded. Starting to work with them...");
+//                if (cachingConfig.disableMonitoringAfterAnalysis()) {
+//                    TracerAspect.enabled = false;
+//                    logger.info("Tracer disabled after analysis...");
+//                }
+//            }
+//        } catch (MongoTimeoutException e) {
+//            logger.error("Cannot connect with MongoDB to get the cacheable methods.", e);
+//        }
+//    }
+//
+//    public void traceBatch() {
+//        if (tempTraces.size() > 50000) {
+//            logger.debug("Maximum number of traces in memory achieved. Analyzing them...");
+//            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
+//
+//            TracerAspect.tracerEnabled = false;
+//            logger.debug("Disabling traces...");
+//
+//            Analyzer analyzer = new Analyzer(repository, repository, cacheMonitor.getCacheInfo(), cachingConfig);
+//
+////            List<LogTrace> toSave = new ArrayList<LogTrace>(traces.size());
+//            synchronized (tempTraces) {
+//                Set<MethodEntry> cacheable = analyzer.analyzeAndReturn(tempTraces);
+//                for (MethodEntry ma : cacheable)
+//                    TracerAspect.cacheableMethods = cacheable;
+//
+////                logger.info("Trying to save the traces: " + traces.size());
+////                repository.saveAll(traces);
+//                tempTraces.clear();
+//                logger.debug("Traces list clear: " + tempTraces.size());
+//            }
+//            //TODO get the future and reenables the trace?
+//        }
+//        TracerAspect.tracerEnabled = true;
+//    }
+//
+//    private void traceSpecificMethod(ProceedingJoinPoint joinPoint) {
+//        if (joinPoint.getSignature().toLongString().contains("findOwnerById")) {
+//            System.out.println("pointcut: " + joinPoint);
+//            for (StackTraceElement st : new Throwable().getStackTrace()) {
+//                System.out.println(st.getClassName() + ":" + st.getMethodName());
+//            }
+//        }
+//    }
+
+
+//    hashing example:
+//    methodInfo.setArguments(HashCodeBuilder.reflectionHashCode(joinPoint.getArgs()));
+//    methodInfo.setReturnedValue(HashCodeBuilder.reflectionHashCode(result));
 }
\ No newline at end of file
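
The expiration-related comments above note that Ehcache can enforce time-to-live natively instead of the framework running its own expirationExecutor. A hedged sketch of that configuration for Ehcache 3.5+ (the cache alias, key/value types, heap size, and TTL below are illustrative, not the framework's actual setup):

import java.time.Duration;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.builders.ExpiryPolicyBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;

public class NativeTtlSketch {
    public static void main(String[] args) {
        CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().build(true);
        // TTL is enforced by the cache provider, so no scheduled cleanup task is needed.
        Cache<String, Object> cache = cacheManager.createCache("cacheableMethods",
                CacheConfigurationBuilder
                        .newCacheConfigurationBuilder(String.class, Object.class, ResourcePoolsBuilder.heap(1000))
                        .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofMinutes(10))));
        cache.put("someMethodKey", "someCachedResult");
        cacheManager.close();
    }
}
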
diff --git a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/adaptation/ResourceUsage.java b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/adaptation/ResourceUsage.java
index 60a132b..5768034 100644
--- a/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/adaptation/ResourceUsage.java
+++ b/framework/autonomicmanager/src/main/java/br/ufrgs/inf/prosoft/adaptivecaching/sampling/adaptation/ResourceUsage.java
@@ -35,4 +35,8 @@ public class ResourceUsage {
         // returns a percentage value with 1 decimal point precision
         return ((int)(value * 1000) / 10.0);
     }
+
+//    import org.ehcache.sizeof.SizeOf;
+//    private static SizeOf sizeOf = SizeOf.newInstance();
+//    sizeOf.sizeOf(methodInfo.getReturnedValue());
 }
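
The commented-out lines above keep a pointer to the SizeOf usage that was removed from CacheabilityMetrics. A minimal sketch of how that size check could be revived, assuming ehcache-sizeof is on the classpath; the method and parameter names below are illustrative, and the free-space value would come from CacheInfo as in the removed code:

import org.ehcache.sizeof.SizeOf;

public class DataSizeSketch {
    private static final SizeOf SIZE_OF = SizeOf.newInstance();

    // Returns true when the returned value is "large" relative to the cache's
    // free space (more than 2%, the threshold used by the removed code).
    public static boolean isDataSizeLarge(Object returnedValue, long freeSpaceBytes) {
        long shallowSize = SIZE_OF.sizeOf(returnedValue); // shallow size, as before
        double dataSizePercent = shallowSize * 100.0 / freeSpaceBytes;
        return dataSizePercent > 2.0;
    }

    public static void main(String[] args) {
        System.out.println(isDataSizeLarge("some cached result", 64L * 1024 * 1024));
    }
}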