AdaptiveCachingCoordinator.java

package br.ufrgs.inf.prosoft.adaptivecaching;

import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.FlowchartWorkFlow;
import br.ufrgs.inf.prosoft.adaptivecaching.analysis.decision.flowchart.model.MethodEntry;
import br.ufrgs.inf.prosoft.adaptivecaching.cache.CacheMonitor;
import br.ufrgs.inf.prosoft.adaptivecaching.cache.CacheMonitorFactory;
import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.cacher.AdaptiveMethodCacher;
import br.ufrgs.inf.prosoft.adaptivecaching.cachemanager.util.threads.NamedThreads;
import br.ufrgs.inf.prosoft.adaptivecaching.configuration.annotation.AdaptiveCaching;
import br.ufrgs.inf.prosoft.tigris.configuration.types.Modelling;
import br.ufrgs.inf.prosoft.tigris.monitoring.aspects.CustomMonitoring;
import br.ufrgs.inf.prosoft.tigris.monitoring.metadata.Key;
import br.ufrgs.inf.prosoft.tigris.monitoring.metadata.LogTrace;
import br.ufrgs.inf.prosoft.tigris.monitoring.storage.Repository;
import br.ufrgs.inf.prosoft.tigris.monitoring.usersession.UserGetter;
import br.ufrgs.inf.prosoft.tigris.monitoring.usersession.UserGetterFactory;
import br.ufrgs.inf.prosoft.utils.ConfigurationUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

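/**
 * Coordinates adaptive caching: periodically analyzes the monitored log traces to
 * identify cacheable methods and, through the {@link CustomMonitoring} hooks, serves
 * and stores cached results for those methods.
 */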
public class AdaptiveCachingCoordinator implements CustomMonitoring {

    //these fields are swapped by the analyzer thread and read by application threads, hence volatile
    //used when the full method information is required
    private volatile Set<MethodEntry> cacheableMethods = new ConcurrentSkipListSet<>();
    //used when only the method key is required
    private volatile Set<Object> cacheableMethodKeys = new ConcurrentSkipListSet<>();

    private volatile boolean enabled = true;

    private final ScheduledExecutorService analyzerExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreads(
            "adaptivecaching-analyzer",
            "identifying cacheable methods"
    ));

    private static final Logger logger = LoggerFactory.getLogger(AdaptiveCachingCoordinator.class);

    private AdaptiveMethodCacher cacher;
    private CacheMonitor cacheMonitor;

    //adaptive caching configurations
    private AdaptiveCaching cachingConfig;
    private Properties properties;
    private UserGetter userGetter;
    private Repository repository;

    public boolean isCacheable(Key key) {
        return cacheableMethodKeys.contains(key);
    }

    private Properties getPropertiesFile() throws IOException {
        Properties properties = new Properties();
        //getResourceAsStream returns null when the file is missing; guard to avoid a NullPointerException
        try (InputStream stream = getClass().getClassLoader().getResourceAsStream("adaptivecaching.properties")) {
            if (stream != null) {
                properties.load(stream);
            }
        }
        return properties;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void turnoff() {
        enabled = false;
    }

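    //cache reads and writes are delegated to the underlying AdaptiveMethodCacher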
    public Object getFromCache(Key key) throws Throwable {
        return this.cacher.getFromCache(key);
    }

    public void putInCache(Key key, Object value) throws Throwable {
        this.cacher.putInCache(key, value);
    }

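    //scheduled analysis cycle: loads all monitored traces, identifies cacheable methods,
    //swaps in the new sets and, depending on the configuration, cleans old data and stops further runs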
    @Override
    public void run() {

        if(!enabled) {
            logger.info("Analyzer disabled, waiting until next run to check again...");
            return;
        }

        List<LogTrace> all = repository.findAll();

        Set<MethodEntry> process = analyzeAndReturn(all);

        //todo maybe clean the cache here?
        cacheableMethods = process;
        cacheableMethodKeys = process.stream().parallel().map(MethodEntry::getMethodInfoKey).collect(Collectors.toSet());

        //join and log the signatures of the identified cacheable methods
        String methods = process.stream().map(me -> me.getMethodInfo().getSignature()).collect(Collectors.joining(", "));
        logger.info("Cacheable methods: {}", methods);

        //TODO by the time the analysis ends, new data may already have been registered and should not be deleted
        if (cachingConfig.modelling().equals(Modelling.FULLEXPLORATION)) {
            repository.removeAll();
            logger.info("Old monitoring data deleted.");
        }

        if (cachingConfig.analyzeOnce())
            enabled = false;

        logger.info("Start caching...");
    }

    /**
     * Runs the flowchart workflow over the monitored log traces and returns the
     * methods identified as cacheable.
     *
     * @param logList the monitored log traces to analyze
     * @return the set of methods identified as cacheable
     */
    private Set<MethodEntry> analyzeAndReturn(List<LogTrace> logList) {
        logger.info("Starting the analysis of cacheable methods: {} log traces loaded.", logList.size());

        FlowchartWorkFlow workflow = new FlowchartWorkFlow(cacheMonitor.getCacheInfo(), logList);
        Set<MethodEntry> process = workflow.filterCacheableMethods(cachingConfig.expiryInterval());
        logger.info("{} cacheable methods identified.", process.size());

        //TODO find by annotations @Ignore and remove the methods marked

        //if monitoring is disabled after the analysis, no future analysis should be scheduled
        if (cachingConfig.disableMonitoringAfterAnalysis()) {
            enabled = false;
            logger.info("Adaptive caching monitoring disabled since the model was built.");
        }

        return process;
    }

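    //reads the @AdaptiveCaching configuration and adaptivecaching.properties, builds the cacher
    //and its cache monitor, and schedules the periodic analysis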
    @Override
    public void initialize(Repository repository) {
        Class<?> adaptiveCachingConfig = ConfigurationUtils.getAvailableConfigurationClass(AdaptiveCaching.class);
        this.repository = repository;
        this.cachingConfig = adaptiveCachingConfig.getAnnotation(AdaptiveCaching.class);

        //todo load uncacheable annotation and methods and add to the blacklist
        //todo load cacheable annotation and methods and add to the must-cache list

        //setting up the repository
        try {
            properties = getPropertiesFile();
        } catch (IOException e) {
            logger.error("Failed to load adaptivecaching.properties", e);
        }
        if (properties == null || properties.isEmpty()) {
            logger.error("adaptivecaching.properties is not defined, adaptive caching disabled.");
            enabled = false;
        } else
            logger.info("adaptivecaching.properties found, loading properties...");

        //TODO switch and build: pass a parameter and build
        userGetter = UserGetterFactory.getInstance();

        this.cacher = new AdaptiveMethodCacher(this.cachingConfig.cacheProvider(), this.cachingConfig.expiryInterval());
        this.cacheMonitor = CacheMonitorFactory.getCacheMonitor(this.cacher.getCache(), this.cachingConfig.cacheProvider());

        //TODO load another options from @AdaptiveCaching
        //TODO get time from properties or see the external process
        //TODO trigger by time
        analyzerExecutor.scheduleWithFixedDelay(
                this::run,
                this.cachingConfig.firstAnalysis(), this.cachingConfig.analysisInterval(), TimeUnit.MILLISECONDS);
    }

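    //CustomMonitoring hooks: beforeExecuting reports whether the keyed call is cacheable,
    //returnResult serves the cached value, and afterExecuting stores a freshly computed result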
    @Override
    public boolean beforeExecuting(Key key) {
        return isCacheable(key);
    }

    @Override
    public void afterExecuting(Key key, Object result) {
        try {
            putInCache(key, result);
        } catch (Throwable throwable) {
            logger.error("Failed to cache the result for key {}", key, throwable);
        }
    }

    @Override
    public Object returnResult(Key key) {
        try {
            return getFromCache(key);
        } catch (Throwable throwable) {
            logger.error("Failed to retrieve the cached value for key {}", key, throwable);
        }
        return null;
    }
}